diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala
index 0a5a1b191a44..1540cc86d7a6 100644
--- a/compiler/src/dotty/tools/MainGenericRunner.scala
+++ b/compiler/src/dotty/tools/MainGenericRunner.scala
@@ -195,7 +195,7 @@ object MainGenericRunner {
case ExecuteMode.PossibleRun =>
val newClasspath = (settings.classPath :+ ".").flatMap(_.split(classpathSeparator).filter(_.nonEmpty)).map(File(_).toURI.toURL)
- import dotty.tools.runner.RichClassLoader._
+ import dotty.tools.runner.RichClassLoader.*
val newClassLoader = ScalaClassLoader.fromURLsParallelCapable(newClasspath)
val targetToRun = settings.possibleEntryPaths.to(LazyList).find { entryPath =>
newClassLoader.tryToLoadClass(entryPath).orElse {
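The bulk of this patch is a mechanical syntax migration: Scala 2's `_` import wildcard becomes Scala 3's `*`. A minimal sketch of the before/after, with illustrative names not taken from the patch:

    import scala.collection.mutable.*    // Scala 3 style; was: import scala.collection.mutable._

    @main def wildcardDemo(): Unit =
      val buf = ArrayBuffer(1, 2, 3)     // ArrayBuffer is visible via the wildcard import
      println(buf.sum)                   // prints 6

Both forms still compile, but the `_` wildcard is deprecated under `-source:future`, which is why the compiler codebase is moving off it wholesale.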
diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala
index d95638be2695..4027cf9fb564 100644
--- a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala
@@ -4,8 +4,8 @@ package jvm
import scala.language.unsafeNulls
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.report
/**
diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala
index 8aa79f4ee6da..f1029b702ee5 100644
--- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala
@@ -13,16 +13,16 @@ import BCodeHelpers.InvokeStyle
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.CompilationUnit
-import dotty.tools.dotc.core.Constants._
+import dotty.tools.dotc.core.Constants.*
import dotty.tools.dotc.core.Flags.{Label => LabelFlag, _}
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.core.StdNames.{nme, str}
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.transform.Erasure
-import dotty.tools.dotc.transform.SymUtils._
-import dotty.tools.dotc.util.Spans._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Phases._
+import dotty.tools.dotc.transform.SymUtils.*
+import dotty.tools.dotc.util.Spans.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Phases.*
import dotty.tools.dotc.core.Decorators.em
import dotty.tools.dotc.report
@@ -33,13 +33,13 @@ import dotty.tools.dotc.report
*
*/
trait BCodeBodyBuilder extends BCodeSkelBuilder {
- // import global._
- // import definitions._
- import tpd._
+ // import global.*
+ // import definitions.*
+ import tpd.*
import int.{_, given}
import DottyBackendInterface.symExtensions
- import bTypes._
- import coreBTypes._
+ import bTypes.*
+ import coreBTypes.*
protected val primitives: DottyPrimitives
@@ -126,7 +126,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
assert(resKind.isNumericType || (resKind == BOOL),
s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]")
- import ScalaPrimitivesOps._
+ import ScalaPrimitivesOps.*
args match {
// unary operation
@@ -179,7 +179,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match{
case Apply(DesugaredSelect(arrayObj, _), args) =>
- import ScalaPrimitivesOps._
+ import ScalaPrimitivesOps.*
val k = tpeTK(arrayObj)
genLoad(arrayObj, k)
val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code"))
@@ -262,7 +262,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
val code = primitives.getPrimitive(tree, receiver.tpe)
- import ScalaPrimitivesOps._
+ import ScalaPrimitivesOps.*
if (isArithmeticOp(code)) genArithmeticOp(tree, code)
else if (code == CONCAT) genStringConcat(tree)
@@ -1267,7 +1267,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
/* Generate coercion denoted by "code" */
def genCoercion(code: Int): Unit = {
- import ScalaPrimitivesOps._
+ import ScalaPrimitivesOps.*
(code: @switch) match {
case B2B | S2S | C2C | I2I | L2L | F2F | D2D => ()
case _ =>
@@ -1443,7 +1443,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
val mdescr = bmType.descriptor
val isInterface = isEmittedInterface(receiverClass)
- import InvokeStyle._
+ import InvokeStyle.*
if (style == Super) {
if (isInterface && !method.is(JavaDefined)) {
val args = new Array[BType](bmType.argumentTypes.length + 1)
@@ -1497,7 +1497,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
} else if (tk.isRef) { // REFERENCE(_) | ARRAY(_)
bc.emitIF_ACMP(op, success)
} else {
- import Primitives._
+ import Primitives.*
def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE
(tk: @unchecked) match {
case LONG => emit(asm.Opcodes.LCMP)
@@ -1512,7 +1512,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
/* Emits code to compare (and consume) stack-top and zero using the 'op' operator */
private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = {
- import Primitives._
+ import Primitives.*
if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated)
else {
if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
@@ -1825,7 +1825,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder {
else
jliLambdaMetaFactoryMetafactoryHandle
- bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs: _*)
+ bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs*)
generatedType
}
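This file also shows the second mechanical change in the patch: the vararg splice `xs: _*` becomes `xs*` (see `bsmArgs*` just above). A minimal illustrative sketch:

    def sum(xs: Int*): Int = xs.foldLeft(0)(_ + _)

    @main def spliceDemo(): Unit =
      val args = Seq(1, 2, 3)
      println(sum(args*))                // Scala 3 splice; was: sum(args: _*)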
diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala
index c36c8c546635..3779f59d33b0 100644
--- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala
@@ -9,24 +9,25 @@ import scala.tools.asm
import scala.tools.asm.AnnotationVisitor
import scala.tools.asm.ClassWriter
import scala.collection.mutable
+import scala.compiletime.uninitialized
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.ast.Trees
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.Constants._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Phases._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.Constants.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Phases.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.Names.Name
import dotty.tools.dotc.core.NameKinds.ExpandedName
import dotty.tools.dotc.core.Signature
-import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.StdNames.*
import dotty.tools.dotc.core.NameKinds
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.core.Types
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.core.TypeErasure
import dotty.tools.dotc.transform.GenericSignatures
import dotty.tools.dotc.transform.ElimErasedValueType
@@ -44,12 +45,12 @@ import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions
*/
trait BCodeHelpers extends BCodeIdiomatic {
// for some reason singleton types aren't allowed in constructor calls. will need several casts in code to enforce
- //import global._
- import bTypes._
- import tpd._
- import coreBTypes._
+ //import global.*
+ import bTypes.*
+ import tpd.*
+ import coreBTypes.*
import int.{_, given}
- import DottyBackendInterface._
+ import DottyBackendInterface.*
// We need to access GenBCode phase to get access to post-processor components.
// At this point it should always be initialized already.
@@ -576,7 +577,7 @@ trait BCodeHelpers extends BCodeIdiomatic {
/* builder of mirror classes */
class JMirrorBuilder extends JCommonBuilder {
- private var cunit: CompilationUnit = _
+ private var cunit: CompilationUnit = uninitialized
def getCurrentCUnit(): CompilationUnit = cunit;
/* Generate a mirror class for a top-level module. A mirror class is a class
@@ -700,10 +701,10 @@ trait BCodeHelpers extends BCodeIdiomatic {
* classes.
*/
private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = {
- import ct.bTypes._
+ import ct.bTypes.*
val defn = ctx.definitions
- import coreBTypes._
- import Types._
+ import coreBTypes.*
+ import Types.*
/**
* Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int.
* The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType.
@@ -851,7 +852,7 @@ trait BCodeHelpers extends BCodeIdiomatic {
object BCodeHelpers {
class InvokeStyle(val style: Int) extends AnyVal {
- import InvokeStyle._
+ import InvokeStyle.*
def isVirtual: Boolean = this == Virtual
def isStatic : Boolean = this == Static
def isSpecial: Boolean = this == Special
diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala
index 42f8ef7f4ef6..9938b7415da7 100644
--- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala
@@ -22,8 +22,8 @@ trait BCodeIdiomatic {
val bTypes: BTypesFromSymbols[int.type]
import int.{_, given}
- import bTypes._
- import coreBTypes._
+ import bTypes.*
+ import coreBTypes.*
lazy val JavaStringBuilderClassName = jlStringBuilderRef.internalName
@@ -247,9 +247,9 @@ trait BCodeIdiomatic {
): Unit = {
jmethod.visitInvokeDynamicInsn(
"makeConcatWithConstants",
- asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*),
+ asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes*),
coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle,
- (recipe +: constants):_*
+ (recipe +: constants)*
)
}
@@ -522,7 +522,7 @@ trait BCodeIdiomatic {
i += 1
}
assert(oldPos == keys.length, "emitSWITCH")
- jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
+ jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches*)
} else {
jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
}
@@ -617,7 +617,7 @@ trait BCodeIdiomatic {
/* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. */
object JCodeMethodN {
- import asm.Opcodes._
+ import asm.Opcodes.*
// ---------------- conversions ----------------
@@ -651,7 +651,7 @@ trait BCodeIdiomatic {
* can-multi-thread
*/
final def coercionFrom(code: Int): BType = {
- import ScalaPrimitivesOps._
+ import ScalaPrimitivesOps.*
(code: @switch) match {
case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE
case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT
@@ -668,7 +668,7 @@ trait BCodeIdiomatic {
* can-multi-thread
*/
final def coercionTo(code: Int): BType = {
- import ScalaPrimitivesOps._
+ import ScalaPrimitivesOps.*
(code: @switch) match {
case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE
case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR
diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala
index 073cc44e76b7..61383d2000d1 100644
--- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala
@@ -31,10 +31,10 @@ import dotty.tools.dotc.transform.SymUtils.*
trait BCodeSkelBuilder extends BCodeHelpers {
import int.{_, given}
import DottyBackendInterface.{symExtensions, _}
- import tpd._
- import bTypes._
- import coreBTypes._
- import bCodeAsmCommon._
+ import tpd.*
+ import bTypes.*
+ import coreBTypes.*
+ import bCodeAsmCommon.*
lazy val NativeAttr: Symbol = requiredClass[scala.native]
diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala
index 74e1c5812b14..4e2ea6dd52b8 100644
--- a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala
@@ -9,7 +9,7 @@ import scala.tools.asm
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.core.StdNames.nme
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.ast.tpd
/*
@@ -20,9 +20,9 @@ import dotty.tools.dotc.ast.tpd
*/
trait BCodeSyncAndTry extends BCodeBodyBuilder {
import int.given
- import tpd._
- import bTypes._
- import coreBTypes._
+ import tpd.*
+ import bTypes.*
+ import coreBTypes.*
/*
* Functionality to lower `synchronized` and `try` expressions.
*/
diff --git a/compiler/src/dotty/tools/backend/jvm/BTypes.scala b/compiler/src/dotty/tools/backend/jvm/BTypes.scala
index ba5e3e360e88..8b4c2834ed19 100644
--- a/compiler/src/dotty/tools/backend/jvm/BTypes.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BTypes.scala
@@ -39,7 +39,7 @@ abstract class BTypes { self =>
def classBTypeFromInternalName(internalName: String) = classBTypeFromInternalNameMap(internalName)
val coreBTypes: CoreBTypes { val bTypes: self.type}
- import coreBTypes._
+ import coreBTypes.*
/**
* A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType
diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala
index dc5cf48de6fe..0743465b7b3b 100644
--- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala
@@ -7,12 +7,12 @@ import scala.annotation.threadUnsafe
import scala.collection.mutable
import scala.collection.mutable.Clearable
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Phases._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Phases.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.core.Phases.Phase
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.core.StdNames
import dotty.tools.dotc.core.Phases
@@ -29,12 +29,12 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce
lazy val VolatileAttr = requiredClass[scala.volatile]
val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int)
- import bCodeAsmCommon._
+ import bCodeAsmCommon.*
val coreBTypes = new CoreBTypesFromSymbols[I]{
val bTypes: BTypesFromSymbols.this.type = BTypesFromSymbols.this
}
- import coreBTypes._
+ import coreBTypes.*
@threadUnsafe protected lazy val classBTypeFromInternalNameMap =
collection.concurrent.TrieMap.empty[String, ClassBType]
@@ -286,7 +286,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce
val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.is(Mutable, butNot = Accessor) && !sym.enclosingClass.is(Trait)
- import asm.Opcodes._
+ import asm.Opcodes.*
import GenBCodeOps.addFlagIf
0 .addFlagIf(privateFlag, ACC_PRIVATE)
.addFlagIf(!privateFlag, ACC_PUBLIC)
@@ -312,7 +312,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce
}
def javaFieldFlags(sym: Symbol) = {
- import asm.Opcodes._
+ import asm.Opcodes.*
import GenBCodeOps.addFlagIf
javaFlags(sym)
.addFlagIf(sym.hasAnnotation(TransientAttr), ACC_TRANSIENT)
diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala
index 668dedd6076e..2f8a469169cc 100644
--- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala
+++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala
@@ -5,7 +5,7 @@ import scala.tools.asm.Handle
import scala.tools.asm.tree.InvokeDynamicInsnNode
import asm.tree.ClassNode
import scala.collection.mutable
-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
import dotty.tools.dotc.report
import scala.language.unsafeNulls
@@ -92,9 +92,9 @@ class BackendUtils(val postProcessor: PostProcessor) {
* methods.
*/
def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = {
- import asm.Opcodes._
- import bTypes._
- import coreBTypes._
+ import asm.Opcodes.*
+ import bTypes.*
+ import coreBTypes.*
val cw = classNode
@@ -107,7 +107,7 @@ class BackendUtils(val postProcessor: PostProcessor) {
val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serializedLamdaObjDesc, null, null)
def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = {
mv.visitVarInsn(ALOAD, 0)
- mv.visitInvokeDynamicInsn("lambdaDeserialize", serializedLamdaObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*)
+ mv.visitInvokeDynamicInsn("lambdaDeserialize", serializedLamdaObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods*)
}
val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See MAX_MH_ARITY in CallSite.java
diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
index 109fac06d63a..ec251b4aa3f0 100644
--- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
+++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala
@@ -4,7 +4,7 @@ import java.io.{DataOutputStream, IOException, BufferedOutputStream, FileOutputS
import java.nio.ByteBuffer
import java.nio.channels.{ClosedByInterruptException, FileChannel}
import java.nio.charset.StandardCharsets.UTF_8
-import java.nio.file._
+import java.nio.file.*
import java.nio.file.attribute.FileAttribute
import java.util
import java.util.concurrent.ConcurrentHashMap
@@ -15,7 +15,7 @@ import dotty.tools.dotc.core.Decorators.em
import dotty.tools.io.{AbstractFile, PlainFile}
import dotty.tools.io.PlainFile.toPlainFile
import BTypes.InternalName
-import scala.util.chaining._
+import scala.util.chaining.*
import dotty.tools.io.JarArchive
import scala.language.unsafeNulls
@@ -146,7 +146,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
val storeOnly = compressionLevel == Deflater.NO_COMPRESSION
val jarWriter: JarOutputStream = {
- import scala.util.Properties._
+ import scala.util.Properties.*
val manifest = new Manifest
val attrs = manifest.getMainAttributes.nn
attrs.put(MANIFEST_VERSION, "1.0")
@@ -201,7 +201,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
val parent = filePath.getParent
if (!builtPaths.containsKey(parent)) {
parent.iterator.forEachRemaining(checkName)
- try Files.createDirectories(parent, noAttributes: _*)
+ try Files.createDirectories(parent, noAttributes*)
catch {
case e: FileAlreadyExistsException =>
// `createDirectories` reports this exception if `parent` is an existing symlink to a directory
diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala
index 9572777095e0..a477e55e2b68 100644
--- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala
+++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala
@@ -8,27 +8,28 @@ import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Phases.Phase
import scala.collection.mutable
-import scala.jdk.CollectionConverters._
-import dotty.tools.dotc.transform.SymUtils._
+import scala.jdk.CollectionConverters.*
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.interfaces
import dotty.tools.dotc.report
import java.util.Optional
import dotty.tools.dotc.sbt.ExtractDependencies
-import dotty.tools.dotc.core._
-import Contexts._
-import Phases._
-import Symbols._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Phases.*
+import Symbols.*
import StdNames.nme
import java.io.DataOutputStream
import java.nio.channels.ClosedByInterruptException
import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler }
+import dotty.tools.dotc.core.tasty.TastyUnpickler
import scala.tools.asm
-import scala.tools.asm.tree._
-import tpd._
+import scala.tools.asm.tree.*
+import tpd.*
import dotty.tools.io.AbstractFile
import dotty.tools.dotc.util
import dotty.tools.dotc.util.NoSourcePosition
@@ -36,7 +37,7 @@ import dotty.tools.dotc.util.NoSourcePosition
class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val bTypes: BTypesFromSymbols[int.type]) { self =>
import DottyBackendInterface.symExtensions
- import bTypes._
+ import bTypes.*
import int.given
private lazy val mirrorCodeGen = Impl.JMirrorBuilder()
@@ -94,7 +95,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)(
for (binary <- unit.pickled.get(claszSymbol.asClass)) {
generatedTasty += GeneratedTasty(store, binary)
val tasty =
- val uuid = new TastyHeaderUnpickler(binary()).readHeader()
+ val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader()
val lo = uuid.getMostSignificantBits
val hi = uuid.getLeastSignificantBits
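Beyond syntax, this hunk carries the one semantic change in the patch: `TastyHeaderUnpickler` is now constructed with an explicit config, `TastyUnpickler.scala3CompilerConfig`, presumably so the TASTy header-version check applies compiler-specific compatibility rules. A hedged sketch of the call shape used above; the helper name is hypothetical:

    import dotty.tools.tasty.TastyHeaderUnpickler
    import dotty.tools.dotc.core.tasty.TastyUnpickler

    // Read the UUID out of a TASTy payload, mirroring the code above.
    def tastyUuidBits(binary: Array[Byte]): (Long, Long) =
      val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary).readHeader()
      (uuid.getMostSignificantBits, uuid.getLeastSignificantBits)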
diff --git a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala
index 299c1c75d6cf..94a946989d23 100644
--- a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala
+++ b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala
@@ -1,9 +1,9 @@
package dotty.tools.backend.jvm
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Phases._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Phases.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.core.Flags.Trait
import dotty.tools.dotc.transform.MegaPhase.MiniPhase
@@ -18,7 +18,7 @@ import dotty.tools.dotc.transform.MegaPhase.MiniPhase
* the redundant mixin class could be required as a parent by the JVM.
*/
class CollectSuperCalls extends MiniPhase {
- import tpd._
+ import tpd.*
override def phaseName: String = CollectSuperCalls.name
diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala
index 538c288b5ef6..5a3980347bcb 100644
--- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala
+++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala
@@ -3,7 +3,7 @@ package backend
package jvm
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.transform.Erasure
import scala.tools.asm.{Handle, Opcodes}
import dotty.tools.dotc.core.StdNames
@@ -11,7 +11,7 @@ import BTypes.InternalName
abstract class CoreBTypes {
val bTypes: BTypes
- import bTypes._
+ import bTypes.*
def primitiveTypeMap: Map[Symbol, PrimitiveBType]
@@ -55,9 +55,9 @@ abstract class CoreBTypes {
abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes {
val bTypes: BTypesFromSymbols[I]
- import bTypes._
+ import bTypes.*
import int.given
- import DottyBackendInterface._
+ import DottyBackendInterface.*
import frontendAccess.frontendSynch
import dotty.tools.dotc.core.Contexts.Context
@@ -223,7 +223,7 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy
)
lazy val typeOfArrayOp: Map[Int, BType] = {
- import dotty.tools.backend.ScalaPrimitivesOps._
+ import dotty.tools.backend.ScalaPrimitivesOps.*
Map(
(List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
(List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
@@ -233,7 +233,7 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy
(List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
(List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
(List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
- (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) : _*
+ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) *
)
}
}
diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
index c005a6d38403..30568ef92b2d 100644
--- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
+++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
@@ -3,23 +3,23 @@ package dotty.tools.backend.jvm
import scala.language.unsafeNulls
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.transform.SymUtils.*
import java.io.{File => _}
import scala.reflect.ClassTag
import dotty.tools.io.AbstractFile
-import dotty.tools.dotc.core._
-import Contexts._
-import Types._
-import Symbols._
-import Phases._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Types.*
+import Symbols.*
+import Phases.*
import Decorators.em
import dotty.tools.dotc.util.ReadOnlyMap
import dotty.tools.dotc.report
-import tpd._
+import tpd.*
import StdNames.nme
import NameKinds.{LazyBitMapName, LazyLocalName}
diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
index 8c1691e88b80..d5c111259f01 100644
--- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala
@@ -3,12 +3,13 @@ package dotty.tools.backend.jvm
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.core.Phases.Phase
import dotty.tools.dotc.report
-import dotty.tools.dotc.core._
+import dotty.tools.dotc.core.*
import dotty.tools.dotc.interfaces.CompilerCallback
-import Contexts._
-import Symbols._
-import dotty.tools.io._
+import Contexts.*
+import Symbols.*
+import dotty.tools.io.*
import scala.collection.mutable
+import scala.compiletime.uninitialized
class GenBCode extends Phase { self =>
@@ -25,7 +26,7 @@ class GenBCode extends Phase { self =>
private val entryPoints = new mutable.HashSet[String]()
def registerEntryPoint(s: String): Unit = entryPoints += s
- private var _backendInterface: DottyBackendInterface = _
+ private var _backendInterface: DottyBackendInterface = uninitialized
def backendInterface(using ctx: Context): DottyBackendInterface = {
if _backendInterface eq null then
// Enforce usage of FreshContext so we would be able to modify compilation unit between runs
@@ -36,7 +37,7 @@ class GenBCode extends Phase { self =>
_backendInterface
}
- private var _codeGen: CodeGen = _
+ private var _codeGen: CodeGen = uninitialized
def codeGen(using Context): CodeGen = {
if _codeGen eq null then
val int = backendInterface
@@ -45,28 +46,28 @@ class GenBCode extends Phase { self =>
_codeGen
}
- private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = _
+ private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = uninitialized
def bTypes(using Context): BTypesFromSymbols[DottyBackendInterface] = {
if _bTypes eq null then
_bTypes = BTypesFromSymbols(backendInterface, frontendAccess)
_bTypes
}
- private var _frontendAccess: PostProcessorFrontendAccess | Null = _
+ private var _frontendAccess: PostProcessorFrontendAccess | Null = uninitialized
def frontendAccess(using Context): PostProcessorFrontendAccess = {
if _frontendAccess eq null then
_frontendAccess = PostProcessorFrontendAccess.Impl(backendInterface, entryPoints)
_frontendAccess.nn
}
- private var _postProcessor: PostProcessor | Null = _
+ private var _postProcessor: PostProcessor | Null = uninitialized
def postProcessor(using Context): PostProcessor = {
if _postProcessor eq null then
_postProcessor = new PostProcessor(frontendAccess, bTypes)
_postProcessor.nn
}
- private var _generatedClassHandler: GeneratedClassHandler | Null = _
+ private var _generatedClassHandler: GeneratedClassHandler | Null = uninitialized
def generatedClassHandler(using Context): GeneratedClassHandler = {
if _generatedClassHandler eq null then
_generatedClassHandler = GeneratedClassHandler(postProcessor)
diff --git a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala
index fc02d9597efe..bf2ae9a131aa 100644
--- a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala
@@ -2,7 +2,7 @@ package dotty.tools.backend.jvm
import java.nio.channels.ClosedByInterruptException
import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy
-import java.util.concurrent._
+import java.util.concurrent.*
import scala.collection.mutable.ListBuffer
import scala.concurrent.duration.Duration
@@ -15,6 +15,7 @@ import dotty.tools.dotc.core.Phases
import dotty.tools.dotc.core.Decorators.em
import scala.language.unsafeNulls
+import scala.compiletime.uninitialized
/**
* Interface to handle post-processing and classfile writing (see [[PostProcessor]]) of generated
@@ -185,7 +186,7 @@ final private class CompilationUnitInPostProcess(private var classes: List[Gener
}
/** the main async task submitted onto the scheduler */
- var task: Future[Unit] = _
+ var task: Future[Unit] = uninitialized
val bufferedReporting = new PostProcessorFrontendAccess.BufferingBackendReporting()
}
\ No newline at end of file
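The third recurring change replaces the Scala 2 default-initialization idiom `var x: T = _` with the explicit `scala.compiletime.uninitialized` marker, hence the new import added in several files above. A minimal sketch, with illustrative names:

    import scala.compiletime.uninitialized

    class Cache:
      // Field stays null until init() runs; was `= _` in Scala 2.
      private var data: Array[Int] = uninitialized
      def init(n: Int): Unit = data = new Array[Int](n)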
diff --git a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala
index 71bf307266c5..00d7dc598509 100644
--- a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala
+++ b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala
@@ -3,12 +3,12 @@ package dotty.tools.backend.jvm
import scala.language.unsafeNulls
import scala.tools.asm.{ClassReader, Type, Handle }
-import scala.tools.asm.tree._
+import scala.tools.asm.tree.*
import scala.collection.mutable
import scala.util.control.{NoStackTrace, NonFatal}
-import scala.annotation._
-import scala.jdk.CollectionConverters._
+import scala.annotation.*
+import scala.jdk.CollectionConverters.*
// Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf
// https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L928
diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala
index e6911c766655..4e3438f3d78a 100644
--- a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala
+++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala
@@ -14,7 +14,7 @@ import dotty.tools.dotc.core.Phases
* frontend. All methods are synchronized.
*/
sealed abstract class PostProcessorFrontendAccess {
- import PostProcessorFrontendAccess._
+ import PostProcessorFrontendAccess.*
def compilerSettings: CompilerSettings
diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
index bc453aec17af..262b5df43362 100644
--- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
+++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala
@@ -2,10 +2,10 @@ package dotty.tools
package backend.jvm
import dotc.ast.Trees.Select
-import dotc.ast.tpd._
-import dotc.core._
-import Contexts._
-import Names.TermName, StdNames._
+import dotc.ast.tpd.*
+import dotc.core.*
+import Contexts.*
+import Names.TermName, StdNames.*
import Types.{JavaArrayType, UnspecifiedErrorType, Type}
import Symbols.{Symbol, NoSymbol}
import Decorators.em
@@ -32,7 +32,7 @@ import scala.annotation.threadUnsafe
* Inspired from the `scalac` compiler.
*/
class DottyPrimitives(ictx: Context) {
- import dotty.tools.backend.ScalaPrimitivesOps._
+ import dotty.tools.backend.ScalaPrimitivesOps.*
@threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init
diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala
index 1579b4577933..2c5a6639dc8b 100644
--- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala
+++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala
@@ -1,8 +1,8 @@
package dotty.tools.backend.sjs
-import dotty.tools.dotc.core._
-import Contexts._
-import Phases._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Phases.*
/** Generates Scala.js IR files for the compilation unit. */
class GenSJSIR extends Phase {
diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala
index fa8f99fc0e03..d0694617f61e 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala
@@ -8,20 +8,20 @@ import scala.collection.mutable
import dotty.tools.FatalError
import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core._
-import Contexts._
-import Decorators._
-import Flags._
-import Names._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Decorators.*
+import Flags.*
+import Names.*
import NameKinds.DefaultGetterName
-import Types._
-import Symbols._
-import Phases._
-import StdNames._
+import Types.*
+import Symbols.*
+import Phases.*
+import StdNames.*
import TypeErasure.ErasedValueType
import dotty.tools.dotc.transform.{Erasure, ValueClasses}
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.util.SourcePosition
import dotty.tools.dotc.report
@@ -32,9 +32,9 @@ import org.scalajs.ir.OriginalName
import org.scalajs.ir.OriginalName.NoOriginalName
import org.scalajs.ir.Trees.OptimizerHints
-import dotty.tools.dotc.transform.sjs.JSSymUtils._
+import dotty.tools.dotc.transform.sjs.JSSymUtils.*
-import JSEncoding._
+import JSEncoding.*
import ScopedVar.withScopedVars
/** Main codegen for Scala.js IR.
@@ -54,15 +54,15 @@ import ScopedVar.withScopedVars
* - `genStatOrExpr()` and everything else generate the bodies of methods.
*/
class JSCodeGen()(using genCtx: Context) {
- import JSCodeGen._
- import tpd._
+ import JSCodeGen.*
+ import tpd.*
val sjsPlatform = dotty.tools.dotc.config.SJSPlatform.sjsPlatform
val jsdefn = JSDefinitions.jsdefn
private val primitives = new JSPrimitives(genCtx)
val positionConversions = new JSPositions()(using genCtx)
- import positionConversions._
+ import positionConversions.*
private val jsExportsGen = new JSExportsGen(this)
@@ -1827,7 +1827,7 @@ class JSCodeGen()(using genCtx: Context) {
}
case Literal(value) =>
- import Constants._
+ import Constants.*
value.tag match {
case UnitTag =>
js.Skip()
@@ -2525,7 +2525,7 @@ class JSCodeGen()(using genCtx: Context) {
/** Gen JS code for a primitive method call. */
private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = {
- import dotty.tools.backend.ScalaPrimitivesOps._
+ import dotty.tools.backend.ScalaPrimitivesOps.*
implicit val pos = tree.span
@@ -2565,7 +2565,7 @@ class JSCodeGen()(using genCtx: Context) {
/** Gen JS code for a simple unary operation. */
private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = {
- import dotty.tools.backend.ScalaPrimitivesOps._
+ import dotty.tools.backend.ScalaPrimitivesOps.*
implicit val pos = tree.span
@@ -2606,7 +2606,7 @@ class JSCodeGen()(using genCtx: Context) {
/** Gen JS code for a simple binary operation. */
private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = {
- import dotty.tools.backend.ScalaPrimitivesOps._
+ import dotty.tools.backend.ScalaPrimitivesOps.*
implicit val pos: SourcePosition = tree.sourcePos
@@ -2646,7 +2646,7 @@ class JSCodeGen()(using genCtx: Context) {
} else if (code == ZAND) {
js.If(lsrc, rsrc, js.BooleanLiteral(false))(jstpe.BooleanType)
} else {
- import js.BinaryOp._
+ import js.BinaryOp.*
(opType: @unchecked) match {
case jstpe.IntType =>
@@ -2768,7 +2768,7 @@ class JSCodeGen()(using genCtx: Context) {
*/
private def genConversion(from: jstpe.Type, to: jstpe.Type, value: js.Tree)(
implicit pos: Position): js.Tree = {
- import js.UnaryOp._
+ import js.UnaryOp.*
if (from == to || from == jstpe.NothingType) {
value
@@ -2823,7 +2823,7 @@ class JSCodeGen()(using genCtx: Context) {
private def genUniversalEqualityOp(ltpe: Type, rtpe: Type, lhs: js.Tree, rhs: js.Tree, code: Int)(
implicit pos: SourcePosition): js.Tree = {
- import dotty.tools.backend.ScalaPrimitivesOps._
+ import dotty.tools.backend.ScalaPrimitivesOps.*
val bypassEqEq = {
// Do not call equals if we have a literal null at either side.
@@ -2931,7 +2931,7 @@ class JSCodeGen()(using genCtx: Context) {
/** Gen JS code for an array operation (get, set or length) */
private def genArrayOp(tree: Tree, code: Int): js.Tree = {
- import dotty.tools.backend.ScalaPrimitivesOps._
+ import dotty.tools.backend.ScalaPrimitivesOps.*
implicit val pos = tree.span
@@ -3766,7 +3766,7 @@ class JSCodeGen()(using genCtx: Context) {
private def genJSPrimitive(tree: Apply, args: List[Tree], code: Int,
isStat: Boolean): js.Tree = {
- import JSPrimitives._
+ import JSPrimitives.*
implicit val pos = tree.span
@@ -4696,7 +4696,7 @@ class JSCodeGen()(using genCtx: Context) {
}
private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = {
- import js.JSNativeLoadSpec._
+ import js.JSNativeLoadSpec.*
val symOwner = sym.owner
diff --git a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala
index 5336d60129ac..ab7f9a89f9c5 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala
@@ -4,14 +4,15 @@ import scala.language.unsafeNulls
import scala.annotation.threadUnsafe
-import dotty.tools.dotc.core._
-import Names._
-import Types._
-import Contexts._
-import Symbols._
-import StdNames._
+import dotty.tools.dotc.core.*
+import Names.*
+import Types.*
+import Contexts.*
+import Symbols.*
+import StdNames.*
import dotty.tools.dotc.config.SJSPlatform
+import scala.compiletime.uninitialized
object JSDefinitions {
/** The Scala.js-specific definitions for the current context. */
@@ -249,7 +250,7 @@ final class JSDefinitions()(using Context) {
@threadUnsafe lazy val Selectable_reflectiveSelectableFromLangReflectiveCallsR = SelectableModule.requiredMethodRef("reflectiveSelectableFromLangReflectiveCalls")
def Selectable_reflectiveSelectableFromLangReflectiveCalls(using Context) = Selectable_reflectiveSelectableFromLangReflectiveCallsR.symbol
- private var allRefClassesCache: Set[Symbol] = _
+ private var allRefClassesCache: Set[Symbol] = uninitialized
def allRefClasses(using Context): Set[Symbol] = {
if (allRefClassesCache == null) {
val baseNames = List("Object", "Boolean", "Character", "Byte", "Short",
diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala
index 73a150c60290..f2b90d5b1161 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala
@@ -4,16 +4,16 @@ import scala.language.unsafeNulls
import scala.collection.mutable
-import dotty.tools.dotc.core._
-import Contexts._
-import Flags._
-import Types._
-import Symbols._
-import NameOps._
-import Names._
-import StdNames._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Flags.*
+import Types.*
+import Symbols.*
+import NameOps.*
+import Names.*
+import StdNames.*
-import dotty.tools.dotc.transform.sjs.JSSymUtils._
+import dotty.tools.dotc.transform.sjs.JSSymUtils.*
import org.scalajs.ir
import org.scalajs.ir.{Trees => js, Types => jstpe}
@@ -61,7 +61,7 @@ object JSEncoding {
// Fresh local name generator ----------------------------------------------
class LocalNameGenerator {
- import LocalNameGenerator._
+ import LocalNameGenerator.*
private val usedLocalNames = mutable.Set.empty[LocalName]
private val localSymbolNames = mutable.Map.empty[Symbol, LocalName]
diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala
index 78412999bb34..8c72f03e7cc4 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala
@@ -5,18 +5,18 @@ import scala.language.unsafeNulls
import scala.annotation.tailrec
import scala.collection.mutable
-import dotty.tools.dotc.core._
+import dotty.tools.dotc.core.*
-import Contexts._
-import Decorators._
-import Denotations._
-import Flags._
-import Names._
+import Contexts.*
+import Decorators.*
+import Denotations.*
+import Flags.*
+import Names.*
import NameKinds.DefaultGetterName
-import NameOps._
-import Phases._
-import Symbols._
-import Types._
+import NameOps.*
+import Phases.*
+import Symbols.*
+import Types.*
import TypeErasure.ErasedValueType
import dotty.tools.dotc.util.{SourcePosition, SrcPos}
@@ -28,14 +28,14 @@ import org.scalajs.ir.OriginalName.NoOriginalName
import org.scalajs.ir.Position.NoPosition
import org.scalajs.ir.Trees.OptimizerHints
-import dotty.tools.dotc.transform.sjs.JSExportUtils._
-import dotty.tools.dotc.transform.sjs.JSSymUtils._
+import dotty.tools.dotc.transform.sjs.JSExportUtils.*
+import dotty.tools.dotc.transform.sjs.JSSymUtils.*
-import JSEncoding._
+import JSEncoding.*
final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) {
- import jsCodeGen._
- import positionConversions._
+ import jsCodeGen.*
+ import positionConversions.*
/** Info for a non-member export. */
sealed trait ExportInfo {
@@ -154,7 +154,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) {
(info, tups) <- exports.groupBy(_._1)
kind <- checkSameKind(tups)
} yield {
- import ExportKind._
+ import ExportKind.*
implicit val pos = info.pos
@@ -201,7 +201,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) {
implicit val pos = info.pos
- import ExportKind._
+ import ExportKind.*
kind match {
case Method =>
diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala
index 2fd007165952..3b25187b0acd 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala
@@ -4,8 +4,8 @@ import scala.language.unsafeNulls
import java.net.{URI, URISyntaxException}
-import dotty.tools.dotc.core._
-import Contexts._
+import dotty.tools.dotc.core.*
+import Contexts.*
import Decorators.em
import dotty.tools.dotc.report
@@ -17,7 +17,7 @@ import org.scalajs.ir
/** Conversion utilities from dotty Positions to IR Positions. */
class JSPositions()(using Context) {
- import JSPositions._
+ import JSPositions.*
private val sourceURIMaps: List[URIMap] = {
ctx.settings.scalajsMapSourceURI.value.flatMap { option =>
@@ -64,7 +64,7 @@ class JSPositions()(using Context) {
sourceAndSpan2irPos(sourcePos.source, sourcePos.span)
private object span2irPosCache {
- import dotty.tools.dotc.util._
+ import dotty.tools.dotc.util.*
private var lastDotcSource: SourceFile = null
private var lastIRSource: ir.Position.SourceFile = null
diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala
index 029273aed54b..a3a37795826a 100644
--- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala
+++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala
@@ -1,13 +1,13 @@
package dotty.tools.backend.sjs
-import dotty.tools.dotc.core._
+import dotty.tools.dotc.core.*
import Names.TermName
-import Types._
-import Contexts._
-import Symbols._
+import Types.*
+import Contexts.*
+import Symbols.*
import Decorators.em
-import dotty.tools.dotc.ast.tpd._
+import dotty.tools.dotc.ast.tpd.*
import dotty.tools.backend.jvm.DottyPrimitives
import dotty.tools.dotc.report
import dotty.tools.dotc.util.ReadOnlyMap
@@ -64,7 +64,7 @@ object JSPrimitives {
}
class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) {
- import JSPrimitives._
+ import JSPrimitives.*
private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx)
diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala
index 5f5e9fc799b5..cbc490919cfe 100644
--- a/compiler/src/dotty/tools/dotc/Bench.scala
+++ b/compiler/src/dotty/tools/dotc/Bench.scala
@@ -1,11 +1,12 @@
package dotty.tools
package dotc
-import core.Contexts._
+import core.Contexts.*
import reporting.Reporter
import io.AbstractFile
import scala.annotation.internal.sharable
+import scala.compiletime.uninitialized
/** A main class for running compiler benchmarks. Can instantiate a given
* number of compilers and run each (sequentially) a given number of times
@@ -17,7 +18,7 @@ object Bench extends Driver:
@sharable private var numCompilers = 1
@sharable private var waitAfter = -1
@sharable private var curCompiler = 0
- @sharable private var times: Array[Int] = _
+ @sharable private var times: Array[Int] = uninitialized
override def doCompile(compiler: Compiler, files: List[AbstractFile])(using Context): Reporter =
var reporter: Reporter = emptyReporter
diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
index a906d52ccd4e..686414a4fd9b 100644
--- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala
+++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -1,10 +1,10 @@
package dotty.tools
package dotc
-import core._
-import Contexts._
+import core.*
+import Contexts.*
import SymDenotations.ClassDenotation
-import Symbols._
+import Symbols.*
import Comments.Comment
import util.{FreshNameCreator, SourceFile, NoSource}
import util.Spans.Span
@@ -12,8 +12,8 @@ import ast.{tpd, untpd}
import tpd.{Tree, TreeTraverser}
import ast.Trees.{Import, Ident}
import typer.Nullables
-import transform.SymUtils._
-import core.Decorators._
+import transform.SymUtils.*
+import core.Decorators.*
import config.{SourceVersion, Feature}
import StdNames.nme
import scala.annotation.internal.sharable
diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala
index 3972ef654b72..aaa14a052936 100644
--- a/compiler/src/dotty/tools/dotc/Compiler.scala
+++ b/compiler/src/dotty/tools/dotc/Compiler.scala
@@ -1,12 +1,13 @@
package dotty.tools
package dotc
-import core._
-import Contexts._
+import core.*
+import Contexts.*
import typer.{TyperPhase, RefChecks}
import parsing.Parser
import Phases.Phase
-import transform._
+import transform.*
+import dotty.tools.backend
import backend.jvm.{CollectSuperCalls, GenBCode}
import localopt.StringInterpolatorOpt
diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala
index e548cae55ddd..4207baa57470 100644
--- a/compiler/src/dotty/tools/dotc/Driver.scala
+++ b/compiler/src/dotty/tools/dotc/Driver.scala
@@ -3,12 +3,12 @@ package dotty.tools.dotc
import dotty.tools.FatalError
import config.CompilerCommand
import core.Comments.{ContextDoc, ContextDocstrings}
-import core.Contexts._
+import core.Contexts.*
import core.{MacroClassLoader, TypeError}
import dotty.tools.dotc.ast.Positioned
import dotty.tools.io.AbstractFile
-import reporting._
-import core.Decorators._
+import reporting.*
+import core.Decorators.*
import config.Feature
import scala.util.control.NonFatal
diff --git a/compiler/src/dotty/tools/dotc/Resident.scala b/compiler/src/dotty/tools/dotc/Resident.scala
index 0b9bca0dc75b..481d321773c6 100644
--- a/compiler/src/dotty/tools/dotc/Resident.scala
+++ b/compiler/src/dotty/tools/dotc/Resident.scala
@@ -1,7 +1,7 @@
package dotty.tools
package dotc
-import core.Contexts._
+import core.Contexts.*
import reporting.Reporter
import java.io.EOFException
import scala.annotation.tailrec
diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala
index d85e03e92d60..bec1c89d7216 100644
--- a/compiler/src/dotty/tools/dotc/Run.scala
+++ b/compiler/src/dotty/tools/dotc/Run.scala
@@ -1,22 +1,22 @@
package dotty.tools
package dotc
-import core._
-import Contexts._
-import Periods._
-import Symbols._
-import Scopes._
+import core.*
+import Contexts.*
+import Periods.*
+import Symbols.*
+import Scopes.*
import Names.Name
import Denotations.Denotation
import typer.Typer
import typer.ImportInfo.withRootImports
-import Decorators._
+import Decorators.*
import io.AbstractFile
import Phases.{unfusedPhases, Phase}
import sbt.interfaces.ProgressCallback
-import util._
+import util.*
import reporting.{Suppression, Action, Profile, ActiveProfile, NoProfile}
import reporting.Diagnostic
import reporting.Diagnostic.Warning
@@ -65,7 +65,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint
private var myUnits: List[CompilationUnit] = Nil
private var myUnitsCached: List[CompilationUnit] = Nil
- private var myFiles: Set[AbstractFile] = _
+ private var myFiles: Set[AbstractFile] = uninitialized
// `@nowarn` annotations by source file, populated during typer
private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty
@@ -511,7 +511,7 @@ object Run {
var currentCompletedSubtraversalCount: Int = 0 // completed subphases in the current phase
var seenPhaseCount: Int = 0 // how many phases we've seen so far
- private var currPhase: Phase = uninitialized // initialized by enterPhase
+ private var currPhase: Phase = uninitialized // initialized by enterPhase
private var subPhases: SubPhases = uninitialized // initialized by enterPhase
private var currPhaseName: String = uninitialized // initialized by enterPhase
private var nextPhaseName: String = uninitialized // initialized by enterPhase
diff --git a/compiler/src/dotty/tools/dotc/ScalacCommand.scala b/compiler/src/dotty/tools/dotc/ScalacCommand.scala
index 2e0d9a08f25d..5f7f80a262d8 100644
--- a/compiler/src/dotty/tools/dotc/ScalacCommand.scala
+++ b/compiler/src/dotty/tools/dotc/ScalacCommand.scala
@@ -1,6 +1,6 @@
package dotty.tools.dotc
-import config.Properties._
+import config.Properties.*
import config.CompilerCommand
object ScalacCommand extends CompilerCommand:
diff --git a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
index 6bf7530faf24..08d409772331 100644
--- a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
+++ b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled
@@ -2,14 +2,14 @@ package dotty.tools
package dotc
package ast
-import core._
-import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._
-import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
+import core.*
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.*
+import SymDenotations.*, Symbols.*, StdNames.*, Annotations.*, Trees.*
// TODO: revise, integrate in a checking phase.
object CheckTrees {
- import tpd._
+ import tpd.*
def check(p: Boolean, msg: => String = "")(using Context): Unit = assert(p, msg)
diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
index 471d7f6af0dc..0607b3729654 100644
--- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
@@ -2,18 +2,18 @@ package dotty.tools
package dotc
package ast
-import core._
-import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._
-import Symbols._, StdNames._, Trees._, ContextOps._
-import Decorators._, transform.SymUtils._
+import core.*
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, NameOps.*, Flags.*
+import Symbols.*, StdNames.*, Trees.*, ContextOps.*
+import Decorators.*, transform.SymUtils.*
import Annotations.Annotation
import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName}
import typer.{Namer, Checking}
import util.{Property, SourceFile, SourcePosition, Chars}
import config.Feature.{sourceVersion, migrateTo3, enabled}
-import config.SourceVersion._
+import config.SourceVersion.*
import collection.mutable.ListBuffer
-import reporting._
+import reporting.*
import annotation.constructorOnly
import printing.Formatting.hl
import config.Printers
@@ -22,8 +22,8 @@ import scala.annotation.internal.sharable
import scala.annotation.threadUnsafe
object desugar {
- import untpd._
- import DesugarEnums._
+ import untpd.*
+ import DesugarEnums.*
/** An attachment for companion modules of classes that have a `derives` clause.
* The position value indicates the start position of the template of the
diff --git a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala
index a1c3c0ed0775..4b00f72bf21e 100644
--- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala
+++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala
@@ -2,19 +2,19 @@ package dotty.tools
package dotc
package ast
-import core._
-import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._
-import Symbols._, StdNames._, Trees._
-import Decorators._
+import core.*
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.*
+import Symbols.*, StdNames.*, Trees.*
+import Decorators.*
import util.{Property, SourceFile}
-import typer.ErrorReporting._
+import typer.ErrorReporting.*
import transform.SyntheticMembers.ExtendsSingletonMirror
import scala.annotation.internal.sharable
/** Helper methods to desugar enums */
object DesugarEnums {
- import untpd._
+ import untpd.*
enum CaseKind:
case Simple, Object, Class
diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
index c0cf2c0d1b81..8ee75cbf364b 100644
--- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
+++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala
@@ -1,10 +1,10 @@
package dotty.tools.dotc
package ast
-import core._
-import Symbols._, Types._, Contexts._, Decorators._, util.Spans._, Flags._, Constants._
+import core.*
+import Symbols.*, Types.*, Contexts.*, Decorators.*, util.Spans.*, Flags.*, Constants.*
import StdNames.{nme, tpnme}
-import ast.Trees._
+import ast.Trees.*
import Names.Name
import Comments.Comment
import NameKinds.DefaultGetterName
@@ -24,7 +24,7 @@ object MainProxies {
*
* would be translated to something like
*
- * import CommandLineParser._
+ * import CommandLineParser.*
* class f {
* @static def main(args: Array[String]): Unit =
* try
@@ -36,7 +36,7 @@ object MainProxies {
* }
*/
private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = {
- import tpd._
+ import tpd.*
def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap {
case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) =>
stat.symbol :: Nil
@@ -48,7 +48,7 @@ object MainProxies {
mainMethods(stats).flatMap(mainProxy)
}
- import untpd._
+ import untpd.*
private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = {
val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span
def pos = mainFun.sourcePos
@@ -172,7 +172,7 @@ object MainProxies {
* }
*/
private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = {
- import tpd._
+ import tpd.*
/**
* Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this
@@ -405,7 +405,7 @@ object MainProxies {
/** A class responsible for extracting the docstrings of a method. */
private class Documentation(docComment: Option[Comment]):
- import util.CommentParsing._
+ import util.CommentParsing.*
/** The main part of the documentation. */
lazy val mainDoc: String = _mainDoc
diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
index ace396d1e583..2960af8fcdec 100644
--- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
+++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package ast
-import core.Contexts._
-import core.Decorators._
-import util.Spans._
+import core.Contexts.*
+import core.Decorators.*
+import util.Spans.*
import Trees.{MemberDef, DefTree, WithLazyFields}
import dotty.tools.dotc.core.Types.AnnotatedType
import dotty.tools.dotc.core.Types.ImportType
diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala
index dd783be7a9e1..d8017783f47f 100644
--- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala
@@ -2,23 +2,25 @@ package dotty.tools
package dotc
package ast
-import util.Spans._
+import util.Spans.*
import util.{SourceFile, SourcePosition, SrcPos}
-import core.Contexts._
-import core.Decorators._
-import core.NameOps._
+import core.Contexts.*
+import core.Decorators.*
+import core.NameOps.*
import core.Flags.{JavaDefined, ExtensionMethod}
import core.StdNames.nme
import ast.Trees.mods
import annotation.constructorOnly
import annotation.internal.sharable
+import scala.compiletime.uninitialized
+
/** A base class for things that have positions (currently: modifiers and trees)
*/
abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable {
import Positioned.{ids, nextId, debugId}
- private var mySpan: Span = _
+ private var mySpan: Span = uninitialized
private var mySource: SourceFile = src
@@ -163,7 +165,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src
* - If item is a non-empty tree, it has a position
*/
def checkPos(nonOverlapping: Boolean)(using Context): Unit = try {
- import untpd._
+ import untpd.*
val last = LastPosRef()
def check(p: Any): Unit = p match {
case p: Positioned =>
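
The other recurring rewrite replaces the Scala 2 default initializer `= _` with an explicit `scala.compiletime.uninitialized`, which `-source future` requires. A minimal self-contained sketch:

    import scala.compiletime.uninitialized

    class Box:
      // formerly `private var value: String = _`
      private var value: String = uninitialized
      def get: String =
        if value == null then value = "computed"
        value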
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
index 7616ef220d7f..9b841e8458ec 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -2,13 +2,13 @@ package dotty.tools
package dotc
package ast
-import core._
-import Flags._, Trees._, Types._, Contexts._
-import Names._, StdNames._, NameOps._, Symbols._
+import core.*
+import Flags.*, Trees.*, Types.*, Contexts.*
+import Names.*, StdNames.*, NameOps.*, Symbols.*
import typer.ConstFold
import reporting.trace
-import dotty.tools.dotc.transform.SymUtils._
-import Decorators._
+import dotty.tools.dotc.transform.SymUtils.*
+import Decorators.*
import Constants.Constant
import scala.collection.mutable
@@ -390,7 +390,7 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] =>
}
trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] =>
- import untpd._
+ import untpd.*
/** The underlying tree when stripping any TypedSplice or Parens nodes */
override def unsplice(tree: Tree): Tree = tree match {
@@ -495,8 +495,8 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped]
}
trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
- import TreeInfo._
- import tpd._
+ import TreeInfo.*
+ import tpd.*
/** The purity level of this statement.
* @return Pure if statement has no side effects
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala
index ae674c25dc3d..5603a422a77b 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala
@@ -1,11 +1,11 @@
package dotty.tools.dotc
package ast
-import Trees._
-import core.Contexts._
+import Trees.*
+import core.Contexts.*
import core.ContextOps.enter
-import core.Flags._
-import core.Symbols._
+import core.Flags.*
+import core.Symbols.*
import core.TypeError
/** A TreeMap that maintains the necessary infrastructure to support
@@ -14,7 +14,7 @@ import core.TypeError
 * This includes implicits defined in scope as well as imported implicits.
*/
class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts {
- import tpd._
+ import tpd.*
def transformSelf(vd: ValDef)(using Context): ValDef =
cpy.ValDef(vd)(tpt = transform(vd.tpt))
diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
index 955892b2ae22..15c61bc2b8d4 100644
--- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
+++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala
@@ -2,11 +2,11 @@ package dotty.tools
package dotc
package ast
-import core._
-import Types._, Contexts._, Flags._
-import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant
-import Decorators._
-import dotty.tools.dotc.transform.SymUtils._
+import core.*
+import Types.*, Contexts.*, Flags.*
+import Symbols.*, Annotations.*, Trees.*, Symbols.*, Constants.Constant
+import Decorators.*
+import dotty.tools.dotc.transform.SymUtils.*
/** A map that applies three functions and a substitution together to a tree and
* makes sure they are coordinated so that the result is well-typed. The functions are
@@ -39,7 +39,7 @@ class TreeTypeMap(
val substFrom: List[Symbol] = Nil,
val substTo: List[Symbol] = Nil,
cpy: tpd.TreeCopier = tpd.cpy)(using Context) extends tpd.TreeMap(cpy) {
- import tpd._
+ import tpd.*
def copy(
typeMap: Type => Type,
@@ -105,7 +105,8 @@ class TreeTypeMap(
tree1.withType(mapType(tree1.tpe)) match {
case id: Ident =>
if needsSelect(id.tpe) then
- ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span)
+ try ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span)
+ catch case ex: TypeError => super.transform(id)
else
super.transform(id)
case sel: Select =>
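
The change above wraps the precise rewrite of an `Ident` in a fallback: if re-resolving the mapped `TermRef` throws a `TypeError`, the generic traversal is used instead. The same attempt-then-fallback shape in miniature, with `RuntimeException` standing in for the compiler's `TypeError`:

    def transformOr[A](attempt: => A)(fallback: => A): A =
      try attempt
      catch case _: RuntimeException => fallback // stand-in for dotc's TypeError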
diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala
index 1ad8bf520830..4ec41b95a90b 100644
--- a/compiler/src/dotty/tools/dotc/ast/Trees.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala
@@ -2,10 +2,10 @@ package dotty.tools
package dotc
package ast
-import core._
-import Types._, Names._, NameOps._, Flags._, util.Spans._, Contexts._, Constants._
+import core.*
+import Types.*, Names.*, NameOps.*, Flags.*, util.Spans.*, Contexts.*, Constants.*
import typer.{ ConstFold, ProtoTypes }
-import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._
+import SymDenotations.*, Symbols.*, Denotations.*, StdNames.*, Comments.*
import collection.mutable.ListBuffer
import printing.Printer
import printing.Texts.Text
@@ -16,7 +16,7 @@ import annotation.internal.sharable
import annotation.unchecked.uncheckedVariance
import annotation.constructorOnly
import compiletime.uninitialized
-import Decorators._
+import Decorators.*
import staging.StagingLevel.*
object Trees {
diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala
index e0b282e2d8db..acffb1e89972 100644
--- a/compiler/src/dotty/tools/dotc/ast/tpd.scala
+++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala
@@ -4,13 +4,13 @@ package ast
import dotty.tools.dotc.transform.{ExplicitOuter, Erasure}
import typer.ProtoTypes
-import transform.SymUtils._
-import transform.TypeUtils._
-import core._
+import transform.SymUtils.*
+import transform.TypeUtils.*
+import core.*
import Scopes.newScope
-import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._, NameOps._
-import Symbols._, StdNames._, Annotations._, Trees._, Symbols._
-import Decorators._, DenotTransformers._
+import util.Spans.*, Types.*, Contexts.*, Constants.*, Names.*, Flags.*, NameOps.*
+import Symbols.*, StdNames.*, Annotations.*, Trees.*, Symbols.*
+import Decorators.*, DenotTransformers.*
import collection.{immutable, mutable}
import util.{Property, SourceFile}
import NameKinds.{TempResultName, OuterSelectName}
@@ -18,6 +18,7 @@ import typer.ConstFold
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
+import scala.compiletime.uninitialized
/** Some creators for typed trees */
object tpd extends Trees.Instance[Type] with TypedTreeInfo {
@@ -1309,7 +1310,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
trait TreeProvider {
protected def computeRootTrees(using Context): List[Tree]
- private var myTrees: List[Tree] | Null = _
+ private var myTrees: List[Tree] | Null = uninitialized
/** Get trees defined by this provider. Cache them if -Yretain-trees is set. */
def rootTrees(using Context): List[Tree] =
diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala
index 41ba452fa80a..817ff5c6c9fa 100644
--- a/compiler/src/dotty/tools/dotc/ast/untpd.scala
+++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala
@@ -2,15 +2,15 @@ package dotty.tools
package dotc
package ast
-import core._
-import Types._, Contexts._, Constants._, Names._, Flags._
+import core.*
+import Types.*, Contexts.*, Constants.*, Names.*, Flags.*
import dotty.tools.dotc.typer.ProtoTypes
-import Symbols._, StdNames._, Trees._
+import Symbols.*, StdNames.*, Trees.*
import util.{Property, SourceFile, NoSource}
import util.Spans.Span
import annotation.constructorOnly
import annotation.internal.sharable
-import Decorators._
+import Decorators.*
object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
@@ -42,7 +42,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
/** mods object name impl */
case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile)
extends MemberDef {
- type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef
+ type ThisTree[+T <: Untyped] <: Trees.NameTree[T] & Trees.MemberDef[T] & ModuleDef
def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl)
}
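
The bound rewrite from `with` to `&` tracks Scala 3's preference for intersection types over Scala 2 compound types; `&` is commutative and is the form the compiler reasons about. A standalone sketch:

    trait NameTree
    trait MemberDef

    trait Tpl:
      // Scala 2 wrote `NameTree with MemberDef`; Scala 3 uses the
      // intersection `NameTree & MemberDef`, as in the bound above.
      type ThisTree <: NameTree & MemberDef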
diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala
index dccf07ba199e..40e94ebde5dd 100644
--- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala
+++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala
@@ -206,6 +206,12 @@ extension (tp: Type)
case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot)
case _ => false
+ def isSealed(using Context): Boolean = tp match
+ case tp: TypeParamRef => tp.underlying.isSealed
+ case tp: TypeBounds => tp.hi.hasAnnotation(defn.Caps_SealedAnnot)
+ case tp: TypeRef => tp.symbol.is(Sealed) || tp.info.isSealed // TODO: drop symbol flag?
+ case _ => false
+
/** Drop @retains annotations everywhere */
def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling
val tm = new TypeMap:
@@ -225,7 +231,11 @@ extension (cls: ClassSymbol)
&& bc.givenSelfType.dealiasKeepAnnots.match
case CapturingType(_, refs) => refs.isAlwaysEmpty
case RetainingType(_, refs) => refs.isEmpty
- case selfType => selfType.exists && selfType.captureSet.isAlwaysEmpty
+ case selfType =>
+ isCaptureChecking // At Setup we have not processed self types yet, so
+ // unless a self type is explicitly given, we can't tell
+ // and err on the side of impure.
+ && selfType.exists && selfType.captureSet.isAlwaysEmpty
extension (sym: Symbol)
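
The new `isSealed` extension recurses structurally: a type-parameter reference defers to its underlying type, bounds are sealed when the upper bound carries the `@caps.Sealed` annotation, and a `TypeRef` is sealed if its symbol has the `Sealed` flag or its info is. A toy restatement of that recursion over a simplified ADT (not the compiler's actual type classes):

    enum Ty:
      case ParamRef(underlying: Ty)                  // TypeParamRef
      case Bounds(hi: Ty, hiHasSealedAnnot: Boolean) // TypeBounds
      case Ref(symIsSealed: Boolean, info: Ty)       // TypeRef
      case Other

    def isSealed(t: Ty): Boolean = t match
      case Ty.ParamRef(u)      => isSealed(u)
      case Ty.Bounds(_, annot) => annot
      case Ty.Ref(flag, info)  => flag || isSealed(info)
      case Ty.Other            => false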
diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala
index 2586d449dfd4..7261c760aa01 100644
--- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala
+++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala
@@ -872,6 +872,7 @@ object CaptureSet:
upper.isAlwaysEmpty || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1)
if variance > 0 || isExact then upper
else if variance < 0 then CaptureSet.empty
+ else if ctx.mode.is(Mode.Printing) then upper
else assert(false, i"trying to add $upper from $r via ${tm.getClass} in a non-variant setting")
/** Apply `f` to each element in `xs`, and join result sets with `++` */
diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
index fab0689b4df2..a49bd9f79351 100644
--- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
+++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
@@ -14,14 +14,14 @@ import typer.RefChecks.{checkAllOverrides, checkSelfAgainstParents, OverridingPa
import typer.Checking.{checkBounds, checkAppliedTypesIn}
import typer.ErrorReporting.{Addenda, err}
import typer.ProtoTypes.{AnySelectionProto, LhsProto}
-import util.{SimpleIdentitySet, EqHashMap, SrcPos, Property}
+import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property}
import transform.SymUtils.*
-import transform.{Recheck, PreRecheck}
+import transform.{Recheck, PreRecheck, CapturedVars}
import Recheck.*
import scala.collection.mutable
import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult}
import StdNames.nme
-import NameKinds.DefaultGetterName
+import NameKinds.{DefaultGetterName, WildcardParamName}
import reporting.trace
/** The capture checker */
@@ -147,33 +147,49 @@ object CheckCaptures:
private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) =
val check = new TypeTraverser:
+ private val seen = new EqHashSet[TypeRef]
+
+ /** Check that there is at least one method containing carrier and defined
+ * in the scope of tparam. E.g. this is OK:
+ * def f[T] = { ... var x: T ... }
+ * So is this:
+ * class C[T] { def f() = { class D { var x: T }}}
+ * But this is not OK:
+ * class C[T] { object o { var x: T }}
+ */
extension (tparam: Symbol) def isParametricIn(carrier: Symbol): Boolean =
- val encl = carrier.owner.enclosingMethodOrClass
- if encl.isClass then tparam.isParametricIn(encl)
- else
- def recur(encl: Symbol): Boolean =
- if tparam.owner == encl then true
- else if encl.isStatic || !encl.exists then false
- else recur(encl.owner.enclosingMethodOrClass)
- recur(encl)
+ carrier.exists && {
+ val encl = carrier.owner.enclosingMethodOrClass
+ if encl.isClass then tparam.isParametricIn(encl)
+ else
+ def recur(encl: Symbol): Boolean =
+ if tparam.owner == encl then true
+ else if encl.isStatic || !encl.exists then false
+ else recur(encl.owner.enclosingMethodOrClass)
+ recur(encl)
+ }
def traverse(t: Type) =
t.dealiasKeepAnnots match
case t: TypeRef =>
- capt.println(i"disallow $t, $tp, $what, ${t.symbol.is(Sealed)}")
- t.info match
- case TypeBounds(_, hi)
- if !t.symbol.is(Sealed) && !t.symbol.isParametricIn(carrier) =>
- if hi.isAny then
- report.error(
- em"""$what cannot $have $tp since
- |that type refers to the type variable $t, which is not sealed.
- |$addendum""",
- pos)
- else
- traverse(hi)
- case _ =>
- traverseChildren(t)
+ if !seen.contains(t) then
+ capt.println(i"disallow $t, $tp, $what, ${t.isSealed}")
+ seen += t
+ t.info match
+ case TypeBounds(_, hi) if !t.isSealed && !t.symbol.isParametricIn(carrier) =>
+ if hi.isAny then
+ val detailStr =
+ if t eq tp then "variable"
+ else i"refers to the type variable $t, which"
+ report.error(
+ em"""$what cannot $have $tp since
+ |that type $detailStr is not sealed.
+ |$addendum""",
+ pos)
+ else
+ traverse(hi)
+ case _ =>
+ traverseChildren(t)
case AnnotatedType(_, ann) if ann.symbol == defn.UncheckedCapturesAnnot =>
()
case t =>
@@ -260,11 +276,12 @@ class CheckCaptures extends Recheck, SymTransformer:
pos, provenance)
/** Check subcapturing `cs1 <: cs2`, report error on failure */
- def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) =
+ def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos,
+ provenance: => String = "", cs1description: String = "")(using Context) =
checkOK(
cs1.subCaptures(cs2, frozen = false),
- if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head} is not"
- else i"references $cs1 are not all",
+ if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not"
+ else i"references $cs1$cs1description are not all",
pos, provenance)
/** The current environment */
@@ -542,10 +559,10 @@ class CheckCaptures extends Recheck, SymTransformer:
val TypeApply(fn, args) = tree
val polyType = atPhase(thisPhase.prev):
fn.tpe.widen.asInstanceOf[TypeLambda]
- for case (arg: TypeTree, pinfo, pname) <- args.lazyZip(polyType.paramInfos).lazyZip((polyType.paramNames)) do
- if pinfo.bounds.hi.hasAnnotation(defn.Caps_SealedAnnot) then
+ for case (arg: TypeTree, formal, pname) <- args.lazyZip(polyType.paramRefs).lazyZip((polyType.paramNames)) do
+ if formal.isSealed then
def where = if fn.symbol.exists then i" in an argument of ${fn.symbol}" else ""
- disallowRootCapabilitiesIn(arg.knownType, fn.symbol,
+ disallowRootCapabilitiesIn(arg.knownType, NoSymbol,
i"Sealed type variable $pname", "be instantiated to",
i"This is often caused by a local capability$where\nleaking as part of its result.",
tree.srcPos)
@@ -586,13 +603,58 @@ class CheckCaptures extends Recheck, SymTransformer:
openClosures = openClosures.tail
end recheckClosureBlock
+  /** Maps mutable variables to the symbols that capture them (in the
+   *  CheckCaptures sense, i.e. the symbol is referred to from a different
+   *  method than the one it is defined in).
+   */
+ private val capturedBy = util.HashMap[Symbol, Symbol]()
+
+ /** Maps anonymous functions appearing as function arguments to
+ * the function that is called.
+ */
+ private val anonFunCallee = util.HashMap[Symbol, Symbol]()
+
+ /** Populates `capturedBy` and `anonFunCallee`. Called by `checkUnit`.
+ */
+ private def collectCapturedMutVars(using Context) = new TreeTraverser:
+ def traverse(tree: Tree)(using Context) = tree match
+ case id: Ident =>
+ val sym = id.symbol
+ if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then
+ val enclMeth = ctx.owner.enclosingMethod
+ if sym.enclosingMethod != enclMeth then
+ capturedBy(sym) = enclMeth
+ case Apply(fn, args) =>
+ for case closureDef(mdef) <- args do
+ anonFunCallee(mdef.symbol) = fn.symbol
+ traverseChildren(tree)
+ case Inlined(_, bindings, expansion) =>
+ traverse(bindings)
+ traverse(expansion)
+ case mdef: DefDef =>
+ if !mdef.symbol.isInlineMethod then traverseChildren(tree)
+ case _ =>
+ traverseChildren(tree)
+
override def recheckValDef(tree: ValDef, sym: Symbol)(using Context): Type =
try
if sym.is(Module) then sym.info // Modules are checked by checking the module class
else
if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then
- disallowRootCapabilitiesIn(tree.tpt.knownType, sym,
- i"mutable $sym", "have type", "", sym.srcPos)
+ val (carrier, addendum) = capturedBy.get(sym) match
+ case Some(encl) =>
+ val enclStr =
+ if encl.isAnonymousFunction then
+ val location = anonFunCallee.get(encl) match
+ case Some(meth) if meth.exists => i" argument in a call to $meth"
+ case _ => ""
+ s"an anonymous function$location"
+ else encl.show
+ (NoSymbol, i"\nNote that $sym does not count as local since it is captured by $enclStr")
+ case _ =>
+ (sym, "")
+ disallowRootCapabilitiesIn(
+ tree.tpt.knownType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos)
checkInferredResult(super.recheckValDef(tree, sym), tree)
finally
if !sym.is(Param) then
@@ -680,9 +742,15 @@ class CheckCaptures extends Recheck, SymTransformer:
if !param.hasAnnotation(defn.ConstructorOnlyAnnot) then
checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3)
for pureBase <- cls.pureBaseClass do // (4)
+ def selfType = impl.body
+ .collect:
+ case TypeDef(tpnme.SELF, rhs) => rhs
+ .headOption
+ .getOrElse(tree)
+ .orElse(tree)
checkSubset(thisSet,
CaptureSet.empty.withDescription(i"of pure base class $pureBase"),
- tree.srcPos)
+ selfType.srcPos, cs1description = " captured by this self type")
super.recheckClassDef(tree, impl, cls)
finally
curEnv = saved
@@ -1122,6 +1190,8 @@ class CheckCaptures extends Recheck, SymTransformer:
override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean =
!setup.isPreCC(overriding) && !setup.isPreCC(overridden)
+
+ override def checkInheritedTraitParameters: Boolean = false
end OverridingPairsCheckerCC
def traverse(t: Tree)(using Context) =
@@ -1158,11 +1228,12 @@ class CheckCaptures extends Recheck, SymTransformer:
private val setup: SetupAPI = thisPhase.prev.asInstanceOf[Setup]
override def checkUnit(unit: CompilationUnit)(using Context): Unit =
- setup.setupUnit(ctx.compilationUnit.tpdTree, completeDef)
+ setup.setupUnit(unit.tpdTree, completeDef)
+ collectCapturedMutVars.traverse(unit.tpdTree)
if ctx.settings.YccPrintSetup.value then
val echoHeader = "[[syntax tree at end of cc setup]]"
- val treeString = show(ctx.compilationUnit.tpdTree)
+ val treeString = show(unit.tpdTree)
report.echo(s"$echoHeader\n$treeString\n")
withCaptureSetsExplained:
@@ -1298,6 +1369,39 @@ class CheckCaptures extends Recheck, SymTransformer:
checker.traverse(tree.knownType)
end healTypeParam
+ def checkNoLocalRootIn(sym: Symbol, info: Type, pos: SrcPos)(using Context): Unit =
+ val check = new TypeTraverser:
+ def traverse(tp: Type) = tp match
+ case tp: TermRef if tp.isLocalRootCapability =>
+ if tp.localRootOwner == sym then
+ report.error(i"local root $tp cannot appear in type of $sym", pos)
+ case tp: ClassInfo =>
+ traverseChildren(tp)
+ for mbr <- tp.decls do
+ if !mbr.is(Private) then checkNoLocalRootIn(sym, mbr.info, mbr.srcPos)
+ case _ =>
+ traverseChildren(tp)
+ check.traverse(info)
+
+ def checkArraysAreSealedIn(tp: Type, pos: SrcPos)(using Context): Unit =
+ val check = new TypeTraverser:
+ def traverse(t: Type): Unit =
+ t match
+ case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.ArrayClass =>
+ if !(pos.span.isSynthetic && ctx.reporter.errorsReported)
+ && !arg.typeSymbol.name.is(WildcardParamName)
+ then
+ CheckCaptures.disallowRootCapabilitiesIn(arg, NoSymbol,
+ "Array", "have element type",
+ "Since arrays are mutable, they have to be treated like variables,\nso their element type must be sealed.",
+ pos)
+ traverseChildren(t)
+ case defn.RefinedFunctionOf(rinfo: MethodType) =>
+ traverse(rinfo)
+ case _ =>
+ traverseChildren(t)
+ check.traverse(tp)
+
/** Perform the following kinds of checks
* - Check all explicitly written capturing types for well-formedness using `checkWellFormedPost`.
* - Check that arguments of TypeApplys and AppliedTypes conform to their bounds.
@@ -1309,10 +1413,11 @@ class CheckCaptures extends Recheck, SymTransformer:
val lctx = tree match
case _: DefTree | _: TypeDef if tree.symbol.exists => ctx.withOwner(tree.symbol)
case _ => ctx
- traverseChildren(tree)(using lctx)
- check(tree)
+ trace(i"post check $tree"):
+ traverseChildren(tree)(using lctx)
+ check(tree)
def check(tree: Tree)(using Context) = tree match
- case t @ TypeApply(fun, args) =>
+ case TypeApply(fun, args) =>
fun.knownType.widen match
case tl: PolyType =>
val normArgs = args.lazyZip(tl.paramInfos).map: (arg, bounds) =>
@@ -1321,6 +1426,10 @@ class CheckCaptures extends Recheck, SymTransformer:
checkBounds(normArgs, tl)
args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol))
case _ =>
+ case _: ValOrDefDef | _: TypeDef =>
+ checkNoLocalRootIn(tree.symbol, tree.symbol.info, tree.symbol.srcPos)
+ case tree: TypeTree =>
+ checkArraysAreSealedIn(tree.tpe, tree.srcPos)
case _ =>
end check
end checker
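
Taken together, these changes refine where an unsealed type variable may be instantiated with root capabilities and how the failure is reported. A source-level sketch matching the `isParametricIn` doc comment above (the second definition is the one the check rejects; messages are paraphrased):

    import language.experimental.captureChecking

    def ok[T] =
      var x: T = ???    // fine: an enclosing method intervenes
      ()

    class C[T]:
      object o:
        var x: T = ???  // error: T is not parametric in o, so the mutable
                        // field could smuggle a root capability through T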
diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala
index 68fd79048f41..270fd9322a88 100644
--- a/compiler/src/dotty/tools/dotc/cc/Setup.scala
+++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package cc
-import core._
+import core.*
import Phases.*, DenotTransformers.*, SymDenotations.*
import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.*
import Types.*, StdNames.*
@@ -522,7 +522,9 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI:
tree.symbol match
case cls: ClassSymbol =>
val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo
- if (selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic then
+ if ((selfInfo eq NoType) || cls.is(ModuleClass) && !cls.isStatic)
+ && !cls.isPureClass
+ then
// add capture set to self type of nested classes if no self type is given explicitly.
val newSelfType = CapturingType(cinfo.selfType, CaptureSet.Var(cls))
val ps1 = inContext(ctx.withOwner(cls)):
@@ -705,4 +707,5 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI:
def postCheck()(using Context): Unit =
for chk <- todoAtPostCheck do chk(ctx)
+ todoAtPostCheck.clear()
end Setup
\ No newline at end of file
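
The added `!cls.isPureClass` guard means Setup no longer infers a capture-set self type for classes known to be pure. A hedged sketch of the intent, relying on `pureBaseClasses` containing `Throwable` (see the Definitions change later in this patch):

    import language.experimental.captureChecking

    // Pure by inheritance from Throwable: Setup adds no `CaptureSet.Var`
    // self type for this class.
    class ParseFailure(msg: String) extends Exception(msg)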
diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala
index 51b261583feb..5fbe7212a674 100644
--- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala
@@ -56,7 +56,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath {
override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct
- override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*)
+ override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString)*)
override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = {
val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct
diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
index ac8b69381938..0b66f339bf53 100644
--- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala
@@ -4,9 +4,9 @@
package dotty.tools.dotc.classpath
import dotty.tools.io.{AbstractFile, VirtualDirectory}
-import FileUtils._
+import FileUtils.*
import dotty.tools.io.ClassPath
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
/**
* Provides factory methods for classpath. When creating classpath instances for a given path,
diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
index da1276f10dd7..26ed2734890e 100644
--- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala
@@ -11,10 +11,10 @@ import java.nio.file.{FileSystems, Files}
import dotty.tools.dotc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames}
import dotty.tools.io.{AbstractFile, PlainFile, ClassPath, ClassRepresentation, EfficientClassPath, JDK9Reflectors}
-import FileUtils._
+import FileUtils.*
import PlainFile.toPlainFile
-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
import scala.collection.immutable.ArraySeq
import scala.util.control.NonFatal
@@ -126,9 +126,9 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo
}
object JrtClassPath {
- import java.nio.file._, java.net.URI
+ import java.nio.file.*, java.net.URI
def apply(release: Option[String]): Option[ClassPath] = {
- import scala.util.Properties._
+ import scala.util.Properties.*
if (!isJavaAtLeast("9")) None
else {
// Longer term we'd like an official API for this in the JDK
@@ -165,7 +165,7 @@ object JrtClassPath {
* The implementation assumes that no classes exist in the empty package.
*/
final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths {
- import java.nio.file.Path, java.nio.file._
+ import java.nio.file.Path, java.nio.file.*
type F = Path
private val dir: Path = fs.getPath("/packages")
@@ -214,7 +214,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No
* Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247
*/
final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths {
- import java.nio.file.Path, java.nio.file._
+ import java.nio.file.Path, java.nio.file.*
private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader)
private val root: Path = fileSystem.getRootDirectories.iterator.next
diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
index 4b777444c3bf..f520cd97767e 100644
--- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala
@@ -4,7 +4,7 @@ import scala.language.unsafeNulls
import dotty.tools.io.{ClassPath, ClassRepresentation}
import dotty.tools.io.{AbstractFile, VirtualDirectory}
-import FileUtils._
+import FileUtils.*
import java.net.{URI, URL}
case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
index b38e1841728d..0ca996db4812 100644
--- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala
@@ -13,8 +13,8 @@ import java.nio.file.attribute.{BasicFileAttributes, FileTime}
import scala.annotation.tailrec
import dotty.tools.io.{AbstractFile, ClassPath, ClassRepresentation, FileZipArchive, ManifestResources}
-import dotty.tools.dotc.core.Contexts._
-import FileUtils._
+import dotty.tools.dotc.core.Contexts.*
+import FileUtils.*
/**
* A trait providing an optional cache for classpath entries obtained from zip and jar files.
diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
index 8033291f5dd3..ca8636e3884f 100644
--- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
+++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
@@ -9,7 +9,7 @@ import java.io.File
import java.net.URL
import dotty.tools.io.{ AbstractFile, FileZipArchive }
-import FileUtils._
+import FileUtils.*
import dotty.tools.io.{EfficientClassPath, ClassRepresentation}
/**
diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala
index 914df040fbf7..5c24dd57eeba 100644
--- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala
+++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala
@@ -3,8 +3,8 @@ package config
import scala.language.unsafeNulls
-import Settings._
-import core.Contexts._
+import Settings.*
+import core.Contexts.*
import printing.Highlighting
import scala.util.chaining.given
@@ -12,7 +12,7 @@ import scala.PartialFunction.cond
trait CliCommand:
- type ConcreteSettings <: CommonScalaSettings with Settings.SettingGroup
+ type ConcreteSettings <: CommonScalaSettings & Settings.SettingGroup
def versionMsg: String
diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
index 41e123472a75..2ffe900fbdbf 100644
--- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
+++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc
package config
-import Settings._
-import core.Contexts._
+import Settings.*
+import core.Contexts.*
abstract class CompilerCommand extends CliCommand:
type ConcreteSettings = ScalaSettings
@@ -21,6 +21,6 @@ abstract class CompilerCommand extends CliCommand:
else ""
final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean =
- import settings._
+ import settings.*
val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases)
flags.exists(_.value) || allSettings.exists(isHelping)
diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala
index f1443ad56442..74966d89295e 100644
--- a/compiler/src/dotty/tools/dotc/config/Feature.scala
+++ b/compiler/src/dotty/tools/dotc/config/Feature.scala
@@ -2,12 +2,12 @@ package dotty.tools
package dotc
package config
-import core._
-import Contexts._, Symbols._, Names._
+import core.*
+import Contexts.*, Symbols.*, Names.*
import StdNames.nme
import Decorators.*
import util.{SrcPos, NoSourcePosition}
-import SourceVersion._
+import SourceVersion.*
import reporting.Message
import NameKinds.QualifiedName
@@ -26,6 +26,7 @@ object Feature:
val dependent = experimental("dependent")
val erasedDefinitions = experimental("erasedDefinitions")
val symbolLiterals = deprecated("symbolLiterals")
+ val ascriptionVarargsUnpacking = deprecated("ascriptionVarargsUnpacking")
val fewerBraces = experimental("fewerBraces")
val saferExceptions = experimental("saferExceptions")
val clauseInterleaving = experimental("clauseInterleaving")
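
The new `ascriptionVarargsUnpacking` entry gates the old ascription form of varargs unpacking as a deprecated feature. Both forms side by side; the import name below follows the `deprecated(...)` naming scheme also used for `symbolLiterals`:

    import language.deprecated.ascriptionVarargsUnpacking

    def sum(ns: Int*): Int = ns.sum
    val xs = List(1, 2, 3)

    val a = sum(xs: _*) // old ascription form, deprecated without the import
    val b = sum(xs*)    // Scala 3 form, used throughout this patch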
diff --git a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
index f611360dd4ca..ed8ef6c8372e 100644
--- a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
+++ b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala
@@ -2,12 +2,12 @@ package dotty.tools
package dotc
package config
-import io._
+import io.*
import classpath.AggregateClassPath
-import core._
-import Symbols._, Types._, Contexts._, StdNames._
-import Flags._
-import transform.ExplicitOuter, transform.SymUtils._
+import core.*
+import Symbols.*, Types.*, Contexts.*, StdNames.*
+import Flags.*
+import transform.ExplicitOuter, transform.SymUtils.*
class JavaPlatform extends Platform {
@@ -52,7 +52,7 @@ class JavaPlatform extends Platform {
*/
def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = {
val d = defn
- import d._
+ import d.*
(sym == ObjectClass) ||
(sym == JavaSerializableClass) ||
(sym == ComparableClass) ||
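
`import d.*` works here because `d` is a stable value: Scala allows wildcard imports from any stable identifier, not only from packages and objects. A standalone sketch:

    object Defs:
      val ObjectName = "java.lang.Object"
      def isRoot(name: String): Boolean = name == ObjectName

    def demo: Boolean =
      val d = Defs
      import d.*   // members of the value d come into scope
      isRoot(ObjectName)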
diff --git a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
index 0411c5604768..ba121d06e35a 100644
--- a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
+++ b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala
@@ -4,7 +4,7 @@ package config
import scala.language.unsafeNulls
-import io._
+import io.*
/** A class for holding mappings from source directories to
* their output location. This functionality can be accessed
diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
index 8b4eedb0e9d2..29e6e35855c8 100644
--- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala
+++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala
@@ -9,15 +9,15 @@ import io.{ClassPath, Directory, Path}
import classpath.{AggregateClassPath, ClassPathFactory, JrtClassPath}
import ClassPath.split
import PartialFunction.condOpt
-import core.Contexts._
-import Settings._
+import core.Contexts.*
+import Settings.*
import dotty.tools.io.File
object PathResolver {
// Imports property/environment functions which suppress
// security exceptions.
- import AccessControl._
+ import AccessControl.*
def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse ""
@@ -208,7 +208,7 @@ class PathResolver(using c: Context) {
if (!settings.classpath.isDefault) settings.classpath.value
else sys.env.getOrElse("CLASSPATH", ".")
- import classPathFactory._
+ import classPathFactory.*
// Assemble the elements!
def basis: List[Iterable[ClassPath]] =
diff --git a/compiler/src/dotty/tools/dotc/config/Platform.scala b/compiler/src/dotty/tools/dotc/config/Platform.scala
index 73a05fbd41c1..2a0b207e68c1 100644
--- a/compiler/src/dotty/tools/dotc/config/Platform.scala
+++ b/compiler/src/dotty/tools/dotc/config/Platform.scala
@@ -3,7 +3,7 @@ package dotc
package config
import io.{ClassPath, AbstractFile}
-import core.Contexts._, core.Symbols._
+import core.Contexts.*, core.Symbols.*
import core.SymbolLoader
import core.StdNames.nme
import core.Flags.Module
diff --git a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala
index 0275e0d6a227..f6e29754ef94 100644
--- a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala
+++ b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc.config
-import dotty.tools.dotc.core._
-import Contexts._
-import Symbols._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Symbols.*
import dotty.tools.backend.sjs.JSDefinitions
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
index b10c9859e4d0..bd33a075c584 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -6,12 +6,12 @@ import scala.language.unsafeNulls
import dotty.tools.dotc.config.PathResolver.Defaults
import dotty.tools.dotc.config.Settings.{Setting, SettingGroup}
import dotty.tools.dotc.config.SourceVersion
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.rewrites.Rewrites
import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory}
import Setting.ChoiceWithHelp
-import scala.util.chaining._
+import scala.util.chaining.*
import java.util.zip.Deflater
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
index 7fdf57478f1a..9f603e6792be 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala
@@ -89,7 +89,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
def isInt(s: String) = Try(toInt(s)).isSuccess
- import ScalaBuild._
+ import ScalaBuild.*
def toBuild(s: String) = s match {
case null | "FINAL" => Final
diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala
index d992f5bdf2ee..79eb2b882f8f 100644
--- a/compiler/src/dotty/tools/dotc/config/Settings.scala
+++ b/compiler/src/dotty/tools/dotc/config/Settings.scala
@@ -3,7 +3,7 @@ package config
import scala.language.unsafeNulls
-import core.Contexts._
+import core.Contexts.*
import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory}
@@ -24,7 +24,7 @@ object Settings:
val OutputTag: ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile])
class SettingsState(initialValues: Seq[Any]):
- private val values = ArrayBuffer(initialValues: _*)
+ private val values = ArrayBuffer(initialValues*)
private var _wasRead: Boolean = false
override def toString: String = s"SettingsState(values: ${values.toList})"
diff --git a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
index 5b79432a97e7..20304b74c1da 100644
--- a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
+++ b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala
@@ -22,7 +22,7 @@ trait WrappedProperties extends PropertiesTrait {
override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten
def systemProperties: Iterator[(String, String)] = {
- import scala.jdk.CollectionConverters._
+ import scala.jdk.CollectionConverters.*
wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty
}
}
diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala
index 202f3eb26e41..43c753458f6e 100644
--- a/compiler/src/dotty/tools/dotc/core/Annotations.scala
+++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package core
-import Symbols._, Types._, Contexts._, Constants._, Phases.*
+import Symbols.*, Types.*, Contexts.*, Constants.*, Phases.*
import ast.tpd, tpd.*
import util.Spans.Span
import printing.{Showable, Printer}
diff --git a/compiler/src/dotty/tools/dotc/core/Atoms.scala b/compiler/src/dotty/tools/dotc/core/Atoms.scala
index bcaaf6794107..a68a07947965 100644
--- a/compiler/src/dotty/tools/dotc/core/Atoms.scala
+++ b/compiler/src/dotty/tools/dotc/core/Atoms.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package core
-import Types._
+import Types.*
/** Indicates the singleton types that a type must or may consist of.
* @param lo The lower bound: singleton types in this set are guaranteed
diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
index a61701eee2d7..060189016828 100644
--- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
+++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
@@ -2,9 +2,9 @@ package dotty.tools
package dotc
package core
-import Contexts._, Types._, Symbols._, Names._, Flags._
+import Contexts.*, Types.*, Symbols.*, Names.*, Flags.*
import Denotations.SingleDenotation
-import Decorators._
+import Decorators.*
import collection.mutable
import config.SourceVersion.future
import config.Feature.sourceVersion
@@ -62,7 +62,7 @@ object CheckRealizable {
* Type.isStable).
*/
class CheckRealizable(using Context) {
- import CheckRealizable._
+ import CheckRealizable.*
/** A set of all fields that have already been checked. Used
* to avoid infinite recursions when analyzing recursive types.
diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala
index 1b20b75ad8ac..80e9a105d658 100644
--- a/compiler/src/dotty/tools/dotc/core/Comments.scala
+++ b/compiler/src/dotty/tools/dotc/core/Comments.scala
@@ -5,10 +5,10 @@ package core
import scala.language.unsafeNulls
import ast.{ untpd, tpd }
-import Symbols._, Contexts._
+import Symbols.*, Contexts.*
import util.{SourceFile, ReadOnlyMap}
-import util.Spans._
-import util.CommentParsing._
+import util.Spans.*
+import util.CommentParsing.*
import util.Property.Key
import parsing.Parsers.Parser
import reporting.ProperDefinitionNotFound
diff --git a/compiler/src/dotty/tools/dotc/core/Constants.scala b/compiler/src/dotty/tools/dotc/core/Constants.scala
index f45e9e5217de..63acfbe55701 100644
--- a/compiler/src/dotty/tools/dotc/core/Constants.scala
+++ b/compiler/src/dotty/tools/dotc/core/Constants.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package core
-import Types._, Symbols._, Contexts._
+import Types.*, Symbols.*, Contexts.*
import printing.Printer
import printing.Texts.Text
@@ -210,7 +210,7 @@ object Constants {
}
override def hashCode: Int = {
- import scala.util.hashing.MurmurHash3._
+ import scala.util.hashing.MurmurHash3.*
val seed = 17
var h = seed
h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
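
The comment explains the mixing order: without the tag, numerically equal constants of different types (0, 0d, 0L, 0f) would collide. The same pattern with the public MurmurHash3 API; the seed and tag values here are illustrative, not the compiler's:

    import scala.util.hashing.MurmurHash3.*

    def constHash(tag: Int, valueHash: Int): Int =
      var h = 17        // seed
      h = mix(h, tag)   // keeps Int 0 and Long 0L apart
      h = mix(h, valueHash)
      finalizeHash(h, 2)

    val intZero  = constHash(1, 0.##)
    val longZero = constHash(2, 0L.##) // differs from intZero by tag only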
diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala
index c634f847e510..59f3aa9838c2 100644
--- a/compiler/src/dotty/tools/dotc/core/Constraint.scala
+++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package core
-import Types._, Contexts._
+import Types.*, Contexts.*
import printing.Showable
import util.{SimpleIdentitySet, SimpleIdentityMap}
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
index bbe46c344890..d43739019f2f 100644
--- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -2,11 +2,11 @@ package dotty.tools
package dotc
package core
-import Types._
-import Contexts._
-import Symbols._
-import Decorators._
-import Flags._
+import Types.*
+import Contexts.*
+import Symbols.*
+import Decorators.*
+import Flags.*
import config.Config
import config.Printers.typr
import typer.ProtoTypes.{newTypeVar, representedParamRef}
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
index d2b1246a8149..8ec38d52e725 100644
--- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
@@ -1,12 +1,13 @@
package dotty.tools.dotc
package core
-import Contexts._
+import Contexts.*
import config.Printers.{default, typr}
+import scala.compiletime.uninitialized
trait ConstraintRunInfo { self: Run =>
private var maxSize = 0
- private var maxConstraint: Constraint | Null = _
+ private var maxConstraint: Constraint | Null = uninitialized
def recordConstraintSize(c: Constraint, size: Int): Unit =
if (size > maxSize) {
maxSize = size
diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala
index aa85f714a8e5..8e0b022b891e 100644
--- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala
+++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc
package core
-import Contexts._, Symbols._, Types._, Flags._
-import Denotations._, SymDenotations._
+import Contexts.*, Symbols.*, Types.*, Flags.*
+import Denotations.*, SymDenotations.*
import Names.Name, StdNames.nme
import ast.untpd
diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala
index a2adc0058938..c575f7bc72f7 100644
--- a/compiler/src/dotty/tools/dotc/core/Contexts.scala
+++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -3,28 +3,28 @@ package dotc
package core
import interfaces.CompilerCallback
-import Decorators._
-import Periods._
-import Names._
-import Phases._
-import Types._
-import Symbols._
-import Scopes._
-import Uniques._
-import ast.Trees._
+import Decorators.*
+import Periods.*
+import Names.*
+import Phases.*
+import Types.*
+import Symbols.*
+import Scopes.*
+import Uniques.*
+import ast.Trees.*
import ast.untpd
import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance}
import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables}
import inlines.Inliner
-import Nullables._
+import Nullables.*
import Implicits.ContextualImplicits
-import config.Settings._
+import config.Settings.*
import config.Config
-import reporting._
+import reporting.*
import io.{AbstractFile, NoAbstractFile, PlainFile, Path}
import scala.io.Codec
import collection.mutable
-import printing._
+import printing.*
import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings}
import classfile.ReusableDataReader
import StdNames.nme
@@ -37,7 +37,7 @@ import dotty.tools.dotc.profile.Profiler
import dotty.tools.dotc.sbt.interfaces.{IncrementalCallback, ProgressCallback}
import util.Property.Key
import util.Store
-import plugins._
+import plugins.*
import java.util.concurrent.atomic.AtomicInteger
import java.nio.file.InvalidPathException
@@ -414,7 +414,7 @@ object Contexts {
* from constructor parameters to class parameter accessors.
*/
def superCallContext: Context = {
- val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*)
+ val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors*)
superOrThisCallContext(owner.primaryConstructor, locals)
}
@@ -558,7 +558,7 @@ object Contexts {
private var _owner: Symbol = uninitialized
final def owner: Symbol = _owner
- private var _tree: Tree[?]= _
+ private var _tree: Tree[?] = uninitialized
final def tree: Tree[?] = _tree
private var _scope: Scope = uninitialized
@@ -915,7 +915,7 @@ object Contexts {
def next()(using Context): FreshContext =
val base = ctx.base
- import base._
+ import base.*
val nestedCtx =
if inUse < pool.size then
pool(inUse).reuseIn(ctx)
diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala
index fc2b6a852216..29d4b3fa4052 100644
--- a/compiler/src/dotty/tools/dotc/core/Decorators.scala
+++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala
@@ -6,8 +6,8 @@ import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
import scala.util.control.NonFatal
-import Contexts._, Names._, Phases._, Symbols._
-import printing.{ Printer, Showable }, printing.Formatting._, printing.Texts._
+import Contexts.*, Names.*, Phases.*, Symbols.*
+import printing.{ Printer, Showable }, printing.Formatting.*, printing.Texts.*
import transform.MegaPhase
import reporting.{Message, NoExplanation}
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
index 97b1b7db1b21..f86eab71bc07 100644
--- a/compiler/src/dotty/tools/dotc/core/Definitions.scala
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -3,8 +3,8 @@ package dotc
package core
import scala.annotation.{threadUnsafe => tu}
-import Types._, Contexts._, Symbols._, SymDenotations._, StdNames._, Names._, Phases._
-import Flags._, Scopes._, Decorators._, NameOps._, Periods._, NullOpsDecorator._
+import Types.*, Contexts.*, Symbols.*, SymDenotations.*, StdNames.*, Names.*, Phases.*
+import Flags.*, Scopes.*, Decorators.*, NameOps.*, Periods.*, NullOpsDecorator.*
import unpickleScala2.Scala2Unpickler.ensureConstructor
import scala.collection.mutable
import collection.mutable
@@ -20,6 +20,7 @@ import cc.{CaptureSet, RetainingType}
import ast.tpd.ref
import scala.annotation.tailrec
+import scala.compiletime.uninitialized
object Definitions {
@@ -42,9 +43,9 @@ object Definitions {
*
*/
class Definitions {
- import Definitions._
+ import Definitions.*
- private var initCtx: Context = _
+ private var initCtx: Context = uninitialized
private given currentContext[Dummy_so_its_a_def]: Context = initCtx
private def newPermanentSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) =
@@ -1442,7 +1443,7 @@ class Definitions {
/** Base classes that are assumed to be pure for the purposes of capture checking.
* Every class inheriting from a pure baseclass is pure.
*/
- @tu lazy val pureBaseClasses = Set(defn.ThrowableClass)
+ @tu lazy val pureBaseClasses = Set(ThrowableClass, PureClass)
  /** Non-inheritable classes that are assumed to be pure for the purposes of capture checking,
*/
@@ -2001,7 +2002,7 @@ class Definitions {
class PerRun[T](generate: Context ?=> T) {
private var current: RunId = NoRunId
- private var cached: T = _
+ private var cached: T = uninitialized
def apply()(using Context): T = {
if (current != ctx.runId) {
cached = generate
diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala
index 6690cae3a142..59982fb99b5f 100644
--- a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala
+++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala
@@ -1,13 +1,13 @@
package dotty.tools.dotc
package core
-import Periods._
-import SymDenotations._
-import Contexts._
-import Types._
-import Symbols._
-import Denotations._
-import Phases._
+import Periods.*
+import SymDenotations.*
+import Contexts.*
+import Types.*
+import Symbols.*
+import Denotations.*
+import Phases.*
object DenotTransformers {
diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala
index 640ba8015be7..efbdfeb49246 100644
--- a/compiler/src/dotty/tools/dotc/core/Denotations.scala
+++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala
@@ -3,27 +3,29 @@ package dotc
package core
import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, LazyType, stillValid, acceptStale, traceInvalid }
-import Contexts._
-import Names._
-import NameKinds._
-import StdNames._
+import Contexts.*
+import Names.*
+import NameKinds.*
+import StdNames.*
import Symbols.NoSymbol
-import Symbols._
-import Types._
-import Periods._
-import Flags._
-import DenotTransformers._
-import Decorators._
-import Signature.MatchDegree._
-import printing.Texts._
+import Symbols.*
+import Types.*
+import Periods.*
+import Flags.*
+import DenotTransformers.*
+import Decorators.*
+import Signature.MatchDegree.*
+import printing.Texts.*
import printing.Printer
import io.AbstractFile
import config.Config
import config.Printers.overload
-import util.common._
+import util.common.*
import typer.ProtoTypes.NoViewsAllowed
import collection.mutable.ListBuffer
+import scala.compiletime.uninitialized
+
/** Denotations represent the meaning of symbols and named types.
* The following diagram shows how the principal types of denotations
* and their denoting entities relate to each other. Lines ending in
@@ -121,8 +123,8 @@ object Denotations {
/** Map `f` over all single denotations and aggregate the results with `g`. */
def aggregate[T](f: SingleDenotation => T, g: (T, T) => T): T
- private var cachedPrefix: Type = _
- private var cachedAsSeenFrom: AsSeenFromResult = _
+ private var cachedPrefix: Type = uninitialized
+ private var cachedAsSeenFrom: AsSeenFromResult = uninitialized
private var validAsSeenFrom: Period = Nowhere
type AsSeenFromResult <: PreDenotation
diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala
index fa57c503d61b..47b5c9f17af2 100644
--- a/compiler/src/dotty/tools/dotc/core/Flags.scala
+++ b/compiler/src/dotty/tools/dotc/core/Flags.scala
@@ -188,7 +188,7 @@ object Flags {
flag
}
- def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*)
+ def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags)*)
/** The empty flag set */
val EmptyFlags: FlagSet = FlagSet(0)
diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala
index ab0611b89b22..1cbfabc08958 100644
--- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala
+++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala
@@ -6,7 +6,7 @@ import Contexts.*, Decorators.*, Symbols.*, Types.*
import NameKinds.UniqueName
import config.Printers.{gadts, gadtsConstr}
import util.{SimpleIdentitySet, SimpleIdentityMap}
-import printing._
+import printing.*
import scala.annotation.tailrec
import scala.annotation.internal.sharable
diff --git a/compiler/src/dotty/tools/dotc/core/Hashable.scala b/compiler/src/dotty/tools/dotc/core/Hashable.scala
index 79da5f1dcd6f..5ab2d4a2af03 100644
--- a/compiler/src/dotty/tools/dotc/core/Hashable.scala
+++ b/compiler/src/dotty/tools/dotc/core/Hashable.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc
package core
-import Types._
+import Types.*
import scala.util.hashing.{ MurmurHash3 => hashing }
import annotation.tailrec
@@ -40,7 +40,7 @@ object Hashable {
}
trait Hashable {
- import Hashable._
+ import Hashable.*
protected def hashSeed: Int = getClass.hashCode
diff --git a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala
index 60fc4a4274e0..6244923cfb52 100644
--- a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala
+++ b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala
@@ -1,11 +1,11 @@
package dotty.tools.dotc
package core
-import Contexts._
+import Contexts.*
import Flags.JavaDefined
import StdNames.nme
-import Symbols._
-import Types._
+import Symbols.*
+import Types.*
/** This module defines methods to interpret types of Java symbols, which are implicitly nullable in Java,
* as Scala types, which are explicitly nullable.
diff --git a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala
index d8f41ef99b11..b6b316ac14d9 100644
--- a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala
+++ b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala
@@ -1,6 +1,6 @@
package dotty.tools.dotc.core
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.util.Property
import dotty.tools.dotc.reporting.trace
import dotty.tools.io.ClassPath
diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala
index 2109a5839380..5fc1b3137e90 100644
--- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala
+++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package core
-import Types._, Contexts._, Symbols._, Decorators._
+import Types.*, Contexts.*, Symbols.*, Decorators.*
import util.Property
import Names.Name
@@ -15,7 +15,7 @@ object MatchTypeTrace:
case Stuck(scrut: Type, stuckCase: Type, otherCases: List[Type])
case NoInstance(scrut: Type, stuckCase: Type, fails: List[(Name, TypeBounds)])
case EmptyScrutinee(scrut: Type)
- import TraceEntry._
+ import TraceEntry.*
private class MatchTrace:
var entries: List[TraceEntry] = Nil
diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala
index da578e9bf0b0..133d110cabda 100644
--- a/compiler/src/dotty/tools/dotc/core/Mode.scala
+++ b/compiler/src/dotty/tools/dotc/core/Mode.scala
@@ -10,7 +10,7 @@ package dotty.tools.dotc.core
* Also, a setting is externally settable, while a mode isn't.
*/
case class Mode(val bits: Int) extends AnyVal {
- import Mode._
+ import Mode.*
def | (that: Mode): Mode = Mode(bits | that.bits)
def & (that: Mode): Mode = Mode(bits & that.bits)
def &~ (that: Mode): Mode = Mode(bits & ~that.bits)
diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala
index 3e8d398d05d6..d4f009cbbbd5 100644
--- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala
+++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala
@@ -2,12 +2,12 @@ package dotty.tools
package dotc
package core
-import Names._
-import NameOps._
-import StdNames._
-import NameTags._
-import Contexts._
-import Decorators._
+import Names.*
+import NameOps.*
+import StdNames.*
+import NameTags.*
+import Contexts.*
+import Decorators.*
import scala.annotation.internal.sharable
diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala
index bdb01a079148..415aa049c587 100644
--- a/compiler/src/dotty/tools/dotc/core/NameOps.scala
+++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala
@@ -6,11 +6,11 @@ import java.security.MessageDigest
import java.nio.CharBuffer
import scala.io.Codec
import Int.MaxValue
-import Names._, StdNames._, Contexts._, Symbols._, Flags._, NameKinds._, Types._
+import Names.*, StdNames.*, Contexts.*, Symbols.*, Flags.*, NameKinds.*, Types.*
import util.Chars.{isOperatorPart, digit2int}
import Decorators.*
-import Definitions._
-import nme._
+import Definitions.*
+import nme.*
object NameOps {
diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala
index dc09edd79781..e4364d168267 100644
--- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala
+++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package core
-import Contexts._, Symbols._, Types._, Flags._, Scopes._, Decorators._, Names._, NameOps._
+import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.*
import SymDenotations.{LazyType, SymDenotation}, StdNames.nme
import TypeApplications.EtaExpansion
diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala
index 1e08379b57f0..3f9667b08067 100644
--- a/compiler/src/dotty/tools/dotc/core/Names.scala
+++ b/compiler/src/dotty/tools/dotc/core/Names.scala
@@ -13,7 +13,7 @@ import util.{LinearMap, HashSet}
import scala.annotation.internal.sharable
object Names {
- import NameKinds._
+ import NameKinds.*
/** Things that can be turned into names with `toTermName` and `toTypeName`.
* Decorators implements these as extension methods for strings.
diff --git a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala
index e18271772ff1..4f22f9d31e36 100644
--- a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala
+++ b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc
package core
-import Contexts._
-import Types._
+import Contexts.*
+import Types.*
/** Defines operations on nullable types and trees. */
object NullOpsDecorator:
@@ -49,7 +49,7 @@ object NullOpsDecorator:
}
end extension
- import ast.tpd._
+ import ast.tpd.*
extension (self: Tree)
// cast the type of the tree to a non-nullable type
diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
index 0328cea9b3ca..29e665956091 100644
--- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
+++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -2,11 +2,11 @@ package dotty.tools
package dotc
package core
-import Types._, Contexts._, Symbols._, Decorators._, TypeApplications._
+import Types.*, Contexts.*, Symbols.*, Decorators.*, TypeApplications.*
import util.{SimpleIdentitySet, SimpleIdentityMap}
import collection.mutable
import printing.Printer
-import printing.Texts._
+import printing.Texts.*
import config.Config
import config.Printers.constr
import reflect.ClassTag
@@ -14,6 +14,8 @@ import annotation.tailrec
import annotation.internal.sharable
import cc.{CapturingType, derivedCapturingType}
+import scala.compiletime.uninitialized
+
object OrderingConstraint {
/** If true, use reverse dependencies in `replace` to avoid checking the bounds
@@ -124,7 +126,7 @@ object OrderingConstraint {
val empty = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentitySet.empty)
}
-import OrderingConstraint._
+import OrderingConstraint.*
/** Constraint over undetermined type parameters that keeps separate maps to
* reflect parameter orderings.
@@ -881,7 +883,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
i += 1
}
- private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = _
+ private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = uninitialized
/** The uninstantiated typevars of this constraint */
def uninstVars: collection.Seq[TypeVar] = {
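The other systematic change in this patch: the deprecated wildcard initializer `var x: T = _` becomes `scala.compiletime.uninitialized`, which has the same runtime meaning (the field starts at its JVM default value). A minimal sketch of the idiom with a hypothetical class:

import scala.compiletime.uninitialized

class Cache:
  // null until `load` is called, exactly like the old `= _`
  private var data: Array[Int] = uninitialized
  def load(n: Int): Unit = data = new Array[Int](n)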
diff --git a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala
index e88d6540e64b..e499f718365a 100644
--- a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala
+++ b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc.core
import Names.Name
-import Contexts._
+import Contexts.*
import Types.Type
import Variances.{Variance, varianceToInt}
diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala
index 5e8a960608e6..4e3596ea8814 100644
--- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala
+++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala
@@ -2,14 +2,14 @@ package dotty.tools
package dotc
package core
-import Decorators._
-import Symbols._
-import Types._
-import Flags._
+import Decorators.*
+import Symbols.*
+import Types.*
+import Flags.*
import Contexts.ctx
import dotty.tools.dotc.reporting.trace
import config.Feature.migrateTo3
-import config.Printers._
+import config.Printers.*
trait PatternTypeConstrainer { self: TypeComparer =>
@@ -76,7 +76,7 @@ trait PatternTypeConstrainer { self: TypeComparer =>
def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false): Boolean = trace(i"constrainPatternType($scrut, $pat)", gadts) {
def classesMayBeCompatible: Boolean = {
- import Flags._
+ import Flags.*
val patCls = pat.classSymbol
val scrCls = scrut.classSymbol
!patCls.exists || !scrCls.exists || {
diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala
index ee877fb538d4..019c5932b3c9 100644
--- a/compiler/src/dotty/tools/dotc/core/Periods.scala
+++ b/compiler/src/dotty/tools/dotc/core/Periods.scala
@@ -1,6 +1,6 @@
package dotty.tools.dotc.core
-import Contexts._
+import Contexts.*
import Phases.unfusedPhases
object Periods {
diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala
index 9ba8332a8634..e04d829d1e60 100644
--- a/compiler/src/dotty/tools/dotc/core/Phases.scala
+++ b/compiler/src/dotty/tools/dotc/core/Phases.scala
@@ -2,17 +2,17 @@ package dotty.tools
package dotc
package core
-import Periods._
-import Contexts._
+import Periods.*
+import Contexts.*
import dotty.tools.backend.jvm.GenBCode
-import DenotTransformers._
-import Denotations._
-import Decorators._
+import DenotTransformers.*
+import Denotations.*
+import Decorators.*
import config.Printers.config
import scala.collection.mutable.ListBuffer
-import dotty.tools.dotc.transform.MegaPhase._
-import dotty.tools.dotc.transform._
-import Periods._
+import dotty.tools.dotc.transform.MegaPhase.*
+import dotty.tools.dotc.transform.*
+import Periods.*
import parsing.Parser
import printing.XprintMode
import typer.{TyperPhase, RefChecks}
@@ -21,6 +21,7 @@ import typer.ImportInfo.withRootImports
import ast.{tpd, untpd}
import scala.annotation.internal.sharable
import scala.util.control.NonFatal
+import scala.compiletime.uninitialized
object Phases {
@@ -205,30 +206,30 @@ object Phases {
if nextDenotTransformerId(i) == phase.id then
nextDenotTransformerId(i) = nextDenotTransformerId(phase.id + 1)
- private var myParserPhase: Phase = _
- private var myTyperPhase: Phase = _
- private var myPostTyperPhase: Phase = _
- private var mySbtExtractDependenciesPhase: Phase = _
- private var myPicklerPhase: Phase = _
- private var myInliningPhase: Phase = _
- private var myStagingPhase: Phase = _
- private var mySplicingPhase: Phase = _
- private var myFirstTransformPhase: Phase = _
- private var myCollectNullableFieldsPhase: Phase = _
- private var myRefChecksPhase: Phase = _
- private var myPatmatPhase: Phase = _
- private var myElimRepeatedPhase: Phase = _
- private var myElimByNamePhase: Phase = _
- private var myExtensionMethodsPhase: Phase = _
- private var myExplicitOuterPhase: Phase = _
- private var myGettersPhase: Phase = _
- private var myErasurePhase: Phase = _
- private var myElimErasedValueTypePhase: Phase = _
- private var myLambdaLiftPhase: Phase = _
- private var myCountOuterAccessesPhase: Phase = _
- private var myFlattenPhase: Phase = _
- private var myGenBCodePhase: Phase = _
- private var myCheckCapturesPhase: Phase = _
+ private var myParserPhase: Phase = uninitialized
+ private var myTyperPhase: Phase = uninitialized
+ private var myPostTyperPhase: Phase = uninitialized
+ private var mySbtExtractDependenciesPhase: Phase = uninitialized
+ private var myPicklerPhase: Phase = uninitialized
+ private var myInliningPhase: Phase = uninitialized
+ private var myStagingPhase: Phase = uninitialized
+ private var mySplicingPhase: Phase = uninitialized
+ private var myFirstTransformPhase: Phase = uninitialized
+ private var myCollectNullableFieldsPhase: Phase = uninitialized
+ private var myRefChecksPhase: Phase = uninitialized
+ private var myPatmatPhase: Phase = uninitialized
+ private var myElimRepeatedPhase: Phase = uninitialized
+ private var myElimByNamePhase: Phase = uninitialized
+ private var myExtensionMethodsPhase: Phase = uninitialized
+ private var myExplicitOuterPhase: Phase = uninitialized
+ private var myGettersPhase: Phase = uninitialized
+ private var myErasurePhase: Phase = uninitialized
+ private var myElimErasedValueTypePhase: Phase = uninitialized
+ private var myLambdaLiftPhase: Phase = uninitialized
+ private var myCountOuterAccessesPhase: Phase = uninitialized
+ private var myFlattenPhase: Phase = uninitialized
+ private var myGenBCodePhase: Phase = uninitialized
+ private var myCheckCapturesPhase: Phase = uninitialized
final def parserPhase: Phase = myParserPhase
final def typerPhase: Phase = myTyperPhase
@@ -389,7 +390,7 @@ object Phases {
def printingContext(ctx: Context): Context = ctx
private var myPeriod: Period = Periods.InvalidPeriod
- private var myBase: ContextBase = _
+ private var myBase: ContextBase = uninitialized
private var myErasedTypes = false
private var myFlatClasses = false
private var myRefChecked = false
diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala
index 99076b422358..7df5a7fa3c09 100644
--- a/compiler/src/dotty/tools/dotc/core/Scopes.scala
+++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala
@@ -7,18 +7,19 @@ package dotty.tools
package dotc
package core
-import Symbols._
+import Symbols.*
import Types.{TermRef, NoPrefix}
-import Flags._
-import Names._
-import Contexts._
-import Phases._
-import Denotations._
-import printing.Texts._
+import Flags.*
+import Names.*
+import Contexts.*
+import Phases.*
+import Denotations.*
+import printing.Texts.*
import printing.Printer
import SymDenotations.NoDenotation
import collection.mutable
+import scala.compiletime.uninitialized
object Scopes {
diff --git a/compiler/src/dotty/tools/dotc/core/Signature.scala b/compiler/src/dotty/tools/dotc/core/Signature.scala
index bd744ec01846..f62d594d639d 100644
--- a/compiler/src/dotty/tools/dotc/core/Signature.scala
+++ b/compiler/src/dotty/tools/dotc/core/Signature.scala
@@ -3,9 +3,9 @@ package core
import scala.annotation.tailrec
-import Names._, Types._, Contexts._, StdNames._, Decorators._
+import Names.*, Types.*, Contexts.*, StdNames.*, Decorators.*
import TypeErasure.sigName
-import Signature._
+import Signature.*
/** The signature of a denotation.
*
diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala
index 4fc7ea4185d8..1436b41b521c 100644
--- a/compiler/src/dotty/tools/dotc/core/StdNames.scala
+++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala
@@ -4,9 +4,9 @@ package core
import scala.collection.mutable
import scala.annotation.switch
import scala.annotation.internal.sharable
-import Names._
-import Symbols._
-import Contexts._
+import Names.*
+import Symbols.*
+import Contexts.*
object StdNames {
diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala
index 5a641416b3e1..96da91293d91 100644
--- a/compiler/src/dotty/tools/dotc/core/Substituters.scala
+++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc
package core
-import Types._, Symbols._, Contexts._
+import Types.*, Symbols.*, Contexts.*
import cc.CaptureSet.IdempotentCaptRefMap
/** Substitution operations on types. See the corresponding `subst` and
@@ -189,7 +189,7 @@ object Substituters:
def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx)
}
- final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap {
+ final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap {
def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx)
}
diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
index 395164b1dd48..e18e1463f3ae 100644
--- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
+++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -2,16 +2,16 @@ package dotty.tools
package dotc
package core
-import Periods._, Contexts._, Symbols._, Denotations._, Names._, NameOps._, Annotations._
-import Types._, Flags._, Decorators._, DenotTransformers._, StdNames._, Scopes._
-import NameOps._, NameKinds._
+import Periods.*, Contexts.*, Symbols.*, Denotations.*, Names.*, NameOps.*, Annotations.*
+import Types.*, Flags.*, Decorators.*, DenotTransformers.*, StdNames.*, Scopes.*
+import NameOps.*, NameKinds.*
import Phases.{Phase, typerPhase, unfusedPhases}
import Constants.Constant
import TypeApplications.TypeParamInfo
import Scopes.Scope
import dotty.tools.io.AbstractFile
-import Decorators._
-import ast._
+import Decorators.*
+import ast.*
import ast.Trees.{LambdaTypeTree, TypeBoundsTree}
import Trees.Literal
import Variances.Variance
@@ -21,12 +21,13 @@ import util.Stats
import java.util.WeakHashMap
import scala.util.control.NonFatal
import config.Config
-import reporting._
+import reporting.*
import collection.mutable
-import transform.TypeUtils._
+import transform.TypeUtils.*
import cc.{CapturingType, derivedCapturingType}
import scala.annotation.internal.sharable
+import scala.compiletime.uninitialized
object SymDenotations {
@@ -2431,7 +2432,7 @@ object SymDenotations {
initPrivateWithin: Symbol)
extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin) {
- private var packageObjsCache: List[ClassDenotation] = _
+ private var packageObjsCache: List[ClassDenotation] = uninitialized
private var packageObjsRunId: RunId = NoRunId
private var ambiguityWarningIssued: Boolean = false
diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
index 3969a09a69ee..5f6078a14625 100644
--- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
+++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
@@ -11,11 +11,11 @@ import dotty.tools.dotc.classpath.FileUtils.isTasty
import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile }
import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions
-import Contexts._, Symbols._, Flags._, SymDenotations._, Types._, Scopes._, Names._
-import NameOps._
-import StdNames._
+import Contexts.*, Symbols.*, Flags.*, SymDenotations.*, Types.*, Scopes.*, Names.*
+import NameOps.*
+import StdNames.*
import classfile.{ClassfileParser, ClassfileTastyUUIDParser}
-import Decorators._
+import Decorators.*
import util.Stats
import reporting.trace
@@ -24,11 +24,12 @@ import ast.desugar
import parsing.JavaParsers.OutlineJavaParser
import parsing.Parsers.OutlineParser
-import dotty.tools.tasty.TastyHeaderUnpickler
+import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException, UnpicklerConfig}
+import dotty.tools.dotc.core.tasty.TastyUnpickler
object SymbolLoaders {
- import ast.untpd._
+ import ast.untpd.*
/** A marker trait for a completer that replaces the original
* Symbol loader for an unpickled root.
@@ -421,14 +422,25 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader {
def description(using Context): String = "TASTy file " + tastyFile.toString
override def doComplete(root: SymDenotation)(using Context): Unit =
- val (classRoot, moduleRoot) = rootDenots(root.asClass)
- val tastyBytes = tastyFile.toByteArray
- val unpickler = new tasty.DottyUnpickler(tastyBytes)
- unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource))
- if mayLoadTreesFromTasty then
- classRoot.classSymbol.rootTreeOrProvider = unpickler
- moduleRoot.classSymbol.rootTreeOrProvider = unpickler
- checkTastyUUID(tastyFile, tastyBytes)
+ try
+ val (classRoot, moduleRoot) = rootDenots(root.asClass)
+ val tastyBytes = tastyFile.toByteArray
+ val unpickler = new tasty.DottyUnpickler(tastyBytes)
+ unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource))
+ if mayLoadTreesFromTasty then
+ classRoot.classSymbol.rootTreeOrProvider = unpickler
+ moduleRoot.classSymbol.rootTreeOrProvider = unpickler
+ checkTastyUUID(tastyFile, tastyBytes)
+ catch case e: RuntimeException =>
+ val message = e match
+ case e: UnpickleException =>
+ i"""TASTy file ${tastyFile.canonicalPath} could not be read, failing with:
+ | ${Option(e.getMessage).getOrElse("")}"""
+ case _ =>
+ i"""TASTy file ${tastyFile.canonicalPath} is broken, reading aborted with ${e.getClass}
+ | ${Option(e.getMessage).getOrElse("")}"""
+ if (ctx.debug) e.printStackTrace()
+ throw IOException(message)
private def checkTastyUUID(tastyFile: AbstractFile, tastyBytes: Array[Byte])(using Context): Unit =
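The new `try`/`catch` wraps any `RuntimeException` raised during unpickling into an `IOException` whose message names the offending TASTy file, with `UnpickleException` getting the friendlier wording. The same wrap-and-rethrow pattern in isolation (hypothetical helper, not part of the patch):

import java.io.IOException

def readOrExplain[T](path: String)(body: => T): T =
  try body
  catch case e: RuntimeException =>
    // surface the file name and the original failure instead of a bare stack trace
    throw new IOException(
      s"$path could not be read: ${e.getClass.getName}: ${Option(e.getMessage).getOrElse("")}", e)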
@@ -436,7 +448,7 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader {
val className = tastyFile.name.stripSuffix(".tasty")
tastyFile.resolveSibling(className + ".class")
if classfile != null then
- val tastyUUID = new TastyHeaderUnpickler(tastyBytes).readHeader()
+ val tastyUUID = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, tastyBytes).readHeader()
new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID)
else
// This will be the case in any of our tests that compile with `-Youtput-only-tasty`
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala
index 1e21c1a743f4..a41d194693e6 100644
--- a/compiler/src/dotty/tools/dotc/core/Symbols.scala
+++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala
@@ -2,23 +2,23 @@ package dotty.tools
package dotc
package core
-import Periods._
-import Names._
-import Scopes._
-import Flags._
-import Decorators._
-import Contexts._
-import Phases._
-import SymDenotations._
-import Denotations._
-import printing.Texts._
+import Periods.*
+import Names.*
+import Scopes.*
+import Flags.*
+import Decorators.*
+import Contexts.*
+import Phases.*
+import SymDenotations.*
+import Denotations.*
+import printing.Texts.*
import printing.Printer
-import Types._
-import util.Spans._
-import DenotTransformers._
-import StdNames._
-import NameOps._
-import transform.SymUtils._
+import Types.*
+import util.Spans.*
+import DenotTransformers.*
+import StdNames.*
+import NameOps.*
+import transform.SymUtils.*
import NameKinds.LazyImplicitName
import ast.tpd
import tpd.{Tree, TreeProvider, TreeOps}
@@ -33,6 +33,8 @@ import scala.annotation.internal.sharable
import config.Printers.typr
import dotty.tools.dotc.classpath.FileUtils.isScalaBinary
+import scala.compiletime.uninitialized
+
object Symbols {
implicit def eqSymbol: CanEqual[Symbol, Symbol] = CanEqual.derived
@@ -88,7 +90,7 @@ object Symbols {
ctx.settings.YcheckInitGlobal.value
/** The last denotation of this symbol */
- private var lastDenot: SymDenotation = _
+ private var lastDenot: SymDenotation = uninitialized
private var checkedPeriod: Period = Nowhere
private[core] def invalidateDenotCache(): Unit = { checkedPeriod = Nowhere }
diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
index 1cd1a3ad4d39..e725df199476 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -2,13 +2,13 @@ package dotty.tools
package dotc
package core
-import Types._
-import Contexts._
-import Symbols._
+import Types.*
+import Contexts.*
+import Symbols.*
import SymDenotations.LazyType
-import Decorators._
-import util.Stats._
-import Names._
+import Decorators.*
+import util.Stats.*
+import Names.*
import StdNames.nme
import Flags.{Module, Provisional}
import dotty.tools.dotc.config.Config
@@ -154,7 +154,7 @@ object TypeApplications {
}
}
-import TypeApplications._
+import TypeApplications.*
/** A decorator that provides methods for modeling type application */
class TypeApplications(val self: Type) extends AnyVal {
diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
index d5b97dca6164..f853d28e6361 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -2,8 +2,8 @@ package dotty.tools
package dotc
package core
-import Types._, Contexts._, Symbols._, Flags._, Names._, NameOps._, Denotations._
-import Decorators._
+import Types.*, Contexts.*, Symbols.*, Flags.*, Names.*, NameOps.*, Denotations.*
+import Decorators.*
import Phases.{gettersPhase, elimByNamePhase}
import StdNames.nme
import TypeOps.refineUsingParent
@@ -13,11 +13,11 @@ import config.Config
import config.Feature.migrateTo3
import config.Printers.{subtyping, gadts, matchTypes, noPrinter}
import TypeErasure.{erasedLub, erasedGlb}
-import TypeApplications._
+import TypeApplications.*
import Variances.{Variance, variancesConform}
import Constants.Constant
-import transform.TypeUtils._
-import transform.SymUtils._
+import transform.TypeUtils.*
+import transform.SymUtils.*
import scala.util.control.NonFatal
import typer.ProtoTypes.constrained
import typer.Applications.productSelectorTypes
@@ -29,7 +29,7 @@ import NameKinds.WildcardParamName
/** Provides methods to compare types.
*/
class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling, PatternTypeConstrainer {
- import TypeComparer._
+ import TypeComparer.*
Stats.record("TypeComparer")
private var myContext: Context = initctx
@@ -3342,7 +3342,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) {
* subtraces; never print backtraces starting with `<==`.
*/
class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeComparer(initctx) {
- import TypeComparer._
+ import TypeComparer.*
init(initctx)
diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
index c914dcdf7e5c..9dfa33b739c7 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -2,17 +2,17 @@ package dotty.tools
package dotc
package core
-import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Phases._
+import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.*
import Flags.JavaDefined
import Uniques.unique
import TypeOps.makePackageObjPrefixExplicit
import backend.sjs.JSDefinitions
-import transform.ExplicitOuter._
-import transform.ValueClasses._
-import transform.TypeUtils._
-import transform.ContextFunctionResults._
+import transform.ExplicitOuter.*
+import transform.ValueClasses.*
+import transform.TypeUtils.*
+import transform.ContextFunctionResults.*
import unpickleScala2.Scala2Erasure
-import Decorators._
+import Decorators.*
import Definitions.MaxImplementedFunctionArity
import scala.annotation.tailrec
@@ -404,7 +404,7 @@ object TypeErasure {
tp1 // After erasure, T | Nothing is just T and C | Null is just C, if C is a reference type.
else tp1 match {
case JavaArrayType(elem1) =>
- import dotty.tools.dotc.transform.TypeUtils._
+ import dotty.tools.dotc.transform.TypeUtils.*
tp2 match {
case JavaArrayType(elem2) =>
if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType)
@@ -571,7 +571,7 @@ object TypeErasure {
erasure(functionType(applyInfo))
}
-import TypeErasure._
+import TypeErasure.*
/**
* @param sourceLanguage Adapt our erasure rules to mimic what the given language
diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala
index dcc4874bc34f..2fa769e25852 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala
@@ -2,15 +2,15 @@ package dotty.tools
package dotc
package core
-import Types._
-import Symbols._
-import Flags._
-import Names._
-import Contexts._
-import SymDenotations._
-import Denotations._
-import Decorators._
-import reporting._
+import Types.*
+import Symbols.*
+import Flags.*
+import Names.*
+import Contexts.*
+import SymDenotations.*
+import Denotations.*
+import Decorators.*
+import reporting.*
import ast.untpd
import config.Printers.{cyclicErrors, noPrinter}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
index 5f63f4871c5f..7f9f7099d805 100644
--- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala
+++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
@@ -2,21 +2,21 @@ package dotty.tools
package dotc
package core
-import Contexts._, Types._, Symbols._, Names._, NameKinds.*, Flags._
-import SymDenotations._
-import util.Spans._
+import Contexts.*, Types.*, Symbols.*, Names.*, NameKinds.*, Flags.*
+import SymDenotations.*
+import util.Spans.*
import util.Stats
-import Decorators._
-import StdNames._
+import Decorators.*
+import StdNames.*
import collection.mutable
-import ast.tpd._
+import ast.tpd.*
import reporting.trace
import config.Printers.typr
import config.Feature
import transform.SymUtils.*
-import typer.ProtoTypes._
+import typer.ProtoTypes.*
import typer.ForceDegree
-import typer.Inferencing._
+import typer.Inferencing.*
import typer.IfBottom
import reporting.TestingReporter
import cc.{CapturingType, derivedCapturingType, CaptureSet, isBoxed, isBoxedCapturing}
diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala
index d2df2a2aebef..ef7329c3698d 100644
--- a/compiler/src/dotty/tools/dotc/core/TyperState.scala
+++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala
@@ -2,18 +2,19 @@ package dotty.tools
package dotc
package core
-import Types._
-import Contexts._
+import Types.*
+import Contexts.*
import util.SimpleIdentitySet
-import reporting._
+import reporting.*
import config.Config
import config.Printers.constr
import collection.mutable
import java.lang.ref.WeakReference
import util.{Stats, SimpleIdentityMap}
-import Decorators._
+import Decorators.*
import scala.annotation.internal.sharable
+import scala.compiletime.uninitialized
object TyperState {
@sharable private var nextId: Int = 0
@@ -44,19 +45,19 @@ object TyperState {
class TyperState() {
import TyperState.LevelMap
- private var myId: Int = _
+ private var myId: Int = uninitialized
def id: Int = myId
- private var previous: TyperState | Null = _
+ private var previous: TyperState | Null = uninitialized
- private var myReporter: Reporter = _
+ private var myReporter: Reporter = uninitialized
def reporter: Reporter = myReporter
/** A fresh type state with the same constraint as this one and the given reporter */
def setReporter(reporter: Reporter): this.type = { myReporter = reporter; this }
- private var myConstraint: Constraint = _
+ private var myConstraint: Constraint = uninitialized
def constraint: Constraint = myConstraint
def constraint_=(c: Constraint)(using Context): Unit = {
@@ -66,9 +67,9 @@ class TyperState() {
c.checkConsistentVars()
}
- private var previousConstraint: Constraint = _
+ private var previousConstraint: Constraint = uninitialized
- private var myIsCommittable: Boolean = _
+ private var myIsCommittable: Boolean = uninitialized
def isCommittable: Boolean = myIsCommittable
@@ -79,7 +80,7 @@ class TyperState() {
def isGlobalCommittable: Boolean =
isCommittable && (previous == null || previous.uncheckedNN.isGlobalCommittable)
- private var isCommitted: Boolean = _
+ private var isCommitted: Boolean = uninitialized
/** The set of uninstantiated type variables which have this state as their owning state.
*
@@ -87,11 +88,11 @@ class TyperState() {
* if `tstate.isCommittable` then
* `tstate.ownedVars.contains(tvar)` iff `tvar.owningState.get eq tstate`
*/
- private var myOwnedVars: TypeVars = _
+ private var myOwnedVars: TypeVars = uninitialized
def ownedVars: TypeVars = myOwnedVars
def ownedVars_=(vs: TypeVars): Unit = myOwnedVars = vs
- private var upLevels: LevelMap = _
+ private var upLevels: LevelMap = uninitialized
/** Initializes all fields except reporter, isCommittable, which need to be
* set separately.
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala
index eb878b430183..61e16f1be668 100644
--- a/compiler/src/dotty/tools/dotc/core/Types.scala
+++ b/compiler/src/dotty/tools/dotc/core/Types.scala
@@ -2,32 +2,32 @@ package dotty.tools
package dotc
package core
-import Symbols._
-import Flags._
-import Names._
-import StdNames._, NameOps._
-import NullOpsDecorator._
+import Symbols.*
+import Flags.*
+import Names.*
+import StdNames.*, NameOps.*
+import NullOpsDecorator.*
import NameKinds.SkolemName
-import Scopes._
-import Constants._
-import Contexts._
-import Phases._
-import Annotations._
-import SymDenotations._
-import Decorators._
-import Denotations._
-import Periods._
-import CheckRealizable._
+import Scopes.*
+import Constants.*
+import Contexts.*
+import Phases.*
+import Annotations.*
+import SymDenotations.*
+import Decorators.*
+import Denotations.*
+import Periods.*
+import CheckRealizable.*
import Variances.{Variance, setStructuralVariances, Invariant}
import typer.Nullables
-import util.Stats._
+import util.Stats.*
import util.{SimpleIdentityMap, SimpleIdentitySet}
-import ast.tpd._
+import ast.tpd.*
import ast.TreeTypeMap
-import printing.Texts._
+import printing.Texts.*
import printing.Printer
-import Hashable._
-import Uniques._
+import Hashable.*
+import Uniques.*
import collection.mutable
import config.Config
import annotation.{tailrec, constructorOnly}
@@ -42,7 +42,7 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap}
import scala.annotation.internal.sharable
import scala.annotation.threadUnsafe
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.transform.TypeUtils.isErasedClass
object Types {
@@ -2164,7 +2164,7 @@ object Types {
/** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs */
trait CaptureRef extends SingletonType:
- private var myCaptureSet: CaptureSet | Null = _
+ private var myCaptureSet: CaptureSet | Null = uninitialized
private var myCaptureSetRunId: Int = NoRunId
private var mySingletonCaptureSet: CaptureSet.Const | Null = null
@@ -2285,7 +2285,7 @@ object Types {
private var lastSymbol: Symbol | Null = null
private var checkedPeriod: Period = Nowhere
private var myStableHash: Byte = 0
- private var mySignature: Signature = _
+ private var mySignature: Signature = uninitialized
private var mySignatureRunId: Int = NoRunId
// Invariants:
@@ -2941,7 +2941,7 @@ object Types {
type ThisName = TypeName
private var myCanDropAliasPeriod: Period = Nowhere
- private var myCanDropAlias: Boolean = _
+ private var myCanDropAlias: Boolean = uninitialized
/** Given an alias type `type A = B` where a recursive comparison with `B` yields
* `false`, can we conclude that the comparison is definitely false?
@@ -3405,7 +3405,7 @@ object Types {
abstract case class AndType(tp1: Type, tp2: Type) extends AndOrType {
def isAnd: Boolean = true
private var myBaseClassesPeriod: Period = Nowhere
- private var myBaseClasses: List[ClassSymbol] = _
+ private var myBaseClasses: List[ClassSymbol] = uninitialized
/** Base classes are the merge of the operand base classes. */
override final def baseClasses(using Context): List[ClassSymbol] = {
if (myBaseClassesPeriod != ctx.period) {
@@ -3498,7 +3498,7 @@ object Types {
def isAnd: Boolean = false
def isSoft: Boolean
private var myBaseClassesPeriod: Period = Nowhere
- private var myBaseClasses: List[ClassSymbol] = _
+ private var myBaseClasses: List[ClassSymbol] = uninitialized
/** Base classes are the intersection of the operand base classes. */
override final def baseClasses(using Context): List[ClassSymbol] = {
if (myBaseClassesPeriod != ctx.period) {
@@ -3527,7 +3527,7 @@ object Types {
myFactorCount
else 1
- private var myJoin: Type = _
+ private var myJoin: Type = uninitialized
private var myJoinPeriod: Period = Nowhere
/** Replace or type by the closest non-or type above it */
@@ -3541,7 +3541,7 @@ object Types {
myJoin
}
- private var myUnion: Type = _
+ private var myUnion: Type = uninitialized
private var myUnionPeriod: Period = Nowhere
override def widenUnionWithoutNull(using Context): Type =
@@ -3556,8 +3556,8 @@ object Types {
myUnion
private var atomsRunId: RunId = NoRunId
- private var myAtoms: Atoms = _
- private var myWidened: Type = _
+ private var myAtoms: Atoms = uninitialized
+ private var myWidened: Type = uninitialized
private def computeAtoms()(using Context): Atoms =
val tp1n = tp1.normalized
@@ -3797,11 +3797,11 @@ object Types {
// (1) mySignatureRunId != NoRunId => mySignature != null
// (2) myJavaSignatureRunId != NoRunId => myJavaSignature != null
- private var mySignature: Signature = _
+ private var mySignature: Signature = uninitialized
private var mySignatureRunId: Int = NoRunId
- private var myJavaSignature: Signature = _
+ private var myJavaSignature: Signature = uninitialized
private var myJavaSignatureRunId: Int = NoRunId
- private var myScala2Signature: Signature = _
+ private var myScala2Signature: Signature = uninitialized
private var myScala2SignatureRunId: Int = NoRunId
/** If `isJava` is false, the Scala signature of this method. Otherwise, its Java signature.
@@ -3882,7 +3882,7 @@ object Types {
}
trait TermLambda extends LambdaType { thisLambdaType =>
- import DepStatus._
+ import DepStatus.*
type ThisName = TermName
type PInfo = Type
type This >: this.type <: TermLambda
@@ -4993,7 +4993,7 @@ object Types {
def underlying(using Context): Type = bound
private var myReduced: Type | Null = null
- private var reductionContext: util.MutableMap[Type, Type] = _
+ private var reductionContext: util.MutableMap[Type, Type] = uninitialized
override def tryNormalize(using Context): Type =
try
@@ -5420,7 +5420,7 @@ object Types {
override def stripped(using Context): Type = parent.stripped
private var isRefiningKnown = false
- private var isRefiningCache: Boolean = _
+ private var isRefiningCache: Boolean = uninitialized
def isRefining(using Context): Boolean = {
if (!isRefiningKnown) {
@@ -5956,7 +5956,7 @@ object Types {
val elems = scope.toList
val elems1 = mapOver(elems)
if (elems1 eq elems) scope
- else newScopeWith(elems1: _*)
+ else newScopeWith(elems1*)
}
def mapOver(tree: Tree): Tree = treeTypeMap(tree)
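The same Scala 3 migration also covers the varargs splice: `xs: _*` becomes `xs*`. In isolation:

def sum(xs: Int*): Int = xs.sum
val nums = List(1, 2, 3)
sum(nums*)   // Scala 3 spelling; Scala 2 wrote sum(nums: _*)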
diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala
index 4078a2b1051a..da6b0aba88bd 100644
--- a/compiler/src/dotty/tools/dotc/core/Uniques.scala
+++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package core
-import Types._, Contexts._, util.Stats._, Hashable._, Names._
+import Types.*, Contexts.*, util.Stats.*, Hashable.*, Names.*
import config.Config
-import Decorators._
+import Decorators.*
import util.{WeakHashSet, Stats}
import WeakHashSet.Entry
import scala.annotation.tailrec
diff --git a/compiler/src/dotty/tools/dotc/core/Variances.scala b/compiler/src/dotty/tools/dotc/core/Variances.scala
index 2401b43c8e17..e18a31e46769 100644
--- a/compiler/src/dotty/tools/dotc/core/Variances.scala
+++ b/compiler/src/dotty/tools/dotc/core/Variances.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package core
-import Types._, Contexts._, Flags._, Symbols._, Annotations._
+import Types.*, Contexts.*, Flags.*, Symbols.*, Annotations.*
import TypeApplications.TypeParamInfo
-import Decorators._
+import Decorators.*
object Variances {
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
index 4aa60d973264..6ad71c5fd1ce 100644
--- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
@@ -331,7 +331,7 @@ object ClassfileConstants {
inline val impdep1 = 0xfe
inline val impdep2 = 0xff
- import Flags._
+ import Flags.*
abstract class FlagTranslation {
protected def baseFlags(jflags: Int): FlagSet = EmptyFlags
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
index 5e816502f359..93ebcfeee62a 100644
--- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -7,11 +7,11 @@ import scala.language.unsafeNulls
import dotty.tools.tasty.{ TastyReader, TastyHeaderUnpickler }
-import Contexts._, Symbols._, Types._, Names._, StdNames._, NameOps._, Scopes._, Decorators._
-import SymDenotations._, unpickleScala2.Scala2Unpickler._, Constants._, Annotations._, util.Spans._
-import Phases._
+import Contexts.*, Symbols.*, Types.*, Names.*, StdNames.*, NameOps.*, Scopes.*, Decorators.*
+import SymDenotations.*, unpickleScala2.Scala2Unpickler.*, Constants.*, Annotations.*, util.Spans.*
+import Phases.*
import ast.{ tpd, untpd }
-import ast.tpd._, util._
+import ast.tpd.*, util.*
import java.io.IOException
import java.lang.Integer.toHexString
@@ -25,9 +25,33 @@ import io.{AbstractFile, ZipArchive}
import scala.util.control.NonFatal
import dotty.tools.dotc.classpath.FileUtils.classToTasty
+import scala.compiletime.uninitialized
+
object ClassfileParser {
- import ClassfileConstants._
+ object Header:
+ opaque type Version = Long
+
+ object Version:
+ val Unknown: Version = -1L
+
+ def brokenVersionAddendum(classfileVersion: Version)(using Context): String =
+ if classfileVersion.exists then
+ val (maj, min) = (classfileVersion.majorVersion, classfileVersion.minorVersion)
+ val scalaVersion = config.Properties.versionNumberString
+ i""" (version $maj.$min),
+ | please check the JDK compatibility of your Scala version ($scalaVersion)"""
+ else
+ ""
+
+ def apply(major: Int, minor: Int): Version =
+ (major.toLong << 32) | (minor.toLong & 0xFFFFFFFFL)
+ extension (version: Version)
+ def exists: Boolean = version != Unknown
+ def majorVersion: Int = (version >> 32).toInt
+ def minorVersion: Int = (version & 0xFFFFFFFFL).toInt
+
+ import ClassfileConstants.*
/** Marker trait for unpicklers that can be embedded in classfiles. */
trait Embedded
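`Header.Version` packs the class-file major and minor versions into one opaque `Long`: major in the high 32 bits, minor in the low 32 bits, with `-1L` reserved for `Unknown`. The same packing scheme demonstrated standalone (hypothetical demo, mirroring the extension methods above):

def pack(major: Int, minor: Int): Long =
  (major.toLong << 32) | (minor.toLong & 0xFFFFFFFFL)

val v = pack(65, 0)                   // class-file version 65.0 (JDK 21)
assert((v >> 32).toInt == 65)         // majorVersion
assert((v & 0xFFFFFFFFL).toInt == 0)  // minorVersion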
@@ -55,6 +79,20 @@ object ClassfileParser {
}
}
+ private[classfile] def parseHeader(classfile: AbstractFile)(using in: DataReader): Header.Version = {
+ val magic = in.nextInt
+ if (magic != JAVA_MAGIC)
+ throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}")
+ val minorVersion = in.nextChar.toInt
+ val majorVersion = in.nextChar.toInt
+ if ((majorVersion < JAVA_MAJOR_VERSION) ||
+ ((majorVersion == JAVA_MAJOR_VERSION) &&
+ (minorVersion < JAVA_MINOR_VERSION)))
+ throw new IOException(
+ s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
+ Header.Version(majorVersion, minorVersion)
+ }
+
abstract class AbstractConstantPool(using in: DataReader) {
protected val len = in.nextChar
protected val starts = new Array[Int](len)
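`parseHeader` is now a static helper so that `ClassfileTastyUUIDParser` can reuse it (see below); it checks the `0xCAFEBABE` magic and the minimum supported version before anything else is read. The same check sketched with plain `java.io` (hypothetical file handling, not the compiler's `DataReader`):

import java.io.{DataInputStream, FileInputStream, IOException}

def classfileVersion(path: String): (Int, Int) =
  val in = new DataInputStream(new FileInputStream(path))
  try
    val magic = in.readInt()
    if magic != 0xCAFEBABE then
      throw new IOException(f"wrong magic 0x$magic%08x, should be 0xCAFEBABE")
    val minor = in.readUnsignedShort()   // minor comes first in the class-file format
    val major = in.readUnsignedShort()
    (major, minor)                       // e.g. (52, 0) for Java 8, (61, 0) for Java 17
  finally in.close()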
@@ -232,19 +270,20 @@ class ClassfileParser(
classRoot: ClassDenotation,
moduleRoot: ClassDenotation)(ictx: Context) {
- import ClassfileConstants._
- import ClassfileParser._
+ import ClassfileConstants.*
+ import ClassfileParser.*
protected val staticModule: Symbol = moduleRoot.sourceModule(using ictx)
- protected val instanceScope: MutableScope = newScope(0) // the scope of all instance definitions
- protected val staticScope: MutableScope = newScope(0) // the scope of all static definitions
- protected var pool: ConstantPool = _ // the classfile's constant pool
+ protected val instanceScope: MutableScope = newScope(0) // the scope of all instance definitions
+ protected val staticScope: MutableScope = newScope(0) // the scope of all static definitions
+ protected var pool: ConstantPool = uninitialized // the classfile's constant pool
- protected var currentClassName: SimpleName = _ // JVM name of the current class
+ protected var currentClassName: SimpleName = uninitialized // JVM name of the current class
protected var classTParams: Map[Name, Symbol] = Map()
private var Scala2UnpicklingMode = Mode.Scala2Unpickling
+ private var classfileVersion: Header.Version = Header.Version.Unknown
classRoot.info = NoLoader().withDecls(instanceScope)
moduleRoot.info = NoLoader().withDecls(staticScope).withSourceModule(staticModule)
@@ -257,7 +296,7 @@ class ClassfileParser(
def run()(using Context): Option[Embedded] = try ctx.base.reusableDataReader.withInstance { reader =>
implicit val reader2 = reader.reset(classfile)
report.debuglog("[class] >> " + classRoot.fullName)
- parseHeader()
+ classfileVersion = parseHeader(classfile)
this.pool = new ConstantPool
val res = parseClass()
this.pool = null
@@ -266,22 +305,11 @@ class ClassfileParser(
catch {
case e: RuntimeException =>
if (ctx.debug) e.printStackTrace()
+ val addendum = Header.Version.brokenVersionAddendum(classfileVersion)
throw new IOException(
- i"""class file ${classfile.canonicalPath} is broken, reading aborted with ${e.getClass}
- |${Option(e.getMessage).getOrElse("")}""")
- }
-
- private def parseHeader()(using in: DataReader): Unit = {
- val magic = in.nextInt
- if (magic != JAVA_MAGIC)
- throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}")
- val minorVersion = in.nextChar.toInt
- val majorVersion = in.nextChar.toInt
- if ((majorVersion < JAVA_MAJOR_VERSION) ||
- ((majorVersion == JAVA_MAJOR_VERSION) &&
- (minorVersion < JAVA_MINOR_VERSION)))
- throw new IOException(
- s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
+ i""" class file ${classfile.canonicalPath} is broken$addendum,
+ | reading aborted with ${e.getClass}:
+ | ${Option(e.getMessage).getOrElse("")}""")
}
/** Return the class symbol of the given name. */
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala
index a9c91a68bb60..e2220e40c6b4 100644
--- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileTastyUUIDParser.scala
@@ -2,30 +2,34 @@ package dotty.tools.dotc
package core.classfile
import scala.language.unsafeNulls
-
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Names._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
-import dotty.tools.dotc.util._
+import scala.compiletime.uninitialized
+
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.util.*
import dotty.tools.io.AbstractFile
import dotty.tools.tasty.TastyReader
+import ClassfileParser.Header
+
import java.io.IOException
import java.lang.Integer.toHexString
import java.util.UUID
class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) {
- import ClassfileConstants._
+ import ClassfileConstants.*
- private var pool: ConstantPool = _ // the classfile's constant pool
+ private var pool: ConstantPool = uninitialized // the classfile's constant pool
+ private var classfileVersion: Header.Version = Header.Version.Unknown
def checkTastyUUID(tastyUUID: UUID)(using Context): Unit = try ctx.base.reusableDataReader.withInstance { reader =>
implicit val reader2 = reader.reset(classfile)
- parseHeader()
+ this.classfileVersion = ClassfileParser.parseHeader(classfile)
this.pool = new ConstantPool
checkTastyAttr(tastyUUID)
this.pool = null
@@ -33,22 +37,11 @@ class ClassfileTastyUUIDParser(classfile: AbstractFile)(ictx: Context) {
catch {
case e: RuntimeException =>
if (ctx.debug) e.printStackTrace()
+ val addendum = Header.Version.brokenVersionAddendum(classfileVersion)
throw new IOException(
- i"""class file ${classfile.canonicalPath} is broken, reading aborted with ${e.getClass}
- |${Option(e.getMessage).getOrElse("")}""")
- }
-
- private def parseHeader()(using in: DataReader): Unit = {
- val magic = in.nextInt
- if (magic != JAVA_MAGIC)
- throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}")
- val minorVersion = in.nextChar.toInt
- val majorVersion = in.nextChar.toInt
- if ((majorVersion < JAVA_MAJOR_VERSION) ||
- ((majorVersion == JAVA_MAJOR_VERSION) &&
- (minorVersion < JAVA_MINOR_VERSION)))
- throw new IOException(
- s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
+ i""" class file ${classfile.canonicalPath} is broken$addendum,
+ | reading aborted with ${e.getClass}:
+ | ${Option(e.getMessage).getOrElse("")}""")
}
private def checkTastyAttr(tastyUUID: UUID)(using ctx: Context, in: DataReader): Unit = {
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala
index eb0d140df51e..1bbea6447bf3 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala
@@ -13,7 +13,7 @@ import TastyBuffer.Addr
import java.nio.charset.StandardCharsets
class CommentUnpickler(reader: TastyReader) {
- import reader._
+ import reader.*
private[tasty] lazy val comments: HashMap[Addr, Comment] = {
val comments = new HashMap[Addr, Comment]
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
index b35c5c9f1acc..bb818edc1f82 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
@@ -5,9 +5,9 @@ package tasty
import scala.language.unsafeNulls
-import Contexts._, SymDenotations._, Decorators._
+import Contexts.*, SymDenotations.*, Decorators.*
import dotty.tools.dotc.ast.tpd
-import TastyUnpickler._
+import TastyUnpickler.*
import classfile.ClassfileParser
import Names.SimpleName
import TreeUnpickler.UnpickleMode
@@ -42,8 +42,8 @@ object DottyUnpickler {
* @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree)
*/
class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider {
- import tpd._
- import DottyUnpickler._
+ import tpd.*
+ import DottyUnpickler.*
val unpickler: TastyUnpickler = new TastyUnpickler(bytes)
private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler)
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
index 1ddcf9afe1dc..5e2aee33859c 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
@@ -4,17 +4,17 @@ package core
package tasty
import dotty.tools.tasty.TastyBuffer
-import TastyBuffer._
+import TastyBuffer.*
import collection.mutable
import Names.{Name, chrs, SimpleName, DerivedName, TypeName}
-import NameKinds._
-import NameOps._
+import NameKinds.*
+import NameOps.*
import scala.io.Codec
import NameTags.{SIGNED, TARGETSIGNED}
class NameBuffer extends TastyBuffer(10000) {
- import NameBuffer._
+ import NameBuffer.*
private val nameRefs = new mutable.LinkedHashMap[Name, NameRef]
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
index 924b87bec003..86076517021a 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
@@ -5,19 +5,19 @@ package tasty
import dotty.tools.tasty.TastyFormat.{SOURCE, PositionsSection}
import dotty.tools.tasty.TastyBuffer
-import TastyBuffer._
+import TastyBuffer.*
-import ast._
+import ast.*
import Trees.WithLazyFields
import util.{SourceFile, NoSource}
-import core._
-import Annotations._, Decorators._
+import core.*
+import Annotations.*, Decorators.*
import collection.mutable
-import util.Spans._
+import util.Spans.*
import reporting.Message
object PositionPickler:
- import ast.tpd._
+ import ast.tpd.*
// Note: This could be just TreeToAddr => Addr if functions are specialized to value classes.
// We use a SAM type to avoid boxing of Addr
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
index 9c66e43eae80..b7f88155e3c8 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
@@ -3,20 +3,22 @@ package dotc
package core
package tasty
+import scala.compiletime.uninitialized
+
import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader}
import TastyFormat.SOURCE
import TastyBuffer.{Addr, NameRef}
-import util.Spans._
+import util.Spans.*
import Names.TermName
/** Unpickler for tree positions */
class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) {
- import reader._
+ import reader.*
- private var myLineSizes: Array[Int] = _
- private var mySpans: util.HashMap[Addr, Span] = _
- private var mySourcePaths: util.HashMap[Addr, String] = _
+ private var myLineSizes: Array[Int] = uninitialized
+ private var mySpans: util.HashMap[Addr, Span] = uninitialized
+ private var mySourcePaths: util.HashMap[Addr, String] = uninitialized
private var isDefined = false
def ensureDefined(): Unit = {
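`PositionUnpickler` pairs the `uninitialized` fields with an explicit `isDefined` flag, a manual lazy-initialization pattern (several fields are populated together, so a single flag is cheaper than one `lazy val` per field). The pattern in isolation, with a hypothetical class:

import scala.compiletime.uninitialized

class LazyTable:
  private var table: Array[Int] = uninitialized
  private var isDefined = false
  private def ensureDefined(): Unit =
    if !isDefined then
      table = Array.tabulate(16)(i => i * i)   // fill on first use
      isDefined = true
  def apply(i: Int): Int = { ensureDefined(); table(i) }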
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala
index c938868a3c48..0a7068b65445 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala
@@ -5,10 +5,10 @@ package tasty
import dotty.tools.tasty.{TastyBuffer, TastyReader}
import TastyBuffer.NameRef
-import Contexts._, Decorators._
+import Contexts.*, Decorators.*
import Names.TermName
import StdNames.nme
-import TastyUnpickler._
+import TastyUnpickler.*
import dotty.tools.tasty.TastyFormat.ASTsSection
/** Reads the package and class name of the class contained in this TASTy */
@@ -21,9 +21,9 @@ class TastyClassName(bytes: Array[Byte]) {
def readName(): Option[(TermName, TermName)] = unpickle(new TreeSectionUnpickler)
class TreeSectionUnpickler extends SectionUnpickler[(TermName, TermName)](ASTsSection) {
- import dotty.tools.tasty.TastyFormat._
+ import dotty.tools.tasty.TastyFormat.*
def unpickle(reader: TastyReader, tastyName: NameTable): (TermName, TermName) = {
- import reader._
+ import reader.*
def readNames(packageName: TermName): (TermName, TermName) = {
val tag = readByte()
if (tag >= firstLengthTreeTag) {
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
index 4f1e84ac9184..556265c66ce9 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
@@ -6,12 +6,12 @@ package tasty
import scala.language.unsafeNulls
import dotty.tools.tasty.{TastyBuffer, TastyFormat, TastyHash}
-import TastyFormat._
-import TastyBuffer._
+import TastyFormat.*
+import TastyBuffer.*
import collection.mutable
import core.Symbols.ClassSymbol
-import Decorators._
+import Decorators.*
object TastyPickler {
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
index 5876b69edfde..9fe3fb282aa2 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
@@ -5,9 +5,9 @@ package tasty
import dotty.tools.tasty.{TastyBuffer, TastyReader}
import TastyBuffer.NameRef
-import Contexts._, Decorators._
+import Contexts.*, Decorators.*
import Names.Name
-import TastyUnpickler._
+import TastyUnpickler.*
import util.Spans.offsetToInt
import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection}
import java.nio.file.{Files, Paths}
@@ -98,12 +98,12 @@ class TastyPrinter(bytes: Array[Byte]) {
}
class TreeSectionUnpickler extends SectionUnpickler[String](ASTsSection) {
- import dotty.tools.tasty.TastyFormat._
+ import dotty.tools.tasty.TastyFormat.*
private val sb: StringBuilder = new StringBuilder
def unpickle(reader: TastyReader, tastyName: NameTable): String = {
- import reader._
+ import reader.*
var indent = 0
def newLine() = {
val length = treeStr("%5d".format(index(currentAddr) - index(startAddr)))
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
index 70bdec7780e2..3a6f1e02a705 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
@@ -4,13 +4,14 @@ package tasty
import scala.language.unsafeNulls
-import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler}
-import TastyFormat.NameTags._, TastyFormat.nameTagToString
+import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler, UnpicklerConfig}
+import TastyHeaderUnpickler.TastyVersion
+import TastyFormat.NameTags.*, TastyFormat.nameTagToString
import TastyBuffer.NameRef
import scala.collection.mutable
import Names.{TermName, termName, EmptyTermName}
-import NameKinds._
+import NameKinds.*
object TastyUnpickler {
@@ -24,12 +25,45 @@ object TastyUnpickler {
def apply(ref: NameRef): TermName = names(ref.index)
def contents: Iterable[TermName] = names
}
+
+ trait Scala3CompilerConfig extends UnpicklerConfig:
+ private def asScala3Compiler(version: TastyVersion): String =
+ if (version.major == 28) {
+ // scala 3.x.y series
+ if (version.experimental > 0)
+      // e.g. someone using 3.4.0 to read 3.4.1-RC1-NIGHTLY; in that case we should suggest the 3.4 nightly.
+ s"the same nightly or snapshot Scala 3.${version.minor - 1} compiler"
+ else s"a Scala 3.${version.minor}.0 compiler or newer"
+ }
+    else if (version.experimental > 0) "the same Scala compiler" // unknown major version; just suggest the same compiler
+    else "a more recent Scala compiler" // unknown major version; just suggest a newer one
+
+  /** The description of the upgraded Scala compiler that can read the given TASTy version */
+ final def upgradedReaderTool(version: TastyVersion): String = asScala3Compiler(version)
+
+  /** The description of the upgraded Scala compiler that can produce the given TASTy version */
+ final def upgradedProducerTool(version: TastyVersion): String = asScala3Compiler(version)
+
+ final def recompileAdditionalInfo: String = """
+ | Usually this means that the library dependency containing this file should be updated.""".stripMargin
+
+ final def upgradeAdditionalInfo(fileVersion: TastyVersion): String =
+ if (fileVersion.isExperimental && experimentalVersion == 0) {
+ """
| Note that you are using a stable compiler, which cannot read experimental TASTy.""".stripMargin
+ }
+ else ""
+ end Scala3CompilerConfig
+
+  /** A config for the TASTy reader of a Scala 3 compiler */
+ val scala3CompilerConfig: UnpicklerConfig = new Scala3CompilerConfig with UnpicklerConfig.DefaultTastyVersion {}
+
}
-import TastyUnpickler._
+import TastyUnpickler.*
class TastyUnpickler(reader: TastyReader) {
- import reader._
+ import reader.*
def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
@@ -88,7 +122,7 @@ class TastyUnpickler(reader: TastyReader) {
result
}
- new TastyHeaderUnpickler(reader).readHeader()
+ new TastyHeaderUnpickler(scala3CompilerConfig, reader).readHeader()
locally {
until(readEnd()) { nameAtRef.add(readNameContents()) }
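The `Scala3CompilerConfig` introduced above drives the wording of TASTy version-mismatch errors. A standalone model of its message selection, with a hypothetical `TastyVersion` case class standing in for `TastyHeaderUnpickler.TastyVersion`:

```scala
// Simplified model of asScala3Compiler; TastyVersion is a stand-in type.
final case class TastyVersion(major: Int, minor: Int, experimental: Int)

def asScala3Compiler(version: TastyVersion): String =
  if version.major == 28 then // TASTy major 28 is the Scala 3.x.y series
    if version.experimental > 0 then
      s"the same nightly or snapshot Scala 3.${version.minor - 1} compiler"
    else s"a Scala 3.${version.minor}.0 compiler or newer"
  else if version.experimental > 0 then "the same Scala compiler"
  else "a more recent Scala compiler"

@main def versionDemo(): Unit =
  println(asScala3Compiler(TastyVersion(28, 4, 0))) // a Scala 3.4.0 compiler or newer
```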
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
index a04f05cb820c..2e4fe9967d6a 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -5,16 +5,16 @@ package tasty
import scala.language.unsafeNulls
-import dotty.tools.tasty.TastyFormat._
-import dotty.tools.tasty.TastyBuffer._
+import dotty.tools.tasty.TastyFormat.*
+import dotty.tools.tasty.TastyBuffer.*
-import ast.Trees._
+import ast.Trees.*
import ast.{untpd, tpd}
-import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, Flags._
+import Contexts.*, Symbols.*, Types.*, Names.*, Constants.*, Decorators.*, Annotations.*, Flags.*
import Comments.{Comment, CommentsContext}
-import NameKinds._
+import NameKinds.*
import StdNames.nme
-import transform.SymUtils._
+import transform.SymUtils.*
import config.Config
import collection.mutable
import reporting.{Profile, NoProfile}
@@ -27,9 +27,9 @@ object TreePickler:
class TreePickler(pickler: TastyPickler) {
val buf: TreeBuffer = new TreeBuffer
pickler.newSection(ASTsSection, buf)
- import buf._
+ import buf.*
import pickler.nameBuffer.nameIndex
- import tpd._
+ import tpd.*
import TreePickler.*
private val symRefs = Symbols.MutableSymbolMap[Addr](256)
@@ -734,7 +734,7 @@ class TreePickler(pickler: TastyPickler) {
}
def pickleModifiers(sym: Symbol, mdef: MemberDef)(using Context): Unit = {
- import Flags._
+ import Flags.*
var flags = sym.flags
val privateWithin = sym.privateWithin
if (privateWithin.exists) {
@@ -749,7 +749,7 @@ class TreePickler(pickler: TastyPickler) {
}
def pickleFlags(flags: FlagSet, isTerm: Boolean)(using Context): Unit = {
- import Flags._
+ import Flags.*
def writeModTag(tag: Int) = {
assert(isModifierTag(tag))
writeByte(tag)
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
index 1748b1edd08e..6a02605b6ed7 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -6,46 +6,47 @@ package tasty
import scala.language.unsafeNulls
import Comments.CommentsContext
-import Contexts._
-import Symbols._
-import Types._
-import Scopes._
-import SymDenotations._
-import Denotations._
-import Names._
-import NameOps._
-import StdNames._
-import Flags._
-import Constants._
-import Annotations._
-import NameKinds._
-import NamerOps._
-import ContextOps._
+import Contexts.*
+import Symbols.*
+import Types.*
+import Scopes.*
+import SymDenotations.*
+import Denotations.*
+import Names.*
+import NameOps.*
+import StdNames.*
+import Flags.*
+import Constants.*
+import Annotations.*
+import NameKinds.*
+import NamerOps.*
+import ContextOps.*
import Variances.Invariant
import TastyUnpickler.NameTable
import typer.ConstFold
import typer.Checking.checkNonCyclic
-import typer.Nullables._
-import util.Spans._
+import typer.Nullables.*
+import util.Spans.*
import util.{SourceFile, Property}
import ast.{Trees, tpd, untpd}
-import Trees._
-import Decorators._
-import transform.SymUtils._
+import Trees.*
+import Decorators.*
+import transform.SymUtils.*
import dotty.tools.dotc.quoted.QuotePatterns
import dotty.tools.tasty.{TastyBuffer, TastyReader}
-import TastyBuffer._
+import TastyBuffer.*
import scala.annotation.{switch, tailrec}
import scala.collection.mutable.ListBuffer
import scala.collection.mutable
import config.Printers.pickling
-import dotty.tools.tasty.TastyFormat._
+import dotty.tools.tasty.TastyFormat.*
import scala.annotation.constructorOnly
import scala.annotation.internal.sharable
+import scala.compiletime.uninitialized
/** Unpickler for typed trees
* @param reader the reader from which to unpickle
@@ -56,8 +57,8 @@ class TreeUnpickler(reader: TastyReader,
nameAtRef: NameTable,
posUnpicklerOpt: Option[PositionUnpickler],
commentUnpicklerOpt: Option[CommentUnpickler]) {
- import TreeUnpickler._
- import tpd._
+ import TreeUnpickler.*
+ import tpd.*
/** A map from addresses of definition entries to the symbols they define */
private val symAtAddr = new mutable.HashMap[Addr, Symbol]
@@ -88,7 +89,7 @@ class TreeUnpickler(reader: TastyReader,
private var seenRoots: Set[Symbol] = Set()
/** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */
- private var ownerTree: OwnerTree = _
+ private var ownerTree: OwnerTree = uninitialized
/** Was the unpickled class compiled with pureFunctions? */
private var withPureFuns: Boolean = false
@@ -123,7 +124,7 @@ class TreeUnpickler(reader: TastyReader,
}
class Completer(reader: TastyReader)(using @constructorOnly _ctx: Context) extends LazyType {
- import reader._
+ import reader.*
val owner = ctx.owner
val mode = ctx.mode
val source = ctx.source
@@ -150,7 +151,7 @@ class TreeUnpickler(reader: TastyReader,
}
class TreeReader(val reader: TastyReader) {
- import reader._
+ import reader.*
def forkAt(start: Addr): TreeReader = new TreeReader(subReader(start, endAddr))
def fork: TreeReader = forkAt(currentAddr)
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
index 2aeb1bdeefcc..e4c253fddc53 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
@@ -3,7 +3,7 @@ package dotc
package core
package unpickleScala2
-import Flags._
+import Flags.*
/** Variable length byte arrays, with methods for basic pickling and unpickling.
*
@@ -195,7 +195,7 @@ object PickleBuffer {
private type FlagMap = Array[Array[Long]]
private val (scalaTermFlagMap, scalaTypeFlagMap) = {
- import Scala2Flags._
+ import Scala2Flags.*
val corr = Map(
PROTECTED_PKL -> Protected,
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala
index cc2d7dd7ee56..78d1666ad580 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala
@@ -3,8 +3,8 @@ package dotc
package core
package unpickleScala2
-import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Phases._
-import Decorators._
+import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, StdNames.*, Phases.*
+import Decorators.*
import scala.collection.mutable.ListBuffer
/** Erasure logic specific to Scala 2 symbols. */
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
index 3e211e75b73b..3b51496e4ff1 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
@@ -9,30 +9,30 @@ import java.io.IOException
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
-import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
-import StdNames._, Denotations._, NameOps._, Flags._, Constants._, Annotations._, Phases._
+import Contexts.*, Symbols.*, Types.*, Scopes.*, SymDenotations.*, Names.*, NameOps.*
+import StdNames.*, Denotations.*, NameOps.*, Flags.*, Constants.*, Annotations.*, Phases.*
import NameKinds.{Scala2MethodNameKinds, SuperAccessorName, ExpandedName}
-import util.Spans._
-import dotty.tools.dotc.ast.{tpd, untpd}, ast.tpd._
+import util.Spans.*
+import dotty.tools.dotc.ast.{tpd, untpd}, ast.tpd.*
import ast.untpd.Modifiers
import backend.sjs.JSDefinitions
-import printing.Texts._
+import printing.Texts.*
import printing.Printer
import io.AbstractFile
-import util.common._
+import util.common.*
import util.NoSourcePosition
import typer.Checking.checkNonCyclic
-import typer.Nullables._
-import transform.SymUtils._
-import PickleBuffer._
-import PickleFormat._
-import Decorators._
-import TypeApplications._
+import typer.Nullables.*
+import transform.SymUtils.*
+import PickleBuffer.*
+import PickleFormat.*
+import Decorators.*
+import TypeApplications.*
import classfile.ClassfileParser
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.annotation.switch
-import reporting._
+import reporting.*
object Scala2Unpickler {
@@ -146,7 +146,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
// print("unpickling "); showPickled() // !!! DEBUG
- import Scala2Unpickler._
+ import Scala2Unpickler.*
val moduleRoot: SymDenotation = inContext(ictx) { moduleClassRoot.sourceModule.denot }
assert(moduleRoot.isTerm)
diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala
index aa7a586d4b57..88893709b8bd 100644
--- a/compiler/src/dotty/tools/dotc/coverage/Location.scala
+++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc
package coverage
-import ast.tpd._
+import ast.tpd.*
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.Flags.*
import java.nio.file.Path
diff --git a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala
index 85a56b9f1d15..16e7cc9c7adc 100644
--- a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala
@@ -8,7 +8,7 @@ import java.nio.charset.StandardCharsets
import scala.io.Codec
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.Phases.Phase
import dotty.tools.io.File
diff --git a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala
index c148ff5f9bca..c1bd6b6778fd 100644
--- a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala
+++ b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala
@@ -4,10 +4,10 @@ package decompiler
import scala.language.unsafeNulls
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.*
import dotty.tools.dotc.core.tasty.TastyHTMLPrinter
-import dotty.tools.dotc.reporting._
+import dotty.tools.dotc.reporting.*
import dotty.tools.io.AbstractFile
import scala.quoted.runtime.impl.QuotesImpl
diff --git a/compiler/src/dotty/tools/dotc/decompiler/Main.scala b/compiler/src/dotty/tools/dotc/decompiler/Main.scala
index 3cc94f782793..ecd067d3debf 100644
--- a/compiler/src/dotty/tools/dotc/decompiler/Main.scala
+++ b/compiler/src/dotty/tools/dotc/decompiler/Main.scala
@@ -3,7 +3,7 @@ package dotty.tools.dotc.decompiler
import java.nio.file.Files
import dotty.tools.dotc
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.io.AbstractFile
/** Main class of the `dotc -decompile` decompiler.
diff --git a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala
index 62f3e75d2001..7f396654749e 100644
--- a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala
+++ b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala
@@ -1,6 +1,6 @@
package dotty.tools.dotc.decompiler
-import dotty.tools.dotc.fromtasty._
+import dotty.tools.dotc.fromtasty.*
import dotty.tools.dotc.core.Phases.Phase
/** Compiler from TASTy to a user-readable high-level text representation
diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala
index f032f8d4d065..4969882b7766 100644
--- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala
+++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala
@@ -2,13 +2,13 @@ package dotty.tools
package dotc
package fromtasty
-import core._
-import Decorators._
-import Contexts._
+import core.*
+import Decorators.*
+import Contexts.*
import Symbols.{Symbol, ClassSymbol}
import SymDenotations.ClassDenotation
import Denotations.staticRef
-import NameOps._
+import NameOps.*
import ast.Trees.Tree
import Phases.Phase
diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala
index 923892b62f13..c0adf454b063 100644
--- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala
+++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala
@@ -2,8 +2,8 @@ package dotty.tools
package dotc
package fromtasty
-import core._
-import Contexts._
+import core.*
+import Contexts.*
import Phases.Phase
class TASTYCompiler extends Compiler {
diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
index fb0abe3332ed..2f4ecad8859d 100644
--- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
+++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
@@ -5,7 +5,7 @@ package fromtasty
import scala.language.unsafeNulls
import io.{JarArchive, AbstractFile, Path}
-import core.Contexts._
+import core.Contexts.*
import core.Decorators.em
import java.io.File
diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala
index 8d594448dc2a..07472ee9b4dd 100644
--- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala
+++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala
@@ -28,7 +28,7 @@ import scala.annotation.constructorOnly
/** General support for inlining */
object Inliner:
- import tpd._
+ import tpd.*
private[inlines] type DefBuffer = mutable.ListBuffer[ValOrDefDef]
@@ -143,8 +143,8 @@ end Inliner
* @param rhsToInline the body of the inlineable method that replaces the call.
*/
class Inliner(val call: tpd.Tree)(using Context):
- import tpd._
- import Inliner._
+ import tpd.*
+ import Inliner.*
private val methPart = funPart(call)
protected val callTypeArgs = typeArgss(call).flatten
@@ -734,7 +734,7 @@ class Inliner(val call: tpd.Tree)(using Context):
*/
class InlineTyper(initialErrorCount: Int, @constructorOnly nestingLevel: Int = ctx.nestingLevel + 1)
extends ReTyper(nestingLevel):
- import reducer._
+ import reducer.*
override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: SrcPos)(using Context): Type = {
tpe match {
diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala
index 25e9b1480370..0accbad5b473 100644
--- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala
+++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala
@@ -5,7 +5,7 @@ package inlines
import ast.*, core.*
import Flags.*, Symbols.*, Types.*, Decorators.*, Constants.*, Contexts.*
import StdNames.{tpnme, nme}
-import transform.SymUtils._
+import transform.SymUtils.*
import typer.*
import NameKinds.BodyRetainerName
import SymDenotations.SymDenotation
@@ -22,7 +22,7 @@ import util.Spans.Span
/** Support for querying inlineable methods and for inlining calls to such methods */
object Inlines:
- import tpd._
+ import tpd.*
/** An exception signalling that an inline info cannot be computed due to a
* cyclic reference. i14772.scala shows a case where this happens.
@@ -395,7 +395,7 @@ object Inlines:
* @param rhsToInline the body of the inlineable method that replaces the call.
*/
private class InlineCall(call: tpd.Tree)(using Context) extends Inliner(call):
- import tpd._
+ import tpd.*
import Inlines.*
/** The Inlined node representing the inlined call */
diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala
index 060c8d21f390..10b55d69bf37 100644
--- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala
+++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala
@@ -3,20 +3,20 @@ package dotc
package inlines
import dotty.tools.dotc.ast.{Trees, tpd, untpd}
-import Trees._
-import core._
-import Flags._
-import Symbols._
-import Flags._
-import Types._
-import Decorators._
+import Trees.*
+import core.*
+import Flags.*
+import Symbols.*
+import Flags.*
+import Types.*
+import Decorators.*
import StdNames.nme
-import Contexts._
+import Contexts.*
import Names.{Name, TermName}
import NameKinds.{InlineAccessorName, UniqueInlineName}
import inlines.Inlines
-import NameOps._
-import Annotations._
+import NameOps.*
+import Annotations.*
import transform.{AccessProxies, Splicer}
import staging.CrossStageSafety
import transform.SymUtils.*
@@ -25,7 +25,7 @@ import util.Property
import staging.StagingLevel
object PrepareInlineable {
- import tpd._
+ import tpd.*
private val InlineAccessorsKey = new Property.Key[InlineAccessors]
diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala
index 8fb844f1f333..6e91254c2d72 100644
--- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala
@@ -3,15 +3,15 @@ package dotty.tools.dotc.interactive
import dotty.tools.dotc.ast.untpd
import dotty.tools.dotc.ast.NavigateAST
import dotty.tools.dotc.config.Printers.interactiv
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
import dotty.tools.dotc.core.Denotations.SingleDenotation
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.Names.{Name, TermName}
import dotty.tools.dotc.core.NameKinds.SimpleNameKind
-import dotty.tools.dotc.core.NameOps._
+import dotty.tools.dotc.core.NameOps.*
import dotty.tools.dotc.core.Phases
-import dotty.tools.dotc.core.Scopes._
+import dotty.tools.dotc.core.Scopes.*
import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol, defn, newSymbol}
import dotty.tools.dotc.core.StdNames.nme
import dotty.tools.dotc.core.SymDenotations.SymDenotation
@@ -42,7 +42,7 @@ case class Completion(label: String, description: String, symbols: List[Symbol])
object Completion:
- import dotty.tools.dotc.ast.tpd._
+ import dotty.tools.dotc.ast.tpd.*
/** Get possible completions from tree at `pos`
*
diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala
index fd6d426f39bb..6c8e3b61cd01 100644
--- a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala
@@ -4,21 +4,21 @@ package interactive
import scala.language.unsafeNulls
-import scala.collection._
+import scala.collection.*
import ast.{NavigateAST, Trees, tpd, untpd}
-import core._
-import Decorators._, ContextOps._
-import Contexts._, Flags._, Names._, NameOps._, Symbols._, Trees._, Types._
-import transform.SymUtils._
-import util.Spans._, util.SourceFile, util.SourcePosition
+import core.*
+import Decorators.*, ContextOps.*
+import Contexts.*, Flags.*, Names.*, NameOps.*, Symbols.*, Trees.*, Types.*
+import transform.SymUtils.*
+import util.Spans.*, util.SourceFile, util.SourcePosition
/** High-level API to get information out of typed trees, designed to be used by IDEs.
*
* @see `InteractiveDriver` to get typed trees from code.
*/
object Interactive {
- import ast.tpd._
+ import ast.tpd.*
object Include {
case class Set private[Include] (val bits: Int) extends AnyVal {
diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala
index 38a93125a342..af1484896c72 100644
--- a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala
@@ -2,10 +2,10 @@ package dotty.tools
package dotc
package interactive
-import core._
-import Phases._
-import parsing._
-import typer._
+import core.*
+import Phases.*
+import parsing.*
+import typer.*
class InteractiveCompiler extends Compiler {
// TODO: Figure out what phases should be run in IDEs
diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala
index 2a2860cd1ba3..b00cd1036018 100644
--- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala
@@ -5,28 +5,28 @@ package interactive
import scala.language.unsafeNulls
import java.net.URI
-import java.io._
-import java.nio.file._
+import java.io.*
+import java.nio.file.*
import java.nio.file.attribute.BasicFileAttributes
import java.nio.charset.StandardCharsets
-import java.util.zip._
+import java.util.zip.*
-import scala.collection._
+import scala.collection.*
import scala.io.Codec
import dotty.tools.io.AbstractFile
import ast.{Trees, tpd}
-import core._, core.Decorators._
-import Contexts._, Names._, NameOps._, Symbols._, SymDenotations._, Trees._, Types._
+import core.*, core.Decorators.*
+import Contexts.*, Names.*, NameOps.*, Symbols.*, SymDenotations.*, Trees.*, Types.*
import Denotations.staticRef
-import classpath._
-import reporting._
-import util._
+import classpath.*
+import reporting.*
+import util.*
/** A Driver subclass designed to be used from IDEs */
class InteractiveDriver(val settings: List[String]) extends Driver {
- import tpd._
+ import tpd.*
override def sourcesRequired: Boolean = false
@@ -148,7 +148,7 @@ class InteractiveDriver(val settings: List[String]) extends Driver {
def run(uri: URI, sourceCode: String): List[Diagnostic] = run(uri, SourceFile.virtual(uri, sourceCode))
def run(uri: URI, source: SourceFile): List[Diagnostic] = {
- import typer.ImportInfo._
+ import typer.ImportInfo.*
val previousCtx = myCtx
try {
diff --git a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala
index 60f01396e91e..5480d4a43043 100644
--- a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala
@@ -4,9 +4,9 @@ package interactive
import ast.tpd
-import core._
-import Contexts._, NameOps._, Symbols._, StdNames._
-import util._, util.Spans._
+import core.*
+import Contexts.*, NameOps.*, Symbols.*, StdNames.*
+import util.*, util.Spans.*
/**
* A `tree` coming from `source`
@@ -55,7 +55,7 @@ object SourceTree {
!sym.source.exists) // FIXME: We cannot deal with external projects yet
Nil
else {
- import ast.Trees._
+ import ast.Trees.*
def sourceTreeOfClass(tree: tpd.Tree): Option[SourceTree] = tree match {
case PackageDef(_, stats) =>
stats.flatMap(sourceTreeOfClass).headOption
diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala
index c63409d0d52b..aa0e9a47f775 100644
--- a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala
@@ -2,7 +2,9 @@ package dotty.tools
package dotc
package parsing
-import util.Chars._
+import util.Chars.*
+
+import scala.compiletime.uninitialized
abstract class CharArrayReader { self =>
@@ -16,7 +18,7 @@ abstract class CharArrayReader { self =>
protected def error(msg: String, offset: Int): Unit
/** the last read character */
- var ch: Char = _
+ var ch: Char = uninitialized
/** The offset one past the last read character */
var charOffset: Int = startFrom
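This file also picks up the patch's second recurring rewrite: the bare-underscore default `var x: T = _`, deprecated under `-source future`, becomes an explicit `scala.compiletime.uninitialized`. A minimal sketch:

```scala
import scala.compiletime.uninitialized

class Cursor:
  // Old form (deprecated under -source future): var ch: Char = _
  var ch: Char = uninitialized
  var offset: Int = 0 // ordinary initializers are unaffected
```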
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
index 6ec896dcb200..8e075acdf5e3 100644
--- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -6,31 +6,31 @@ import dotty.tools.dotc.core.Constants.Constant
import dotty.tools.dotc.core.Flags
import dotty.tools.dotc.core.Flags.FlagSet
-import JavaTokens._
-import JavaScanners._
+import JavaTokens.*
+import JavaScanners.*
import Scanners.Offset
-import Parsers._
-import core._
-import Contexts._
-import Names._
-import Types._
-import ast.Trees._
-import Decorators._
-import StdNames._
-import reporting._
+import Parsers.*
+import core.*
+import Contexts.*
+import Names.*
+import Types.*
+import ast.Trees.*
+import Decorators.*
+import StdNames.*
+import reporting.*
import dotty.tools.dotc.util.SourceFile
-import util.Spans._
+import util.Spans.*
import scala.collection.mutable.{ListBuffer, LinkedHashMap}
object JavaParsers {
- import ast.untpd._
+ import ast.untpd.*
class JavaParser(source: SourceFile)(using Context) extends ParserCommon(source) {
val definitions: Definitions = ctx.definitions
- import definitions._
+ import definitions.*
val in: JavaScanner = new JavaScanner(source)
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala
index 6a1d5d8b216c..f50dcdda438c 100644
--- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala
@@ -2,13 +2,13 @@ package dotty.tools
package dotc
package parsing
-import core.Contexts._
+import core.Contexts.*
import core.Names.SimpleName
-import Scanners._
+import Scanners.*
import util.SourceFile
-import JavaTokens._
+import JavaTokens.*
import scala.annotation.{switch, tailrec}
-import util.Chars._
+import util.Chars.*
import PartialFunction.cond
import core.Decorators.em
diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
index 48653ad96197..805c25ad40e4 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -8,35 +8,35 @@ import scala.annotation.internal.sharable
import scala.collection.mutable.ListBuffer
import scala.collection.immutable.BitSet
import util.{ SourceFile, SourcePosition, NoSourcePosition }
-import Tokens._
-import Scanners._
+import Tokens.*
+import Scanners.*
import xml.MarkupParsers.MarkupParser
-import core._
-import Flags._
-import Contexts._
-import Names._
+import core.*
+import Flags.*
+import Contexts.*
+import Names.*
import NameKinds.{WildcardParamName, QualifiedName}
-import NameOps._
+import NameOps.*
import ast.{Positioned, Trees}
-import ast.Trees._
-import StdNames._
-import util.Spans._
-import Constants._
+import ast.Trees.*
+import StdNames.*
+import util.Spans.*
+import Constants.*
import Symbols.NoSymbol
-import ScriptParsers._
-import Decorators._
+import ScriptParsers.*
+import Decorators.*
import util.Chars
import scala.annotation.tailrec
import rewrites.Rewrites.{patch, overlapsPatch}
-import reporting._
+import reporting.*
import config.Feature
import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports}
-import config.SourceVersion._
+import config.SourceVersion.*
import config.SourceVersion
object Parsers {
- import ast.untpd._
+ import ast.untpd.*
case class OpInfo(operand: Tree, operator: Ident, offset: Offset)
@@ -1233,7 +1233,7 @@ object Parsers {
case EXPOLIT => return Number(digits, NumberKind.Floating)
case _ =>
}
- import scala.util.FromDigits._
+ import scala.util.FromDigits.*
val value =
try token match {
case INTLIT => intFromDigits(digits, in.base)
@@ -1756,8 +1756,13 @@ object Parsers {
if in.token == LBRACE || in.token == INDENT then
t
else
- if sourceVersion.isAtLeast(future) then
- deprecationWarning(DeprecatedWithOperator(), withOffset)
+ val withSpan = Span(withOffset, withOffset + 4)
+ report.errorOrMigrationWarning(
+ DeprecatedWithOperator(rewriteNotice(`future-migration`)),
+ source.atSpan(withSpan),
+ from = future)
+ if sourceVersion == `future-migration` then
+ patch(source, withSpan, "&")
atSpan(startOffset(t)) { makeAndType(t, withType()) }
else t
@@ -2344,7 +2349,7 @@ object Parsers {
in.sourcePos(uscoreStart),
future)
if sourceVersion == `future-migration` then
- patch(source, Span(t.span.end, in.lastOffset), " *")
+ patch(source, Span(t.span.end, in.lastOffset), "*")
else if opStack.nonEmpty then
report.errorOrMigrationWarning(
em"""`_*` can be used only for last argument of method application.
@@ -2973,12 +2978,6 @@ object Parsers {
case p =>
p
- private def warnStarMigration(p: Tree) =
- report.errorOrMigrationWarning(
- em"The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead",
- in.sourcePos(startOffset(p)),
- from = future)
-
/** InfixPattern ::= SimplePattern {id [nl] SimplePattern}
*/
def infixPattern(): Tree =
@@ -3520,12 +3519,13 @@ object Parsers {
/** ‘*’ | ‘_’ */
def wildcardSelector() =
- if in.token == USCORE && sourceVersion.isAtLeast(future) then
+ if in.token == USCORE then
report.errorOrMigrationWarning(
em"`_` is no longer supported for a wildcard $exprName; use `*` instead${rewriteNotice(`future-migration`)}",
in.sourcePos(),
from = future)
- patch(source, Span(in.offset, in.offset + 1), "*")
+ if sourceVersion == `future-migration` then
+ patch(source, Span(in.offset, in.offset + 1), "*")
ImportSelector(atSpan(in.skipToken()) { Ident(nme.WILDCARD) })
/** 'given [InfixType]' */
@@ -3539,14 +3539,15 @@ object Parsers {
/** id [‘as’ (id | ‘_’)] */
def namedSelector(from: Ident) =
if in.token == ARROW || isIdent(nme.as) then
- if in.token == ARROW && sourceVersion.isAtLeast(future) then
+ if in.token == ARROW then
report.errorOrMigrationWarning(
em"The $exprName renaming `a => b` is no longer supported ; use `a as b` instead${rewriteNotice(`future-migration`)}",
in.sourcePos(),
from = future)
- patch(source, Span(in.offset, in.offset + 2),
- if testChar(in.offset - 1, ' ') && testChar(in.offset + 2, ' ') then "as"
- else " as ")
+ if sourceVersion == `future-migration` then
+ patch(source, Span(in.offset, in.offset + 2),
+ if testChar(in.offset - 1, ' ') && testChar(in.offset + 2, ' ') then "as"
+ else " as ")
atSpan(startOffset(from), in.skipToken()) {
val to = if in.token == USCORE then wildcardIdent() else termIdent()
ImportSelector(from, if to.name == nme.ERROR then EmptyTree else to)
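The three parser changes above share one shape: the diagnostic is no longer gated on `sourceVersion.isAtLeast(future)` but always routed through `report.errorOrMigrationWarning(..., from = future)`, while the automatic `patch` rewrite fires only under `-source future-migration`. A simplified standalone model of that gating (the names below are invented, not the compiler's API):

```scala
// Hypothetical sketch: the reporter decides between warning and error from
// the `from` version; the source rewrite runs only in the migration version.
enum SourceVersion:
  case Scala34, Future, FutureMigration

def deprecatedSyntax(current: SourceVersion)(rewrite: () => Unit): Unit =
  // stands in for report.errorOrMigrationWarning(msg, pos, from = future)
  println("migration warning: `with` as a type operator is deprecated; use `&`")
  if current == SourceVersion.FutureMigration then rewrite()

@main def gateDemo(): Unit =
  deprecatedSyntax(SourceVersion.FutureMigration)(() => println("patched: with -> &"))
```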
diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
index 0339fc0531f4..44b0c43e545b 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -4,14 +4,14 @@ package parsing
import scala.language.unsafeNulls
-import core.Names._, core.Contexts._, core.Decorators._, util.Spans._
-import core.StdNames._, core.Comments._
+import core.Names.*, core.Contexts.*, core.Decorators.*, util.Spans.*
+import core.StdNames.*, core.Comments.*
import util.SourceFile
-import util.Chars._
+import util.Chars.*
import util.{SourcePosition, CharBuffer}
import util.Spans.Span
import config.Config
-import Tokens._
+import Tokens.*
import scala.annotation.{switch, tailrec}
import scala.collection.mutable
import scala.collection.immutable.SortedMap
diff --git a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala
index d11db73b0455..d71e4cf11102 100644
--- a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala
@@ -3,9 +3,9 @@ package dotc
package parsing
import util.SourceFile
-import core._
-import Contexts._
-import Parsers._
+import core.*
+import Contexts.*
+import Parsers.*
/**
Performs the following context-free rewritings:
@@ -45,7 +45,7 @@ import Parsers._
*/
object ScriptParsers {
- import ast.untpd._
+ import ast.untpd.*
class ScriptParser(source: SourceFile)(using Context) extends Parser(source) {
@@ -118,7 +118,7 @@ object ScriptParsers {
* }
* }
*/
- import definitions._
+ import definitions.*
def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
index 8a20009f1927..fbf4e8d701dd 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -5,7 +5,7 @@ package parsing
import scala.language.unsafeNulls
import collection.immutable.BitSet
-import core.Decorators._
+import core.Decorators.*
import core.StdNames.nme
abstract class TokensCommon {
@@ -14,7 +14,7 @@ abstract class TokensCommon {
type Token = Int
type TokenSet = BitSet
- def tokenRange(lo: Int, hi: Int): TokenSet = BitSet(lo to hi: _*)
+ def tokenRange(lo: Int, hi: Int): TokenSet = BitSet(lo to hi *)
val tokenString, debugString: Array[String] = new Array[String](maxToken + 1)
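`tokenRange` above shows the patch's third recurring rewrite: the Scala 2 vararg splice `xs: _*` becomes the postfix `xs*`. A runnable sketch of the same call:

```scala
import scala.collection.immutable.BitSet

// Old form: BitSet(lo to hi: _*); the new form splices the Range directly.
def tokenRange(lo: Int, hi: Int): BitSet = BitSet((lo to hi)*)

@main def spliceDemo(): Unit =
  println(tokenRange(1, 4)) // BitSet(1, 2, 3, 4)
```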
diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala
index ee3ecda60aee..0e51b487d7c4 100644
--- a/compiler/src/dotty/tools/dotc/parsing/package.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/package.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
-import util.Chars._
+import util.Chars.*
import core.Names.Name
import core.StdNames.nme
-import core.NameOps._
+import core.NameOps.*
package object parsing {
diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala
index 0f7d426fbd28..803470fe85a5 100644
--- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala
@@ -9,7 +9,7 @@ package dotty.tools.dotc
package parsing
package xml
-import Utility._
+import Utility.*
import util.Chars.SU
import scala.collection.BufferedIterator
diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala
index b3f41fab9eaa..5567b4f569d5 100644
--- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala
@@ -4,6 +4,7 @@ package parsing
package xml
import scala.language.unsafeNulls
+import scala.compiletime.uninitialized
import scala.collection.mutable
import scala.collection.BufferedIterator
@@ -11,13 +12,13 @@ import core.Contexts.Context
import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
import util.Chars.SU
-import Parsers._
-import util.Spans._
-import core._
-import Constants._
+import Parsers.*
+import util.Spans.*
+import core.*
+import Constants.*
import Decorators.{em, toMessage}
import util.SourceFile
-import Utility._
+import Utility.*
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
@@ -38,7 +39,7 @@ import Utility._
*/
object MarkupParsers {
- import ast.untpd._
+ import ast.untpd.*
case object MissingEndTagControl extends ControlThrowable {
override def getMessage: String = "start tag was here: "
@@ -71,7 +72,7 @@ object MarkupParsers {
if (ch == SU) throw TruncatedXMLControl
else reportSyntaxError(msg)
- var input : CharArrayReader = _
+ var input : CharArrayReader = uninitialized
def lookahead(): BufferedIterator[Char] =
(input.buf drop input.charOffset).iterator.buffered
diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala
index 0e70cc077fa4..d1f2875064d4 100644
--- a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala
@@ -4,14 +4,15 @@ package parsing
package xml
import scala.language.unsafeNulls
+import scala.compiletime.uninitialized
import scala.collection.mutable
-import core._
-import Decorators._
+import core.*
+import Decorators.*
import Flags.Mutable
-import Names._, StdNames._, ast.Trees._, ast.{tpd, untpd}
-import Symbols._, Contexts._
-import util.Spans._
+import Names.*, StdNames.*, ast.Trees.*, ast.{tpd, untpd}
+import Symbols.*, Contexts.*
+import util.Spans.*
import Parsers.Parser
/** This class builds instances of `Tree` that represent XML.
@@ -28,11 +29,11 @@ import Parsers.Parser
class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) {
import Constants.Constant
- import untpd._
+ import untpd.*
import parser.atSpan
- private[parsing] var isPattern: Boolean = _
+ private[parsing] var isPattern: Boolean = uninitialized
private object xmltypes extends ScalaTypeNames {
val _Comment: TypeName = "Comment"
@@ -66,7 +67,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) {
import xmlterms.{_Null, __Elem, __Text, _buf, _md, _plus, _scope, _tmpscope, _xml}
// convenience methods
- private def LL[A](x: A*): List[List[A]] = List(List(x:_*))
+ private def LL[A](x: A*): List[List[A]] = List(x.toList)
private def const(x: Any) = Literal(Constant(x))
private def wild = Ident(nme.WILDCARD)
private def wildStar = Ident(tpnme.WILDCARD_STAR)
@@ -220,7 +221,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) {
if (pre == null) (_scala_xml_UnprefixedAttribute, baseArgs)
else (_scala_xml_PrefixedAttribute , const(pre) :: baseArgs)
- Assign(Ident(_md), New(clazz, LL(attrArgs: _*)))
+ Assign(Ident(_md), New(clazz, LL(attrArgs*)))
}
def handlePrefixedAttribute(pre: String, key: String, value: Tree) = mkAttributeTree(pre, key, value)
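The `LL` helper above goes one step beyond the mechanical splice rewrite: `List(List(x: _*))` re-spliced the varargs into a fresh list, whereas `x.toList` converts the underlying `Seq` directly. Both are equivalent:

```scala
// Equivalent results; the second form avoids the redundant splice.
def llOld[A](x: A*): List[List[A]] = List(List(x*))
def llNew[A](x: A*): List[List[A]] = List(x.toList)

@main def llDemo(): Unit =
  assert(llOld(1, 2, 3) == llNew(1, 2, 3)) // both: List(List(1, 2, 3))
```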
diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala
index 87412cf6d69c..6577030ec671 100644
--- a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala
@@ -130,7 +130,7 @@ object Utility {
* See [4] and Appendix B of XML 1.0 specification.
*/
def isNameChar(ch: Char): Boolean = {
- import java.lang.Character._
+ import java.lang.Character.*
// The constants represent groups Mc, Me, Mn, Lm, and Nd.
isNameStart(ch) || (getType(ch).toByte match {
@@ -151,7 +151,7 @@ object Utility {
* See [3] and Appendix B of XML 1.0 specification
*/
def isNameStart(ch: Char): Boolean = {
- import java.lang.Character._
+ import java.lang.Character.*
getType(ch).toByte match {
case LOWERCASE_LETTER |
diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala
index 30ef94239ac1..ce77a5b9d97a 100644
--- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala
+++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala
@@ -3,10 +3,10 @@ package plugins
import scala.language.unsafeNulls
-import core._
-import Contexts._
-import Phases._
-import dotty.tools.io._
+import core.*
+import Contexts.*
+import Phases.*
+import dotty.tools.io.*
import transform.MegaPhase.MiniPhase
import java.io.InputStream
diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala
index c44fe4cf59b4..4d6e44a3bea7 100644
--- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala
+++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala
@@ -3,14 +3,16 @@ package plugins
import scala.language.unsafeNulls
-import core._
-import Contexts._
+import core.*
+import Contexts.*
import Decorators.em
import config.{ PathResolver, Feature }
-import dotty.tools.io._
-import Phases._
+import dotty.tools.io.*
+import Phases.*
import config.Printers.plugins.{ println => debug }
+import scala.compiletime.uninitialized
+
/** Support for run-time loading of compiler plugins.
*
* @author Lex Spoon
@@ -44,7 +46,7 @@ trait Plugins {
goods map (_.get)
}
- private var _roughPluginsList: List[Plugin] = _
+ private var _roughPluginsList: List[Plugin] = uninitialized
protected def roughPluginsList(using Context): List[Plugin] =
if (_roughPluginsList == null) {
_roughPluginsList = loadRoughPluginsList
@@ -96,7 +98,7 @@ trait Plugins {
plugs
}
- private var _plugins: List[Plugin] = _
+ private var _plugins: List[Plugin] = uninitialized
def plugins(using Context): List[Plugin] =
if (_plugins == null) {
_plugins = loadPlugins
diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
index 3f32b29654c9..02f470324e8a 100644
--- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
@@ -6,12 +6,12 @@ import scala.language.unsafeNulls
import scala.collection.mutable
-import core._
-import Texts._, Types._, Flags._, Symbols._, Contexts._
-import Decorators._
+import core.*
+import Texts.*, Types.*, Flags.*, Symbols.*, Contexts.*
+import Decorators.*
import reporting.Message
import util.DiffUtil
-import Highlighting._
+import Highlighting.*
object Formatting {
@@ -144,7 +144,7 @@ object Formatting {
case Nil => ("", Nil)
}
val (args1, suffixes1) = args.lazyZip(suffixes).map(treatArg(_, _)).unzip
- new StringContext(prefix :: suffixes1.toList: _*).s(args1: _*)
+ new StringContext(prefix :: suffixes1.toList*).s(args1*)
}
}
diff --git a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala
index ceb5afdea750..c9b3e2a5aa83 100644
--- a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala
@@ -3,7 +3,7 @@ package dotc
package printing
import scala.collection.mutable
-import core.Contexts._
+import core.Contexts.*
object Highlighting {
diff --git a/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala b/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala
index c9ac4a5af4ce..24f02f37956e 100644
--- a/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/MessageLimiter.scala
@@ -2,8 +2,8 @@ package dotty.tools
package dotc
package printing
-import core._
-import Contexts._
+import core.*
+import Contexts.*
import util.Property
import Texts.Text
diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
index 60a25413d8e5..d5c60cf44579 100644
--- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -1,14 +1,14 @@
package dotty.tools.dotc
package printing
-import core._
-import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, Denotations._
-import StdNames._
-import Contexts._
+import core.*
+import Texts.*, Types.*, Flags.*, Names.*, Symbols.*, NameOps.*, Constants.*, Denotations.*
+import StdNames.*
+import Contexts.*
import Scopes.Scope, Denotations.Denotation, Annotations.Annotation
import StdNames.nme
-import ast.Trees._
-import typer.Implicits._
+import ast.Trees.*
+import typer.Implicits.*
import typer.ImportInfo
import Variances.varianceSign
import util.SourcePosition
diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala
index eafa399313da..8687925ed5fb 100644
--- a/compiler/src/dotty/tools/dotc/printing/Printer.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala
@@ -2,8 +2,8 @@ package dotty.tools
package dotc
package printing
-import core._
-import Texts._, ast.Trees._
+import core.*
+import Texts.*, ast.Trees.*
import Types.{Type, SingletonType, LambdaParam, NamedType},
Symbols.Symbol, Scopes.Scope, Constants.Constant,
Names.Name, Denotations._, Annotations.Annotation, Contexts.Context
diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
index 70bb6f89663c..9426842363a7 100644
--- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
@@ -2,29 +2,29 @@ package dotty.tools
package dotc
package printing
-import core._
+import core.*
import Constants.*
-import Texts._
-import Types._
-import Flags._
-import Names._
-import Symbols._
-import NameOps._
+import Texts.*
+import Types.*
+import Flags.*
+import Names.*
+import Symbols.*
+import NameOps.*
import TypeErasure.ErasedValueType
-import Contexts._
+import Contexts.*
import Annotations.Annotation
-import Denotations._
-import SymDenotations._
+import Denotations.*
+import SymDenotations.*
import StdNames.{nme, tpnme}
import ast.{Trees, tpd, untpd}
import typer.{Implicits, Namer, Applications}
-import typer.ProtoTypes._
-import Trees._
-import TypeApplications._
+import typer.ProtoTypes.*
+import Trees.*
+import TypeApplications.*
import NameKinds.{WildcardParamName, DefaultGetterName}
import util.Chars.isOperatorPart
-import transform.TypeUtils._
-import transform.SymUtils._
+import transform.TypeUtils.*
+import transform.SymUtils.*
import config.{Config, Feature}
import dotty.tools.dotc.util.SourcePosition
@@ -345,7 +345,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = {
- import untpd._
+ import untpd.*
def isLocalThis(tree: Tree) = tree.typeOpt match {
case tp: ThisType => tp.cls == ctx.owner.enclosingClass
@@ -781,7 +781,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
super.toTextCapturing(tp, refsText, boxText)
override def toText[T <: Untyped](tree: Tree[T]): Text = controlled {
- import untpd._
+ import untpd.*
var txt = toTextCore(tree)
@@ -924,7 +924,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = {
- import untpd._
+ import untpd.*
dclTextOr(tree) {
val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false)
val isExtension = tree.hasType && tree.symbol.is(ExtensionMethod)
diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala
index ea3afef27fae..f02cbf159224 100644
--- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala
@@ -2,13 +2,13 @@ package dotty.tools.dotc.printing
import dotty.tools.dotc.core.Constants
import dotty.tools.dotc.core.Constants.Constant
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.NameOps._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.NameOps.*
import dotty.tools.dotc.core.Names.Name
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
-import dotty.tools.dotc.printing.Texts._
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.printing.Texts.*
class ReplPrinter(_ctx: Context) extends RefinedPrinter(_ctx) {
diff --git a/compiler/src/dotty/tools/dotc/printing/Showable.scala b/compiler/src/dotty/tools/dotc/printing/Showable.scala
index 4a0e68861a1a..4480aa9c76a4 100644
--- a/compiler/src/dotty/tools/dotc/printing/Showable.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Showable.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package printing
-import core._
+import core.*
-import Contexts._, Texts._, Decorators._
+import Contexts.*, Texts.*, Decorators.*
import config.Config.summarizeDepth
trait Showable extends Any {
diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
index 7030776dd06c..6f65320d2c8e 100644
--- a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
+++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
@@ -4,11 +4,11 @@ package printing
import scala.language.unsafeNulls
import dotty.tools.dotc.ast.untpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.StdNames.*
import dotty.tools.dotc.parsing.Parsers.Parser
import dotty.tools.dotc.parsing.Scanners.Scanner
-import dotty.tools.dotc.parsing.Tokens._
+import dotty.tools.dotc.parsing.Tokens.*
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.util.Spans.Span
import dotty.tools.dotc.util.SourceFile
@@ -87,7 +87,7 @@ object SyntaxHighlighting {
highlightPosition(comment.span, CommentColor)
object TreeHighlighter extends untpd.UntypedTreeTraverser {
- import untpd._
+ import untpd.*
def ignored(tree: NameTree) = {
val name = tree.name.toTermName
diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala
index 64cc08160701..a13c9d41b529 100644
--- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala
+++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala
@@ -11,7 +11,7 @@ import javax.management.openmbean.CompositeData
import javax.management.{Notification, NotificationEmitter, NotificationListener}
import dotty.tools.dotc.core.Phases.Phase
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.io.AbstractFile
import annotation.internal.sharable
@@ -83,7 +83,7 @@ private [profile] object NoOpProfiler extends Profiler {
override def finished(): Unit = ()
}
private [profile] object RealProfiler {
- import scala.jdk.CollectionConverters._
+ import scala.jdk.CollectionConverters.*
val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean
val memoryMx: MemoryMXBean = ManagementFactory.getMemoryMXBean
val gcMx: List[GarbageCollectorMXBean] = ManagementFactory.getGarbageCollectorMXBeans.asScala.toList
@@ -106,7 +106,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context)
@nowarn("cat=deprecation")
private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = {
- import RealProfiler._
+ import RealProfiler.*
val current = Thread.currentThread()
ProfileSnap(
diff --git a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala
index 1e9969e0bc65..e3ea69d9be06 100644
--- a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala
+++ b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala
@@ -3,11 +3,11 @@ package dotty.tools.dotc.profile
import scala.language.unsafeNulls
import java.util.concurrent.ThreadPoolExecutor.AbortPolicy
-import java.util.concurrent._
+import java.util.concurrent.*
import java.util.concurrent.atomic.AtomicInteger
import dotty.tools.dotc.core.Phases.Phase
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
sealed trait ThreadPoolFactory {
diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala
index dbf4fe91a970..c124e12077fe 100644
--- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala
@@ -11,19 +11,19 @@ import java.lang.reflect.{InvocationTargetException, Method => JLRMethod}
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.ast.TreeMapWithImplicits
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.Constants._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.Constants.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
import dotty.tools.dotc.core.Denotations.staticRef
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.NameKinds.FlatName
-import dotty.tools.dotc.core.Names._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.core.TypeErasure
-import dotty.tools.dotc.core.Types._
-import dotty.tools.dotc.quoted._
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.quoted.*
import dotty.tools.dotc.typer.ImportInfo.withRootImports
import dotty.tools.dotc.util.SrcPos
import dotty.tools.dotc.reporting.Message
@@ -32,8 +32,8 @@ import dotty.tools.dotc.core.CyclicReference
/** Tree interpreter for metaprogramming constructs */
class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context):
- import Interpreter._
- import tpd._
+ import Interpreter.*
+ import tpd.*
val classLoader =
if ctx.owner.topLevelClass.name.startsWith(str.REPL_SESSION_LINE) then
@@ -171,7 +171,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context):
val clazz = inst.getClass
val name = fn.name.asTermName
val method = getMethod(clazz, name, paramsSig(fn))
- stopIfRuntimeException(method.invoke(inst, args: _*), method)
+ stopIfRuntimeException(method.invoke(inst, args*), method)
}
private def interpretedStaticFieldAccess(sym: Symbol): Object = {
@@ -186,8 +186,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context):
private def interpretNew(fn: Symbol, args: List[Object]): Object = {
val className = fn.owner.fullName.mangledString.replaceAll("\\$\\.", "\\$")
val clazz = loadClass(className)
- val constr = clazz.getConstructor(paramsSig(fn): _*)
- constr.newInstance(args: _*).asInstanceOf[Object]
+ val constr = clazz.getConstructor(paramsSig(fn)*)
+ constr.newInstance(args*).asInstanceOf[Object]
}
private def unexpectedTree(tree: Tree): Object =
@@ -218,7 +218,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context):
private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod =
- try clazz.getMethod(name.toString, paramClasses: _*)
+ try clazz.getMethod(name.toString, paramClasses*)
catch {
case _: NoSuchMethodException =>
val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)"
@@ -328,7 +328,7 @@ object Interpreter:
class StopInterpretation(val msg: Message, val pos: SrcPos) extends Exception
object Call:
- import tpd._
+ import tpd.*
/** Matches an expression that is either a field access or an application
* It returns a TermRef for the accessed field or the called method, together with the arguments passed to it.
*/
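The `Interpreter` changes above apply the same splice rewrite at Java varargs call sites (`getMethod`, `getConstructor`, `invoke`). A minimal sketch against the JDK reflection API:

```scala
// New-style splices also satisfy Java varargs parameters.
@main def reflectDemo(): Unit =
  val paramClasses = List(classOf[Object])
  val args = List[Object]("hi")
  // Old form: getMethod("toString", paramClasses: _*) / invoke(null, args: _*)
  val m = classOf[java.util.Objects].getMethod("toString", paramClasses*)
  println(m.invoke(null, args*)) // prints: hi
```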
diff --git a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala
index 141b349826b4..d8ba1b72ce3b 100644
--- a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc.quoted
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.typer.Typer
import dotty.tools.dotc.util.{Property, SourcePosition}
diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala
index 1b354abf929f..a9b66fc056e2 100644
--- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala
@@ -1,14 +1,14 @@
package dotty.tools.dotc.quoted
-import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.Trees.*
import dotty.tools.dotc.ast.{TreeTypeMap, tpd}
-import dotty.tools.dotc.config.Printers._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.config.Printers.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.Mode
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.core.tasty.{ PositionPickler, TastyPickler, TastyPrinter, TreePickler }
import dotty.tools.dotc.core.tasty.DottyUnpickler
import dotty.tools.dotc.core.tasty.TreeUnpickler.UnpickleMode
@@ -16,14 +16,14 @@ import dotty.tools.dotc.report
import dotty.tools.dotc.reporting.Message
import scala.quoted.Quotes
-import scala.quoted.runtime.impl._
+import scala.quoted.runtime.impl.*
import scala.collection.mutable
-import QuoteUtils._
+import QuoteUtils.*
object PickledQuotes {
- import tpd._
+ import tpd.*
/** Pickle the tree of the quote into strings */
def pickleQuote(tree: Tree)(using Context): List[String] =
diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala
index 48884f6b2d6e..eb5395194d11 100644
--- a/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/QuotePatterns.scala
@@ -18,12 +18,12 @@ import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.core.TypeOps.*
import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.reporting.IllegalVariableInPatternAlternative
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import scala.collection.mutable
object QuotePatterns:
- import tpd._
+ import tpd.*
/** Check for restricted patterns */
def checkPattern(quotePattern: QuotePattern)(using Context): Unit = new tpd.TreeTraverser {
diff --git a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala
index 604c8da3420a..a015c726c59f 100644
--- a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala
@@ -1,11 +1,11 @@
package dotty.tools.dotc.quoted
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Symbols.*
object QuoteUtils:
- import tpd._
+ import tpd.*
/** Get the owner of a tree if it has one */
def treeOwner(tree: Tree)(using Context): Option[Symbol] = {
diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala
index c063e437cb19..4147e49b87ce 100644
--- a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc.quoted
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.util.Property
import dotty.tools.dotc.ast.tpd
object QuotesCache {
- import tpd._
+ import tpd.*
/** A key to be used in a context property that caches the unpickled trees */
private val QuotesCacheKey = new Property.Key[collection.mutable.Map[String | List[String], Tree]]
diff --git a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala
index 87d0cbb7be08..cfc09a8ed836 100644
--- a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala
@@ -3,11 +3,11 @@ package reflect
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.ast.untpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
object FromSymbol {
diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala
index 75261fb6890e..142561dcbbee 100644
--- a/compiler/src/dotty/tools/dotc/report.scala
+++ b/compiler/src/dotty/tools/dotc/report.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
-import reporting._
-import Diagnostic._
+import reporting.*
+import Diagnostic.*
import util.{SourcePosition, NoSourcePosition, SrcPos}
-import core._
-import Contexts._, Flags.*, Symbols._, Decorators._
+import core.*
+import Contexts.*, Flags.*, Symbols.*, Decorators.*
import config.SourceVersion
-import ast._
+import ast.*
import config.Feature.sourceVersion
import java.lang.System.currentTimeMillis
diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
index a95af962c053..ce7477f4da70 100644
--- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package reporting
-import core.Contexts._
+import core.Contexts.*
import java.io.{ BufferedReader, PrintWriter }
import Diagnostic.Error
diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
index 624aa93924e8..7a8edb233aee 100644
--- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
@@ -5,12 +5,12 @@ package reporting
import scala.language.unsafeNulls
import dotty.tools.dotc.config.Settings.Setting
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING}
import dotty.tools.dotc.util.SourcePosition
import java.util.{Collections, Optional, List => JList}
-import scala.util.chaining._
+import scala.util.chaining.*
import core.Decorators.toMessage
object Diagnostic:
diff --git a/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala
index f78fd3bd190b..a9b2f68d07d6 100644
--- a/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/DidYouMean.scala
@@ -2,8 +2,8 @@ package dotty.tools
package dotc
package reporting
-import core._
-import Contexts._
+import core.*
+import Contexts.*
import Decorators.*, Symbols.*, Names.*, Types.*, Flags.*
import typer.ProtoTypes.{FunProto, SelectionProto}
import transform.SymUtils.isNoValue
diff --git a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala
index a2062bd1b2c7..f469c03764c0 100644
--- a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala
@@ -6,7 +6,7 @@ import scala.language.unsafeNulls
import collection.mutable
import core.Contexts.Context
-import Diagnostic._
+import Diagnostic.*
/** A re-usable Reporter used in Contexts#test */
class ExploringReporter extends StoreReporter(null, fromTyperState = false):
diff --git a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
index 9b6a3c75ba5d..5910d9b4d656 100644
--- a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package reporting
-import core.Contexts._
+import core.Contexts.*
/**
* This trait implements `isHidden` so that we avoid reporting non-sensical messages.
diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
index f53359fb8b19..6881235e3dc1 100644
--- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
@@ -6,11 +6,11 @@ import scala.language.unsafeNulls
import java.lang.System.{lineSeparator => EOL}
-import core.Contexts._
-import core.Decorators._
+import core.Contexts.*
+import core.Decorators.*
import printing.Highlighting.{Blue, Red, Yellow}
import printing.SyntaxHighlighting
-import Diagnostic._
+import Diagnostic.*
import util.{ SourcePosition, NoSourcePosition }
import util.Chars.{ LF, CR, FF, SU }
import scala.annotation.switch
@@ -210,7 +210,7 @@ trait MessageRendering {
}
private def appendFilterHelp(dia: Diagnostic, sb: mutable.StringBuilder): Unit =
- import dia._
+ import dia.*
val hasId = msg.errorId.errorNumber >= 0
val category = dia match {
case _: UncheckedWarning => "unchecked"
@@ -228,7 +228,7 @@ trait MessageRendering {
/** The whole message rendered from `msg` */
def messageAndPos(dia: Diagnostic)(using Context): String = {
- import dia._
+ import dia.*
val pos1 = pos.nonInlined
val inlineStack = inlinePosStack(pos).filter(_ != pos1)
val maxLineNumber =
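`import dia._` → `import dia.*` is the same rewrite applied to a value rather than a package: importing from a stable identifier brings that value's members (here `msg` and `pos`) into scope. A small sketch of the idea, with a hypothetical `Diag` standing in for the real `Diagnostic`:

```scala
final case class Diag(msg: String, pos: Int)

def render(dia: Diag): String =
  import dia.*            // members of the stable value `dia` come into scope
  s"[$pos] $msg"          // shorthand for dia.pos and dia.msg
```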
diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
index 3be1a159c55c..f567e094e831 100644
--- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
@@ -4,11 +4,11 @@ package reporting
import scala.language.unsafeNulls
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.Mode
import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol}
-import dotty.tools.dotc.reporting.Diagnostic._
-import dotty.tools.dotc.reporting.Message._
+import dotty.tools.dotc.reporting.Diagnostic.*
+import dotty.tools.dotc.reporting.Message.*
import dotty.tools.dotc.util.NoSourcePosition
import java.io.{BufferedReader, PrintWriter}
@@ -63,7 +63,7 @@ object Reporter {
* error messages.
*/
abstract class Reporter extends interfaces.ReporterResult {
- import Reporter._
+ import Reporter.*
/** Report a diagnostic */
def doReport(dia: Diagnostic)(using Context): Unit
@@ -179,7 +179,7 @@ abstract class Reporter extends interfaces.ReporterResult {
case _ => dia
def go() =
- import Action._
+ import Action.*
dia match
case w: Warning => WConf.parsed.action(dia) match
case Error => issueUnconfigured(w.toError)
diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
index 9783a3208a60..aef5f2c5863b 100644
--- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
@@ -2,10 +2,10 @@ package dotty.tools
package dotc
package reporting
-import core.Contexts._
+import core.Contexts.*
import collection.mutable
import config.Printers.typr
-import Diagnostic._
+import Diagnostic.*
/** This class implements a Reporter that stores all messages
*
diff --git a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala
index 8d7204a93fa2..c0b5ffe8e650 100644
--- a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala
@@ -5,7 +5,7 @@ package reporting
import scala.language.unsafeNulls
import collection.mutable
-import Diagnostic._
+import Diagnostic.*
/** A re-usable Reporter used in Contexts#test */
class TestingReporter extends StoreReporter(null, fromTyperState = false):
diff --git a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
index 153212522541..75c698a28ee4 100644
--- a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
@@ -2,7 +2,7 @@ package dotty.tools
package dotc
package reporting
-import core.Contexts._
+import core.Contexts.*
import Diagnostic.Error
/**
diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
index 98fd7da3032a..d8426aa8781e 100644
--- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
@@ -4,7 +4,7 @@ package reporting
import scala.collection.mutable
import util.SourceFile
-import core.Contexts._
+import core.Contexts.*
/** This trait implements `isHidden` so that multiple messages per position
* are suppressed, unless they are of increasing severity. */
@@ -25,14 +25,14 @@ trait UniqueMessagePositions extends Reporter {
||
dia.pos.exists
&& !ctx.settings.YshowSuppressedErrors.value
- && (dia.pos.start to dia.pos.end).exists(pos =>
- positions.get((ctx.source, pos)).exists(_.hides(dia)))
+ && (dia.pos.start to dia.pos.end).exists: offset =>
+ positions.get((ctx.source, offset)).exists(_.hides(dia))
override def markReported(dia: Diagnostic)(using Context): Unit =
if dia.pos.exists then
- for (pos <- dia.pos.start to dia.pos.end)
- positions.get(ctx.source, pos) match
+ for offset <- dia.pos.start to dia.pos.end do
+ positions.get((ctx.source, offset)) match
case Some(dia1) if dia1.hides(dia) =>
- case _ => positions((ctx.source, pos)) = dia
+ case _ => positions((ctx.source, offset)) = dia
super.markReported(dia)
}
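Besides the rename from `pos` to `offset` (which avoids confusion with `dia.pos`), this hunk modernizes two constructs: the parenthesized lambda becomes the Scala 3 "fewer braces" colon form, and `for (x <- r)` becomes `for x <- r do`. A self-contained sketch of both, under an assumed toy `hits` map:

```scala
val hits = Map(3 -> "a", 7 -> "b")

// Scala 2 style: parenthesised lambda
def anyHitOld(start: Int, end: Int): Boolean =
  (start to end).exists(pos => hits.contains(pos))

// Scala 3 style: colon lambda with an indented body (standard since 3.3)
def anyHitNew(start: Int, end: Int): Boolean =
  (start to end).exists: offset =>
    hits.contains(offset)

// `for (x <- r)` becomes `for x <- r do`
def markAll(start: Int, end: Int): Unit =
  for offset <- start to end do println(hits.get(offset))
```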
diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala
index 5303ccd7f219..29b5bccb7714 100644
--- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala
@@ -4,7 +4,7 @@ package reporting
import scala.language.unsafeNulls
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.util.SourcePosition
import java.util.regex.PatternSyntaxException
@@ -36,8 +36,8 @@ final case class WConf(confs: List[(List[MessageFilter], Action)]):
}.getOrElse(Action.Warning)
object WConf:
- import Action._
- import MessageFilter._
+ import Action.*
+ import MessageFilter.*
private type Conf = (List[MessageFilter], Action)
diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala
index 3770d5f6f119..5c7eb1d0f775 100644
--- a/compiler/src/dotty/tools/dotc/reporting/messages.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala
@@ -2,17 +2,17 @@ package dotty.tools
package dotc
package reporting
-import core._
-import Contexts._
-import Decorators._, Symbols._, Names._, NameOps._, Types._, Flags._, Phases._
+import core.*
+import Contexts.*
+import Decorators.*, Symbols.*, Names.*, NameOps.*, Types.*, Flags.*, Phases.*
import Denotations.SingleDenotation
import SymDenotations.SymDenotation
import NameKinds.{WildcardParamName, ContextFunctionParamName}
import parsing.Scanners.Token
import parsing.Tokens
-import printing.Highlighting._
+import printing.Highlighting.*
import printing.Formatting
-import ErrorMessageID._
+import ErrorMessageID.*
import ast.Trees
import config.{Feature, ScalaVersion}
import typer.ErrorReporting.{err, matchReductionAddendum, substitutableTypeSymbolsInScope}
@@ -22,10 +22,10 @@ import typer.Inferencing
import scala.util.control.NonFatal
import StdNames.nme
import printing.Formatting.hl
-import ast.Trees._
+import ast.Trees.*
import ast.untpd
import ast.tpd
-import transform.SymUtils._
+import transform.SymUtils.*
import scala.util.matching.Regex
import java.util.regex.Matcher.quoteReplacement
import cc.CaptureSet.IdentityCaptRefMap
@@ -138,10 +138,10 @@ extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) {
|its body in a block; no exceptions are handled."""
}
-class DeprecatedWithOperator()(using Context)
+class DeprecatedWithOperator(rewrite: String)(using Context)
extends SyntaxMsg(DeprecatedWithOperatorID) {
def msg(using Context) =
- i"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead"""
+ i"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead$rewrite"""
def explain(using Context) =
i"""|Dotty introduces intersection types - ${hl("&")} types. These replace the
|use of the ${hl("with")} keyword. There are a few differences in
@@ -1699,7 +1699,7 @@ class JavaEnumParentArgs(parent: Type)(using Context)
class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context)
extends NamingMsg(CannotHaveSameNameAsID) {
- import CannotHaveSameNameAs._
+ import CannotHaveSameNameAs.*
def reasonMessage(using Context): String = reason match {
case CannotBeOverridden => "class definitions cannot be overridden"
case DefinedInSelf(self) =>
@@ -2268,7 +2268,7 @@ extends NamingMsg(DoubleDefinitionID) {
def erasedType = if ctx.erasedTypes then i" ${decl.info}" else ""
def details(using Context): String =
if (decl.isRealMethod && previousDecl.isRealMethod) {
- import Signature.MatchDegree._
+ import Signature.MatchDegree.*
// compare the signatures when both symbols represent methods
decl.signature.matchDegree(previousDecl.signature) match {
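The `DeprecatedWithOperator` change is the one non-mechanical edit in this file: the new `rewrite: String` parameter appends a suffix to the deprecation message, presumably a note emitted when `-rewrite` patches `with` to `&` automatically (the call sites are outside this section). A hedged sketch of the shape; this is illustrative only, not the compiler's `SyntaxMsg` hierarchy:

```scala
// Illustrative only: a message that carries an optional rewrite note.
final class DeprecatedWithMsg(rewriteNote: String):
  def msg: String =
    s"`with` as a type operator has been deprecated; use `&` instead$rewriteNote"

// The caller decides whether a rewrite note is attached:
val plain   = DeprecatedWithMsg("").msg
val patched = DeprecatedWithMsg("\n[this occurrence was patched by -rewrite]").msg
```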
diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala
index 5bea0fb66ed0..2586ad8604c3 100644
--- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala
+++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala
@@ -3,7 +3,7 @@ package rewrites
import util.{SourceFile, Spans}
import Spans.Span
-import core.Contexts._
+import core.Contexts.*
import collection.mutable
import scala.annotation.tailrec
import dotty.tools.dotc.reporting.Reporter
@@ -114,6 +114,6 @@ object Rewrites {
* as an optional setting.
*/
class Rewrites {
- import Rewrites._
+ import Rewrites.*
private val patched = new PatchedFiles
}
diff --git a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala
index 833cf7f2e0ff..07fa2027fbe6 100644
--- a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala
@@ -3,11 +3,11 @@ package sbt
import scala.language.unsafeNulls
-import core._
-import Contexts._
-import Flags._
-import Symbols._
-import NameOps._
+import core.*
+import Contexts.*
+import Flags.*
+import Symbols.*
+import NameOps.*
import xsbti.api
import xsbti.api.SafeLazy.strict
@@ -24,7 +24,7 @@ object APIUtils {
val EmptyType = api.EmptyType.of()
}
- import Constants._
+ import Constants.*
/** Registers a dummy class for sbt's incremental compilation.
*
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
index 6398fe613b12..5561a241c975 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -5,20 +5,20 @@ import scala.language.unsafeNulls
import ExtractDependencies.internalError
import ast.{Positioned, Trees, tpd}
-import core._
-import core.Decorators._
-import Annotations._
-import Contexts._
-import Flags._
-import Phases._
-import Trees._
-import Types._
-import Symbols._
-import Names._
-import NameOps._
+import core.*
+import core.Decorators.*
+import Annotations.*
+import Contexts.*
+import Flags.*
+import Phases.*
+import Trees.*
+import Types.*
+import Symbols.*
+import Names.*
+import NameOps.*
import inlines.Inlines
import transform.ValueClasses
-import transform.SymUtils._
+import transform.SymUtils.*
import dotty.tools.io.File
import java.io.PrintWriter
@@ -135,7 +135,7 @@ object ExtractAPI:
* http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation
*/
private class ExtractAPICollector(using Context) extends ThunkHolder {
- import tpd._
+ import tpd.*
import xsbti.api
/** This cache is necessary for correctness, see the comment about inherited
@@ -566,7 +566,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
case ExprType(resultType) =>
withMarker(apiType(resultType), byNameMarker)
case MatchType(bound, scrut, cases) =>
- val s = combineApiTypes(apiType(bound) :: apiType(scrut) :: cases.map(apiType): _*)
+ val s = combineApiTypes(apiType(bound) :: apiType(scrut) :: cases.map(apiType)*)
withMarker(s, matchMarker)
case ConstantType(constant) =>
api.Constant.of(apiType(constant.tpe), constant.stringValue)
@@ -614,7 +614,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
apiType(lo), apiType(hi))
def apiVariance(v: Int): api.Variance = {
- import api.Variance._
+ import api.Variance.*
if (v < 0) Contravariant
else if (v > 0) Covariant
else Invariant
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
index d0330a955148..fbf6e08f8b60 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
@@ -9,27 +9,27 @@ import java.util.{Arrays, EnumSet}
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.classpath.FileUtils.{isTasty, hasClassExtension, hasTastyExtension}
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.NameOps._
-import dotty.tools.dotc.core.Names._
-import dotty.tools.dotc.core.Phases._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.NameOps.*
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.Phases.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.core.Denotations.StaleSymbol
-import dotty.tools.dotc.core.Types._
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.util.{SrcPos, NoSourcePosition}
import dotty.tools.io
import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile}
import xsbti.UseScope
import xsbti.api.DependencyContext
-import xsbti.api.DependencyContext._
+import xsbti.api.DependencyContext.*
import scala.jdk.CollectionConverters.*
import scala.collection.{Set, mutable}
-
+import scala.compiletime.uninitialized
/** This phase sends information on classes' dependencies to sbt via callbacks.
*
@@ -51,7 +51,7 @@ import scala.collection.{Set, mutable}
* @see ExtractAPI
*/
class ExtractDependencies extends Phase {
- import ExtractDependencies._
+ import ExtractDependencies.*
override def phaseName: String = ExtractDependencies.name
@@ -119,7 +119,7 @@ object ExtractDependencies {
* inheritance" in the "Name hashing algorithm" section.
*/
private class ExtractDependenciesCollector(rec: DependencyRecorder) extends tpd.TreeTraverser { thisTreeTraverser =>
- import tpd._
+ import tpd.*
private def addMemberRefDependency(sym: Symbol)(using Context): Unit =
if (!ignoreDependency(sym)) {
@@ -519,9 +519,9 @@ class DependencyRecorder {
}
}
- private var lastOwner: Symbol = _
- private var lastDepSource: Symbol = _
- private var lastFoundCache: FoundDepsInClass | Null = _
+ private var lastOwner: Symbol = uninitialized
+ private var lastDepSource: Symbol = uninitialized
+ private var lastFoundCache: FoundDepsInClass | Null = uninitialized
/** The source of the dependency according to `nonLocalEnclosingClass`
* if it exists, otherwise fall back to `responsibleForImports`.
@@ -558,7 +558,7 @@ class DependencyRecorder {
clazz
}
- private var _responsibleForImports: Symbol = _
+ private var _responsibleForImports: Symbol = uninitialized
/** Top level import dependencies are registered as coming from a first top level
* class/trait/object declared in the compilation unit. If none exists, issue a warning and return NoSymbol.
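The `var x: T = _` default-initializer syntax is dropped in Scala 3 in favor of the explicit `scala.compiletime.uninitialized` marker, which is why the hunks above also add that import. A minimal sketch with a `String` field standing in for the compiler's `Symbol` fields:

```scala
import scala.compiletime.uninitialized

final class Recorder:
  // Scala 2: private var lastOwner: String = _
  private var lastOwner: String = uninitialized  // null until first assignment

  def record(owner: String): Unit = lastOwner = owner
```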
diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala
index cacb10cf98bc..61baebbe9517 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala
@@ -11,7 +11,7 @@ package sbt
import scala.language.unsafeNulls
-import xsbti.api._
+import xsbti.api.*
import scala.util.Try
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala
index 975d5480fe9b..784b23cfc78c 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala
@@ -5,7 +5,7 @@ package semanticdb
import dotty.tools.dotc.{semanticdb => s}
import core.Contexts.Context
-import core.Constants._
+import core.Constants.*
object ConstantOps:
extension (const: Constant)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
index 07f3fcea2e88..75805d4aed17 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
@@ -4,24 +4,24 @@ package semanticdb
import scala.language.unsafeNulls
-import core._
-import Phases._
-import ast.tpd._
+import core.*
+import Phases.*
+import ast.tpd.*
import ast.Trees.{mods, WithEndMarker}
-import Contexts._
-import Symbols._
-import Flags._
+import Contexts.*
+import Symbols.*
+import Flags.*
import Names.Name
import StdNames.nme
-import NameOps._
+import NameOps.*
import Denotations.StaleSymbol
import util.Spans.Span
import util.SourceFile
-import transform.SymUtils._
+import transform.SymUtils.*
import scala.collection.mutable
import scala.annotation.{ threadUnsafe => tu, tailrec }
-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
import scala.PartialFunction.condOpt
import typer.ImportInfo.withRootImports
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala
index b53ee787f501..fdf159836878 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala
@@ -4,7 +4,7 @@ import dotty.tools.dotc.{semanticdb => s}
import scala.collection.mutable
import dotty.tools.dotc.semanticdb.Scala3.given
-import SymbolInformation.Kind._
+import SymbolInformation.Kind.*
import dotty.tools.dotc.util.SourceFile
class SymbolInformationPrinter (symtab: PrinterSymtab):
val notes = InfoNotes()
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
index f1302330bd8b..f49b00089712 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
@@ -2,11 +2,11 @@ package dotty.tools.dotc.semanticdb
import dotty.tools.dotc.core
import core.Symbols.{ Symbol , defn, NoSymbol }
-import core.Contexts._
+import core.Contexts.*
import core.Names
import core.Names.Name
import core.Types.{Type, TypeBounds}
-import core.Flags._
+import core.Flags.*
import core.NameKinds
import core.StdNames.nme
import SymbolInformation.{Kind => k}
@@ -20,8 +20,8 @@ import scala.annotation.internal.sharable
import scala.annotation.switch
object Scala3:
- import Symbols._
- import core.NameOps._
+ import Symbols.*
+ import core.NameOps.*
@sharable private val unicodeEscape = raw"\$$u(\p{XDigit}{4})".r
@sharable private val locals = raw"local(\d+)".r
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala
index 0b92ebddb02c..6376fb86d6c5 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala
@@ -2,10 +2,10 @@ package dotty.tools
package dotc
package semanticdb
-import core._
-import Contexts._
-import Symbols._
-import Flags._
+import core.*
+import Contexts.*
+import Symbols.*
+import Flags.*
import Names.Name
import scala.annotation.tailrec
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala
index b2f26e3e992f..af38315a857e 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.ast.tpd._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.ast.tpd.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.StdNames.nme
import dotty.tools.dotc.core.NameKinds
import dotty.tools.dotc.{semanticdb => s}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
index 6c6e69f12578..ea95e34a57b9 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc.semanticdb
-import java.nio.file._
+import java.nio.file.*
import java.nio.charset.StandardCharsets
-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
import dotty.tools.dotc.util.SourceFile
import dotty.tools.dotc.semanticdb.Scala3.given
@@ -92,7 +92,7 @@ object Tools:
end metac
private def schemaString(schema: Schema) =
- import Schema._
+ import Schema.*
schema match
case SEMANTICDB3 => "SemanticDB v3"
case SEMANTICDB4 => "SemanticDB v4"
@@ -101,7 +101,7 @@ object Tools:
end schemaString
private def languageString(language: Language) =
- import Language._
+ import Language.*
language match
case SCALA => "Scala"
case JAVA => "Java"
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
index b0d032c7d83b..4293ecd6ca43 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
@@ -2,9 +2,9 @@ package dotty.tools
package dotc
package semanticdb
-import core.Symbols._
+import core.Symbols.*
import core.Contexts.Context
-import core.Types._
+import core.Types.*
import core.Annotations.Annotation
import core.Flags
import core.Names.Name
@@ -18,7 +18,7 @@ import Scala3.{FakeSymbol, SemanticSymbol, WildcardTypeSymbol, TypeParamRefSymbo
import dotty.tools.dotc.core.Names.Designator
class TypeOps:
- import SymbolScopeOps._
+ import SymbolScopeOps.*
import Scala3.given
private val paramRefSymtab = mutable.Map[(LambdaType, Name), Symbol]()
private val refinementSymtab = mutable.Map[(RefinedType, Name), Symbol]()
@@ -245,7 +245,7 @@ class TypeOps:
loop(tpe)
def toSemanticType(sym: Symbol)(using LinkMode, SemanticSymbolBuilder, Context): s.Type =
- import ConstantOps._
+ import ConstantOps.*
def loop(tpe: Type): s.Type = tpe match {
case t if t.isFromJavaObject =>
loop(defn.AnyType)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala
index c646e67b69ad..2d2621c34390 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
sealed trait Access extends SemanticdbGeneratedSealedOneof derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Access extends SemanticdbGeneratedSealedOneof derives CanEqual {
object Access {
case object Empty extends dotty.tools.dotc.semanticdb.Access
-
+
sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Access
def defaultInstance: dotty.tools.dotc.semanticdb.Access = Empty
-
+
implicit val AccessTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] {
override def toCustom(__base: dotty.tools.dotc.semanticdb.AccessMessage): dotty.tools.dotc.semanticdb.Access = __base.sealedValue match {
case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess => __v.value
@@ -90,7 +90,7 @@ final case class AccessMessage(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
sealedValue.privateAccess.foreach { __v =>
@@ -152,10 +152,10 @@ final case class AccessMessage(
def withPublicAccess(__v: dotty.tools.dotc.semanticdb.PublicAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__v))
def clearSealedValue: AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty)
def withSealedValue(__v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue): AccessMessage = copy(sealedValue = __v)
-
-
-
-
+
+
+
+
def toAccess: dotty.tools.dotc.semanticdb.Access = dotty.tools.dotc.semanticdb.Access.AccessTypeMapper.toCustom(this)
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Access])
}
@@ -190,12 +190,12 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
sealedValue = __sealedValue
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.AccessMessage(
sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty
)
@@ -226,7 +226,7 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
override def number: _root_.scala.Int = 0
override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
}
-
+
@SerialVersionUID(0L)
final case class PrivateAccess(value: dotty.tools.dotc.semanticdb.PrivateAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual {
type ValueType = dotty.tools.dotc.semanticdb.PrivateAccess
@@ -298,10 +298,10 @@ final case class PrivateAccess(
final override def serializedSize: _root_.scala.Int = 0
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
}
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateAccess])
}
@@ -319,12 +319,12 @@ object PrivateAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
dotty.tools.dotc.semanticdb.PrivateAccess(
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateAccess(
)
def of(
@@ -339,10 +339,10 @@ final case class PrivateThisAccess(
final override def serializedSize: _root_.scala.Int = 0
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
}
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateThisAccess])
}
@@ -360,12 +360,12 @@ object PrivateThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tool
dotty.tools.dotc.semanticdb.PrivateThisAccess(
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateThisAccess(
)
def of(
@@ -382,7 +382,7 @@ final case class PrivateWithinAccess(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = symbol
if (!__value.isEmpty) {
@@ -398,7 +398,7 @@ final case class PrivateWithinAccess(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -409,10 +409,10 @@ final case class PrivateWithinAccess(
};
}
def withSymbol(__v: _root_.scala.Predef.String): PrivateWithinAccess = copy(symbol = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateWithinAccess])
}
@@ -434,12 +434,12 @@ object PrivateWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.to
symbol = __symbol
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateWithinAccess(
symbol = ""
)
@@ -458,10 +458,10 @@ final case class ProtectedAccess(
final override def serializedSize: _root_.scala.Int = 0
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
}
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedAccess])
}
@@ -479,12 +479,12 @@ object ProtectedAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.
dotty.tools.dotc.semanticdb.ProtectedAccess(
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedAccess(
)
def of(
@@ -499,10 +499,10 @@ final case class ProtectedThisAccess(
final override def serializedSize: _root_.scala.Int = 0
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
}
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedThisAccess])
}
@@ -520,12 +520,12 @@ object ProtectedThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.to
dotty.tools.dotc.semanticdb.ProtectedThisAccess(
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedThisAccess(
)
def of(
@@ -542,7 +542,7 @@ final case class ProtectedWithinAccess(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = symbol
if (!__value.isEmpty) {
@@ -558,7 +558,7 @@ final case class ProtectedWithinAccess(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -569,10 +569,10 @@ final case class ProtectedWithinAccess(
};
}
def withSymbol(__v: _root_.scala.Predef.String): ProtectedWithinAccess = copy(symbol = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedWithinAccess])
}
@@ -594,12 +594,12 @@ object ProtectedWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.
symbol = __symbol
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedWithinAccess(
symbol = ""
)
@@ -618,10 +618,10 @@ final case class PublicAccess(
final override def serializedSize: _root_.scala.Int = 0
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
}
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PublicAccess])
}
@@ -639,12 +639,12 @@ object PublicAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
dotty.tools.dotc.semanticdb.PublicAccess(
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.PublicAccess(
)
def of(
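Although the hunks in this generated file are whitespace-only, they expose an idiom worth noting: `serializedSize` is memoized as `size + 1` in a field whose default `0` means "not yet computed", and `__size - 1` recovers the real value. A standalone sketch of that trick, independent of the semanticdb runtime:

```scala
final class Memoized(payload: Array[Byte]):
  private var memo: Int = 0                 // 0 = not yet computed
  private def compute(): Int = payload.length

  def serializedSize: Int =
    var size = memo
    if size == 0 then
      size = compute() + 1                  // shift by 1 so a real size of 0 is storable
      memo = size
    size - 1                                // undo the shift on every read
```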
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala
index 2cb478d89e2d..a4f076585a50 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -16,7 +16,7 @@ final case class Annotation(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -32,7 +32,7 @@ final case class Annotation(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -45,10 +45,10 @@ final case class Annotation(
};
}
def withTpe(__v: dotty.tools.dotc.semanticdb.Type): Annotation = copy(tpe = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Annotation])
}
@@ -70,12 +70,12 @@ object Annotation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.Annotation(
tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala
index 0ca96d9ae8c6..91bbaa75e654 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual
object Constant {
case object Empty extends dotty.tools.dotc.semanticdb.Constant
-
+
sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Constant
def defaultInstance: dotty.tools.dotc.semanticdb.Constant = Empty
-
+
implicit val ConstantTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] {
override def toCustom(__base: dotty.tools.dotc.semanticdb.ConstantMessage): dotty.tools.dotc.semanticdb.Constant = __base.sealedValue match {
case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant => __v.value
@@ -114,7 +114,7 @@ final case class ConstantMessage(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
sealedValue.unitConstant.foreach { __v =>
@@ -208,10 +208,10 @@ final case class ConstantMessage(
def withNullConstant(__v: dotty.tools.dotc.semanticdb.NullConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__v))
def clearSealedValue: ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty)
def withSealedValue(__v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue): ConstantMessage = copy(sealedValue = __v)
-
-
-
-
+
+
+
+
def toConstant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.Constant.ConstantTypeMapper.toCustom(this)
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Constant])
}
@@ -254,12 +254,12 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.
sealedValue = __sealedValue
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantMessage(
sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty
)
@@ -298,7 +298,7 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.
override def number: _root_.scala.Int = 0
override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
}
-
+
@SerialVersionUID(0L)
final case class UnitConstant(value: dotty.tools.dotc.semanticdb.UnitConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual {
type ValueType = dotty.tools.dotc.semanticdb.UnitConstant
@@ -402,10 +402,10 @@ final case class UnitConstant(
final override def serializedSize: _root_.scala.Int = 0
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
}
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnitConstant])
}
@@ -423,12 +423,12 @@ object UnitConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
dotty.tools.dotc.semanticdb.UnitConstant(
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnitConstant(
)
def of(
@@ -445,7 +445,7 @@ final case class BooleanConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != false) {
@@ -461,7 +461,7 @@ final case class BooleanConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -472,10 +472,10 @@ final case class BooleanConstant(
};
}
def withValue(__v: _root_.scala.Boolean): BooleanConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.BooleanConstant])
}
@@ -497,12 +497,12 @@ object BooleanConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.BooleanConstant(
value = false
)
@@ -523,7 +523,7 @@ final case class ByteConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != 0) {
@@ -539,7 +539,7 @@ final case class ByteConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -550,10 +550,10 @@ final case class ByteConstant(
};
}
def withValue(__v: _root_.scala.Int): ByteConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByteConstant])
}
@@ -575,12 +575,12 @@ object ByteConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByteConstant(
value = 0
)
@@ -601,7 +601,7 @@ final case class ShortConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != 0) {
@@ -617,7 +617,7 @@ final case class ShortConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -628,10 +628,10 @@ final case class ShortConstant(
};
}
def withValue(__v: _root_.scala.Int): ShortConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ShortConstant])
}
@@ -653,12 +653,12 @@ object ShortConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ShortConstant(
value = 0
)
@@ -679,7 +679,7 @@ final case class CharConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != 0) {
@@ -695,7 +695,7 @@ final case class CharConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -706,10 +706,10 @@ final case class CharConstant(
};
}
def withValue(__v: _root_.scala.Int): CharConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.CharConstant])
}
@@ -731,12 +731,12 @@ object CharConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.CharConstant(
value = 0
)
@@ -757,7 +757,7 @@ final case class IntConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != 0) {
@@ -773,7 +773,7 @@ final case class IntConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -784,10 +784,10 @@ final case class IntConstant(
};
}
def withValue(__v: _root_.scala.Int): IntConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntConstant])
}
@@ -809,12 +809,12 @@ object IntConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntConstant(
value = 0
)
@@ -835,7 +835,7 @@ final case class LongConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != 0L) {
@@ -851,7 +851,7 @@ final case class LongConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -862,10 +862,10 @@ final case class LongConstant(
};
}
def withValue(__v: _root_.scala.Long): LongConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LongConstant])
}
@@ -887,12 +887,12 @@ object LongConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.LongConstant(
value = 0L
)
@@ -913,7 +913,7 @@ final case class FloatConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != 0.0f) {
@@ -929,7 +929,7 @@ final case class FloatConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -940,10 +940,10 @@ final case class FloatConstant(
};
}
def withValue(__v: _root_.scala.Float): FloatConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FloatConstant])
}
@@ -965,12 +965,12 @@ object FloatConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.FloatConstant(
value = 0.0f
)
@@ -991,7 +991,7 @@ final case class DoubleConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (__value != 0.0) {
@@ -1007,7 +1007,7 @@ final case class DoubleConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1018,10 +1018,10 @@ final case class DoubleConstant(
};
}
def withValue(__v: _root_.scala.Double): DoubleConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.DoubleConstant])
}
@@ -1043,12 +1043,12 @@ object DoubleConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.DoubleConstant(
value = 0.0
)
@@ -1069,7 +1069,7 @@ final case class StringConstant(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = value
if (!__value.isEmpty) {
@@ -1085,7 +1085,7 @@ final case class StringConstant(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1096,10 +1096,10 @@ final case class StringConstant(
};
}
def withValue(__v: _root_.scala.Predef.String): StringConstant = copy(value = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StringConstant])
}
@@ -1121,12 +1121,12 @@ object StringConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
value = __value
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.StringConstant(
value = ""
)
@@ -1145,10 +1145,10 @@ final case class NullConstant(
final override def serializedSize: _root_.scala.Int = 0
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
}
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.NullConstant])
}
@@ -1166,12 +1166,12 @@ object NullConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
dotty.tools.dotc.semanticdb.NullConstant(
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.NullConstant(
)
def of(
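`Constant` follows the same sealed-oneof encoding as `Access` above: a user-facing sealed trait with an `Empty` case, a generated `*Message` wire wrapper, and a type mapper converting between the two. A reduced sketch of the pattern, with abbreviated names in place of the generated `SemanticdbTypeMapper` machinery:

```scala
// User-facing ADT
sealed trait Const
object Const:
  case object Empty extends Const
  final case class IntConst(value: Int) extends Const

// Wire-level wrapper, as the protobuf layer sees it
final case class ConstMessage(intValue: Option[Int]):
  def toConst: Const =
    intValue.fold(Const.Empty: Const)(Const.IntConst(_))

object ConstMessage:
  def fromConst(c: Const): ConstMessage = c match
    case Const.Empty       => ConstMessage(None)
    case Const.IntConst(v) => ConstMessage(Some(v))
```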
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala
index cc8aa82bf8ea..5917ab82f59f 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -22,14 +22,14 @@ final case class Diagnostic(
val __value = range.get
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
-
+
{
val __value = severity.value
if (__value != 0) {
__size += SemanticdbOutputStream.computeEnumSize(2, __value)
}
};
-
+
{
val __value = message
if (!__value.isEmpty) {
@@ -45,7 +45,7 @@ final case class Diagnostic(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
range.foreach { __v =>
@@ -72,10 +72,10 @@ final case class Diagnostic(
def withRange(__v: dotty.tools.dotc.semanticdb.Range): Diagnostic = copy(range = Option(__v))
def withSeverity(__v: dotty.tools.dotc.semanticdb.Diagnostic.Severity): Diagnostic = copy(severity = __v)
def withMessage(__v: _root_.scala.Predef.String): Diagnostic = copy(message = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Diagnostic])
}
@@ -105,12 +105,12 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
message = __message
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.Diagnostic(
range = _root_.scala.None,
severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY,
@@ -123,49 +123,49 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
def isWarning: _root_.scala.Boolean = false
def isInformation: _root_.scala.Boolean = false
def isHint: _root_.scala.Boolean = false
-
+
final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized])
}
-
+
object Severity {
sealed trait Recognized extends Severity
-
-
+
+
@SerialVersionUID(0L)
case object UNKNOWN_SEVERITY extends Severity(0) with Severity.Recognized {
val index = 0
val name = "UNKNOWN_SEVERITY"
override def isUnknownSeverity: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object ERROR extends Severity(1) with Severity.Recognized {
val index = 1
val name = "ERROR"
override def isError: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object WARNING extends Severity(2) with Severity.Recognized {
val index = 2
val name = "WARNING"
override def isWarning: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object INFORMATION extends Severity(3) with Severity.Recognized {
val index = 3
val name = "INFORMATION"
override def isInformation: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object HINT extends Severity(4) with Severity.Recognized {
val index = 4
val name = "HINT"
override def isHint: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Severity(unrecognizedValue) with SemanticdbUnrecognizedEnum
lazy val values = scala.collection.immutable.Seq(UNKNOWN_SEVERITY, ERROR, WARNING, INFORMATION, HINT)
@@ -177,8 +177,8 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
case 4 => HINT
case __other => Unrecognized(__other)
}
-
-
+
+
}
final val RANGE_FIELD_NUMBER = 1
final val SEVERITY_FIELD_NUMBER = 2
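`Severity` (like `Language` below) is an open protobuf enum: known tags decode to `Recognized` case objects, and any other wire value is preserved in an `Unrecognized` wrapper instead of failing. A compact sketch of that decoding scheme:

```scala
sealed abstract class Severity(val value: Int)
object Severity:
  sealed trait Recognized extends Severity
  case object Error   extends Severity(1) with Recognized
  case object Warning extends Severity(2) with Recognized
  final case class Unrecognized(raw: Int) extends Severity(raw)

  def fromValue(v: Int): Severity = v match
    case 1     => Error
    case 2     => Warning
    case other => Unrecognized(other)  // unknown wire values survive round-trips
```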
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala
index 07fbda4991af..695dea973016 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -17,14 +17,14 @@ final case class Documentation(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = message
if (!__value.isEmpty) {
__size += SemanticdbOutputStream.computeStringSize(1, __value)
}
};
-
+
{
val __value = format.value
if (__value != 0) {
@@ -40,7 +40,7 @@ final case class Documentation(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -58,10 +58,10 @@ final case class Documentation(
}
def withMessage(__v: _root_.scala.Predef.String): Documentation = copy(message = __v)
def withFormat(__v: dotty.tools.dotc.semanticdb.Documentation.Format): Documentation = copy(format = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Documentation])
}
@@ -87,12 +87,12 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
format = __format
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.Documentation(
message = "",
format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML
@@ -104,49 +104,49 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
def isJavadoc: _root_.scala.Boolean = false
def isScaladoc: _root_.scala.Boolean = false
def isKdoc: _root_.scala.Boolean = false
-
+
final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized])
}
-
+
object Format {
sealed trait Recognized extends Format
-
-
+
+
@SerialVersionUID(0L)
case object HTML extends Format(0) with Format.Recognized {
val index = 0
val name = "HTML"
override def isHtml: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object MARKDOWN extends Format(1) with Format.Recognized {
val index = 1
val name = "MARKDOWN"
override def isMarkdown: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object JAVADOC extends Format(2) with Format.Recognized {
val index = 2
val name = "JAVADOC"
override def isJavadoc: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object SCALADOC extends Format(3) with Format.Recognized {
val index = 3
val name = "SCALADOC"
override def isScaladoc: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object KDOC extends Format(4) with Format.Recognized {
val index = 4
val name = "KDOC"
override def isKdoc: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Format(unrecognizedValue) with SemanticdbUnrecognizedEnum
lazy val values = scala.collection.immutable.Seq(HTML, MARKDOWN, JAVADOC, SCALADOC, KDOC)
@@ -158,8 +158,8 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
case 4 => KDOC
case __other => Unrecognized(__other)
}
-
-
+
+
}
final val MESSAGE_FIELD_NUMBER = 1
final val FORMAT_FIELD_NUMBER = 2
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala
index c57a3d3cddc3..ef47e9020361 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual {
@@ -13,35 +13,35 @@ sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbG
def isUnknownLanguage: _root_.scala.Boolean = false
def isScala: _root_.scala.Boolean = false
def isJava: _root_.scala.Boolean = false
-
+
final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Language.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Language.Recognized])
}
object Language {
sealed trait Recognized extends Language
-
-
+
+
@SerialVersionUID(0L)
case object UNKNOWN_LANGUAGE extends Language(0) with Language.Recognized {
val index = 0
val name = "UNKNOWN_LANGUAGE"
override def isUnknownLanguage: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object SCALA extends Language(1) with Language.Recognized {
val index = 1
val name = "SCALA"
override def isScala: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object JAVA extends Language(2) with Language.Recognized {
val index = 2
val name = "JAVA"
override def isJava: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Language(unrecognizedValue) with SemanticdbUnrecognizedEnum
lazy val values = scala.collection.immutable.Seq(UNKNOWN_LANGUAGE, SCALA, JAVA)
@@ -51,6 +51,6 @@ object Language {
case 2 => JAVA
case __other => Unrecognized(__other)
}
-
-
+
+
}
\ No newline at end of file
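
The `Language` diff above is the smallest instance of the generated-enum pattern these files share: each recognized value is a case object carrying its wire `value`, `index`, and `name`, while unknown wire values survive as `Unrecognized` instead of failing decoding. A minimal usage sketch — assuming the scalapb-style `fromValue` companion method that the final hunk's match presumably belongs to:

```scala
import dotty.tools.dotc.semanticdb.Language

val lang    = Language.fromValue(1)   // Language.SCALA
val unknown = Language.fromValue(42)  // Language.Unrecognized(42), not an error

// asRecognized narrows away the Unrecognized case before matching.
lang.asRecognized match
  case Some(Language.SCALA) => println("Scala source")
  case Some(Language.JAVA)  => println("Java source")
  case _                    => println("unknown or unrecognized language value")
```
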
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala
index a3667e944ae4..756b7711d304 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -17,7 +17,7 @@ final case class Location(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = uri
if (!__value.isEmpty) {
@@ -37,7 +37,7 @@ final case class Location(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -57,10 +57,10 @@ final case class Location(
def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
def clearRange: Location = copy(range = _root_.scala.None)
def withRange(__v: dotty.tools.dotc.semanticdb.Range): Location = copy(range = Option(__v))
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Location])
}
@@ -86,12 +86,12 @@ object Location extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se
range = __range
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.Location(
uri = "",
range = _root_.scala.None
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala
index d273664bdf6a..0f7436524ee1 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -19,28 +19,28 @@ final case class Range(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = startLine
if (__value != 0) {
__size += SemanticdbOutputStream.computeInt32Size(1, __value)
}
};
-
+
{
val __value = startCharacter
if (__value != 0) {
__size += SemanticdbOutputStream.computeInt32Size(2, __value)
}
};
-
+
{
val __value = endLine
if (__value != 0) {
__size += SemanticdbOutputStream.computeInt32Size(3, __value)
}
};
-
+
{
val __value = endCharacter
if (__value != 0) {
@@ -56,7 +56,7 @@ final case class Range(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -88,10 +88,10 @@ final case class Range(
def withStartCharacter(__v: _root_.scala.Int): Range = copy(startCharacter = __v)
def withEndLine(__v: _root_.scala.Int): Range = copy(endLine = __v)
def withEndCharacter(__v: _root_.scala.Int): Range = copy(endCharacter = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Range])
}
@@ -125,12 +125,12 @@ object Range extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman
endCharacter = __endCharacter
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.Range(
startLine = 0,
startCharacter = 0,
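
The recurring `__size - 1` read that these hunks keep touching is scalapb's size-memoization idiom: `0` in `__serializedSizeMemoized` means "not computed yet", so the cached value is stored shifted up by one and shifted back down on every read. A standalone sketch of the idiom, with hypothetical names:

```scala
final class Memoized(payload: Array[Byte]):
  // 0 is the "not yet computed" sentinel, so the cache stores size + 1:
  // even a genuinely zero-byte serialization never collides with it.
  private var cached: Int = 0

  private def compute(): Int = payload.length // stand-in for the real field walk

  def serializedSize: Int =
    var size = cached
    if size == 0 then
      size = compute() + 1 // shift past the sentinel
      cached = size
    size - 1               // undo the shift on every read
```
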
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala
index 841e69166feb..c9239c85f409 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual {
@@ -13,35 +13,35 @@ sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGen
def isLegacy: _root_.scala.Boolean = false
def isSemanticdb3: _root_.scala.Boolean = false
def isSemanticdb4: _root_.scala.Boolean = false
-
+
final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Schema.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Schema.Recognized])
}
object Schema {
sealed trait Recognized extends Schema
-
-
+
+
@SerialVersionUID(0L)
case object LEGACY extends Schema(0) with Schema.Recognized {
val index = 0
val name = "LEGACY"
override def isLegacy: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object SEMANTICDB3 extends Schema(3) with Schema.Recognized {
val index = 1
val name = "SEMANTICDB3"
override def isSemanticdb3: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object SEMANTICDB4 extends Schema(4) with Schema.Recognized {
val index = 2
val name = "SEMANTICDB4"
override def isSemanticdb4: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Schema(unrecognizedValue) with SemanticdbUnrecognizedEnum
lazy val values = scala.collection.immutable.Seq(LEGACY, SEMANTICDB3, SEMANTICDB4)
@@ -51,6 +51,6 @@ object Schema {
case 4 => SEMANTICDB4
case __other => Unrecognized(__other)
}
-
-
+
+
}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala
index 655ebe75185e..7a2ee40478c4 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -34,7 +34,7 @@ final case class Scope(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
symlinks.foreach { __v =>
@@ -56,10 +56,10 @@ final case class Scope(
def addHardlinks(__vs: dotty.tools.dotc.semanticdb.SymbolInformation *): Scope = addAllHardlinks(__vs)
def addAllHardlinks(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = hardlinks ++ __vs)
def withHardlinks(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Scope])
}
@@ -85,12 +85,12 @@ object Scope extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman
hardlinks = __hardlinks.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.Scope(
symlinks = _root_.scala.Seq.empty,
hardlinks = _root_.scala.Seq.empty
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala
index 228e2f02349b..7a0331be0ed3 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
sealed trait Signature extends SemanticdbGeneratedSealedOneof derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Signature extends SemanticdbGeneratedSealedOneof derives CanEqual
object Signature {
case object Empty extends dotty.tools.dotc.semanticdb.Signature
-
+
sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Signature
def defaultInstance: dotty.tools.dotc.semanticdb.Signature = Empty
-
+
implicit val SignatureTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] {
override def toCustom(__base: dotty.tools.dotc.semanticdb.SignatureMessage): dotty.tools.dotc.semanticdb.Signature = __base.sealedValue match {
case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature => __v.value
@@ -72,7 +72,7 @@ final case class SignatureMessage(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
sealedValue.classSignature.foreach { __v =>
@@ -110,10 +110,10 @@ final case class SignatureMessage(
def withValueSignature(__v: dotty.tools.dotc.semanticdb.ValueSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__v))
def clearSealedValue: SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty)
def withSealedValue(__v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue): SignatureMessage = copy(sealedValue = __v)
-
-
-
-
+
+
+
+
def toSignature: dotty.tools.dotc.semanticdb.Signature = dotty.tools.dotc.semanticdb.Signature.SignatureTypeMapper.toCustom(this)
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Signature])
}
@@ -142,12 +142,12 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools
sealedValue = __sealedValue
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.SignatureMessage(
sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty
)
@@ -172,7 +172,7 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools
override def number: _root_.scala.Int = 0
override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
}
-
+
@SerialVersionUID(0L)
final case class ClassSignature(value: dotty.tools.dotc.semanticdb.ClassSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual {
type ValueType = dotty.tools.dotc.semanticdb.ClassSignature
@@ -233,7 +233,7 @@ final case class ClassSignature(
val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_parents.toBase(__item)
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
-
+
{
val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self)
if (__value.serializedSize != 0) {
@@ -253,7 +253,7 @@ final case class ClassSignature(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
typeParameters.foreach { __v =>
@@ -294,10 +294,10 @@ final case class ClassSignature(
def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
def clearDeclarations: ClassSignature = copy(declarations = _root_.scala.None)
def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ClassSignature = copy(declarations = Option(__v))
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ClassSignature])
}
@@ -331,12 +331,12 @@ object ClassSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
declarations = __declarations
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ClassSignature(
typeParameters = _root_.scala.None,
parents = _root_.scala.Seq.empty,
@@ -383,7 +383,7 @@ final case class MethodSignature(
val __value = __item
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
-
+
{
val __value = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType)
if (__value.serializedSize != 0) {
@@ -399,7 +399,7 @@ final case class MethodSignature(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
typeParameters.foreach { __v =>
@@ -431,10 +431,10 @@ final case class MethodSignature(
def addAllParameterLists(__vs: Iterable[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = parameterLists ++ __vs)
def withParameterLists(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = __v)
def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): MethodSignature = copy(returnType = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MethodSignature])
}
@@ -464,12 +464,12 @@ object MethodSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.
returnType = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.MethodSignature(
typeParameters = _root_.scala.None,
parameterLists = _root_.scala.Seq.empty,
@@ -506,14 +506,14 @@ final case class TypeSignature(
val __value = typeParameters.get
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound)
if (__value.serializedSize != 0) {
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound)
if (__value.serializedSize != 0) {
@@ -529,7 +529,7 @@ final case class TypeSignature(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
typeParameters.foreach { __v =>
@@ -560,10 +560,10 @@ final case class TypeSignature(
def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): TypeSignature = copy(typeParameters = Option(__v))
def withLowerBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(lowerBound = __v)
def withUpperBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(upperBound = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeSignature])
}
@@ -593,12 +593,12 @@ object TypeSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
upperBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(__upperBound.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeSignature(
typeParameters = _root_.scala.None,
lowerBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
@@ -631,7 +631,7 @@ final case class ValueSignature(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -647,7 +647,7 @@ final case class ValueSignature(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -660,10 +660,10 @@ final case class ValueSignature(
};
}
def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ValueSignature = copy(tpe = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ValueSignature])
}
@@ -685,12 +685,12 @@ object ValueSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ValueSignature(
tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
)
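
`Signature` is one of the sealed-oneof types in this patch: the ergonomic sealed trait (`Signature`) and the raw protobuf form (`SignatureMessage`) convert through the implicit `SemanticdbTypeMapper`, whose `toCustom` direction appears in the first hunk and whose `toBase` direction is what the `_typemapper_*` call sites above use. A round-trip sketch — the concrete `ValueSignature` argument is illustrative only:

```scala
import dotty.tools.dotc.semanticdb.{Signature, SignatureMessage, Type, ValueSignature}

val sig: Signature = ValueSignature(Type.Empty)

// Wrap into the oneof message form, then unwrap again via toSignature.
val msg: SignatureMessage = Signature.SignatureTypeMapper.toBase(sig)
assert(msg.toSignature == sig)
```
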
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala
index 93fbb207c4f6..92917cb23a41 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -25,42 +25,42 @@ final case class SymbolInformation(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = symbol
if (!__value.isEmpty) {
__size += SemanticdbOutputStream.computeStringSize(1, __value)
}
};
-
+
{
val __value = language.value
if (__value != 0) {
__size += SemanticdbOutputStream.computeEnumSize(16, __value)
}
};
-
+
{
val __value = kind.value
if (__value != 0) {
__size += SemanticdbOutputStream.computeEnumSize(3, __value)
}
};
-
+
{
val __value = properties
if (__value != 0) {
__size += SemanticdbOutputStream.computeInt32Size(4, __value)
}
};
-
+
{
val __value = displayName
if (!__value.isEmpty) {
__size += SemanticdbOutputStream.computeStringSize(5, __value)
}
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature)
if (__value.serializedSize != 0) {
@@ -71,7 +71,7 @@ final case class SymbolInformation(
val __value = __item
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
-
+
{
val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access)
if (__value.serializedSize != 0) {
@@ -95,7 +95,7 @@ final case class SymbolInformation(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -179,10 +179,10 @@ final case class SymbolInformation(
def getDocumentation: dotty.tools.dotc.semanticdb.Documentation = documentation.getOrElse(dotty.tools.dotc.semanticdb.Documentation.defaultInstance)
def clearDocumentation: SymbolInformation = copy(documentation = _root_.scala.None)
def withDocumentation(__v: dotty.tools.dotc.semanticdb.Documentation): SymbolInformation = copy(documentation = Option(__v))
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolInformation])
}
@@ -240,12 +240,12 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
documentation = __documentation
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolInformation(
symbol = "",
language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE,
@@ -276,126 +276,126 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
def isClass: _root_.scala.Boolean = false
def isTrait: _root_.scala.Boolean = false
def isInterface: _root_.scala.Boolean = false
-
+
final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized])
}
-
+
object Kind {
sealed trait Recognized extends Kind
-
-
+
+
@SerialVersionUID(0L)
case object UNKNOWN_KIND extends Kind(0) with Kind.Recognized {
val index = 0
val name = "UNKNOWN_KIND"
override def isUnknownKind: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object LOCAL extends Kind(19) with Kind.Recognized {
val index = 1
val name = "LOCAL"
override def isLocal: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object FIELD extends Kind(20) with Kind.Recognized {
val index = 2
val name = "FIELD"
override def isField: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object METHOD extends Kind(3) with Kind.Recognized {
val index = 3
val name = "METHOD"
override def isMethod: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object CONSTRUCTOR extends Kind(21) with Kind.Recognized {
val index = 4
val name = "CONSTRUCTOR"
override def isConstructor: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object MACRO extends Kind(6) with Kind.Recognized {
val index = 5
val name = "MACRO"
override def isMacro: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object TYPE extends Kind(7) with Kind.Recognized {
val index = 6
val name = "TYPE"
override def isType: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object PARAMETER extends Kind(8) with Kind.Recognized {
val index = 7
val name = "PARAMETER"
override def isParameter: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object SELF_PARAMETER extends Kind(17) with Kind.Recognized {
val index = 8
val name = "SELF_PARAMETER"
override def isSelfParameter: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object TYPE_PARAMETER extends Kind(9) with Kind.Recognized {
val index = 9
val name = "TYPE_PARAMETER"
override def isTypeParameter: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object OBJECT extends Kind(10) with Kind.Recognized {
val index = 10
val name = "OBJECT"
override def isObject: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object PACKAGE extends Kind(11) with Kind.Recognized {
val index = 11
val name = "PACKAGE"
override def isPackage: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object PACKAGE_OBJECT extends Kind(12) with Kind.Recognized {
val index = 12
val name = "PACKAGE_OBJECT"
override def isPackageObject: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object CLASS extends Kind(13) with Kind.Recognized {
val index = 13
val name = "CLASS"
override def isClass: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object TRAIT extends Kind(14) with Kind.Recognized {
val index = 14
val name = "TRAIT"
override def isTrait: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object INTERFACE extends Kind(18) with Kind.Recognized {
val index = 15
val name = "INTERFACE"
override def isInterface: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Kind(unrecognizedValue) with SemanticdbUnrecognizedEnum
lazy val values = scala.collection.immutable.Seq(UNKNOWN_KIND, LOCAL, FIELD, METHOD, CONSTRUCTOR, MACRO, TYPE, PARAMETER, SELF_PARAMETER, TYPE_PARAMETER, OBJECT, PACKAGE, PACKAGE_OBJECT, CLASS, TRAIT, INTERFACE)
@@ -418,8 +418,8 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
case 21 => CONSTRUCTOR
case __other => Unrecognized(__other)
}
-
-
+
+
}
sealed abstract class Property(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual {
type EnumType = Property
@@ -444,161 +444,161 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
def isTransparent: _root_.scala.Boolean = false
def isInfix: _root_.scala.Boolean = false
def isOpaque: _root_.scala.Boolean = false
-
+
final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized])
}
-
+
object Property {
sealed trait Recognized extends Property
-
-
+
+
@SerialVersionUID(0L)
case object UNKNOWN_PROPERTY extends Property(0) with Property.Recognized {
val index = 0
val name = "UNKNOWN_PROPERTY"
override def isUnknownProperty: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object ABSTRACT extends Property(4) with Property.Recognized {
val index = 1
val name = "ABSTRACT"
override def isAbstract: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object FINAL extends Property(8) with Property.Recognized {
val index = 2
val name = "FINAL"
override def isFinal: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object SEALED extends Property(16) with Property.Recognized {
val index = 3
val name = "SEALED"
override def isSealed: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object IMPLICIT extends Property(32) with Property.Recognized {
val index = 4
val name = "IMPLICIT"
override def isImplicit: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object LAZY extends Property(64) with Property.Recognized {
val index = 5
val name = "LAZY"
override def isLazy: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object CASE extends Property(128) with Property.Recognized {
val index = 6
val name = "CASE"
override def isCase: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object COVARIANT extends Property(256) with Property.Recognized {
val index = 7
val name = "COVARIANT"
override def isCovariant: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object CONTRAVARIANT extends Property(512) with Property.Recognized {
val index = 8
val name = "CONTRAVARIANT"
override def isContravariant: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object VAL extends Property(1024) with Property.Recognized {
val index = 9
val name = "VAL"
override def isVal: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object VAR extends Property(2048) with Property.Recognized {
val index = 10
val name = "VAR"
override def isVar: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object STATIC extends Property(4096) with Property.Recognized {
val index = 11
val name = "STATIC"
override def isStatic: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object PRIMARY extends Property(8192) with Property.Recognized {
val index = 12
val name = "PRIMARY"
override def isPrimary: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object ENUM extends Property(16384) with Property.Recognized {
val index = 13
val name = "ENUM"
override def isEnum: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object DEFAULT extends Property(32768) with Property.Recognized {
val index = 14
val name = "DEFAULT"
override def isDefault: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object GIVEN extends Property(65536) with Property.Recognized {
val index = 15
val name = "GIVEN"
override def isGiven: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object INLINE extends Property(131072) with Property.Recognized {
val index = 16
val name = "INLINE"
override def isInline: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object OPEN extends Property(262144) with Property.Recognized {
val index = 17
val name = "OPEN"
override def isOpen: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object TRANSPARENT extends Property(524288) with Property.Recognized {
val index = 18
val name = "TRANSPARENT"
override def isTransparent: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object INFIX extends Property(1048576) with Property.Recognized {
val index = 19
val name = "INFIX"
override def isInfix: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object OPAQUE extends Property(2097152) with Property.Recognized {
val index = 20
val name = "OPAQUE"
override def isOpaque: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Property(unrecognizedValue) with SemanticdbUnrecognizedEnum
lazy val values = scala.collection.immutable.Seq(UNKNOWN_PROPERTY, ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY, CASE, COVARIANT, CONTRAVARIANT, VAL, VAR, STATIC, PRIMARY, ENUM, DEFAULT, GIVEN, INLINE, OPEN, TRANSPARENT, INFIX, OPAQUE)
@@ -626,8 +626,8 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool
case 2097152 => OPAQUE
case __other => Unrecognized(__other)
}
-
-
+
+
}
final val SYMBOL_FIELD_NUMBER = 1
final val LANGUAGE_FIELD_NUMBER = 16
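
Note that the `Property` wire values above are distinct powers of two (`ABSTRACT = 4`, `FINAL = 8`, … `OPAQUE = 2097152`), so a `SymbolInformation`'s `properties` field is a bit set rather than a single enum value, and membership is a mask test:

```scala
import dotty.tools.dotc.semanticdb.SymbolInformation
import dotty.tools.dotc.semanticdb.SymbolInformation.Property

def hasProperty(info: SymbolInformation, p: Property): Boolean =
  (info.properties & p.value) != 0

// e.g. an `implicit val` symbol carries both bits:
def isImplicitVal(info: SymbolInformation): Boolean =
  hasProperty(info, Property.IMPLICIT) && hasProperty(info, Property.VAL)
```
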
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
index 5d7670dfdd32..39a5228ed02d 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -22,14 +22,14 @@ final case class SymbolOccurrence(
val __value = range.get
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
-
+
{
val __value = symbol
if (!__value.isEmpty) {
__size += SemanticdbOutputStream.computeStringSize(2, __value)
}
};
-
+
{
val __value = role.value
if (__value != 0) {
@@ -45,7 +45,7 @@ final case class SymbolOccurrence(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
range.foreach { __v =>
@@ -72,10 +72,10 @@ final case class SymbolOccurrence(
def withRange(__v: dotty.tools.dotc.semanticdb.Range): SymbolOccurrence = copy(range = Option(__v))
def withSymbol(__v: _root_.scala.Predef.String): SymbolOccurrence = copy(symbol = __v)
def withRole(__v: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role): SymbolOccurrence = copy(role = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolOccurrence])
}
@@ -105,12 +105,12 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
role = __role
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolOccurrence(
range = _root_.scala.None,
symbol = "",
@@ -121,35 +121,35 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
def isUnknownRole: _root_.scala.Boolean = false
def isReference: _root_.scala.Boolean = false
def isDefinition: _root_.scala.Boolean = false
-
+
final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized])
}
-
+
object Role {
sealed trait Recognized extends Role
-
-
+
+
@SerialVersionUID(0L)
case object UNKNOWN_ROLE extends Role(0) with Role.Recognized {
val index = 0
val name = "UNKNOWN_ROLE"
override def isUnknownRole: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object REFERENCE extends Role(1) with Role.Recognized {
val index = 1
val name = "REFERENCE"
override def isReference: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
case object DEFINITION extends Role(2) with Role.Recognized {
val index = 2
val name = "DEFINITION"
override def isDefinition: _root_.scala.Boolean = true
}
-
+
@SerialVersionUID(0L)
final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Role(unrecognizedValue) with SemanticdbUnrecognizedEnum
lazy val values = scala.collection.immutable.Seq(UNKNOWN_ROLE, REFERENCE, DEFINITION)
@@ -159,8 +159,8 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools
case 2 => DEFINITION
case __other => Unrecognized(__other)
}
-
-
+
+
}
final val RANGE_FIELD_NUMBER = 1
final val SYMBOL_FIELD_NUMBER = 2
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
index 3c6fcfbf4c6a..7916fdb2e07a 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -21,7 +21,7 @@ final case class Synthetic(
val __value = range.get
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree)
if (__value.serializedSize != 0) {
@@ -37,7 +37,7 @@ final case class Synthetic(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
range.foreach { __v =>
@@ -59,10 +59,10 @@ final case class Synthetic(
def clearRange: Synthetic = copy(range = _root_.scala.None)
def withRange(__v: dotty.tools.dotc.semanticdb.Range): Synthetic = copy(range = Option(__v))
def withTree(__v: dotty.tools.dotc.semanticdb.Tree): Synthetic = copy(tree = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Synthetic])
}
@@ -88,12 +88,12 @@ object Synthetic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(__tree.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.Synthetic(
range = _root_.scala.None,
tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
index f0347e86d9e3..80322ec45e0e 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -24,35 +24,35 @@ final case class TextDocument(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = schema.value
if (__value != 0) {
__size += SemanticdbOutputStream.computeEnumSize(1, __value)
}
};
-
+
{
val __value = uri
if (!__value.isEmpty) {
__size += SemanticdbOutputStream.computeStringSize(2, __value)
}
};
-
+
{
val __value = text
if (!__value.isEmpty) {
__size += SemanticdbOutputStream.computeStringSize(3, __value)
}
};
-
+
{
val __value = md5
if (!__value.isEmpty) {
__size += SemanticdbOutputStream.computeStringSize(11, __value)
}
};
-
+
{
val __value = language.value
if (__value != 0) {
@@ -84,7 +84,7 @@ final case class TextDocument(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -163,10 +163,10 @@ final case class TextDocument(
def addSynthetics(__vs: dotty.tools.dotc.semanticdb.Synthetic *): TextDocument = addAllSynthetics(__vs)
def addAllSynthetics(__vs: Iterable[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = synthetics ++ __vs)
def withSynthetics(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocument])
}
@@ -220,12 +220,12 @@ object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
synthetics = __synthetics.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocument(
schema = dotty.tools.dotc.semanticdb.Schema.LEGACY,
uri = "",
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
index 41b8e1b3f491..a35bc23bf665 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
@SerialVersionUID(0L)
@@ -29,7 +29,7 @@ final case class TextDocuments(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
documents.foreach { __v =>
@@ -43,10 +43,10 @@ final case class TextDocuments(
def addDocuments(__vs: dotty.tools.dotc.semanticdb.TextDocument *): TextDocuments = addAllDocuments(__vs)
def addAllDocuments(__vs: Iterable[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = documents ++ __vs)
def withDocuments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocuments])
}
@@ -68,12 +68,12 @@ object TextDocuments extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
documents = __documents.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocuments(
documents = _root_.scala.Seq.empty
)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
index ed84d9b2f2d0..6a19494cd65a 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
sealed trait Tree extends SemanticdbGeneratedSealedOneof derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Tree extends SemanticdbGeneratedSealedOneof derives CanEqual {
object Tree {
case object Empty extends dotty.tools.dotc.semanticdb.Tree
-
+
sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Tree
def defaultInstance: dotty.tools.dotc.semanticdb.Tree = Empty
-
+
implicit val TreeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] {
override def toCustom(__base: dotty.tools.dotc.semanticdb.TreeMessage): dotty.tools.dotc.semanticdb.Tree = __base.sealedValue match {
case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree => __v.value
@@ -96,7 +96,7 @@ final case class TreeMessage(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
sealedValue.applyTree.foreach { __v =>
@@ -166,10 +166,10 @@ final case class TreeMessage(
def withTypeApplyTree(__v: dotty.tools.dotc.semanticdb.TypeApplyTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__v))
def clearSealedValue: TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty)
def withSealedValue(__v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue): TreeMessage = copy(sealedValue = __v)
-
-
-
-
+
+
+
+
def toTree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Tree.TreeTypeMapper.toCustom(this)
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Tree])
}
@@ -206,12 +206,12 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
sealedValue = __sealedValue
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.TreeMessage(
sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
)
@@ -244,7 +244,7 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
override def number: _root_.scala.Int = 0
override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
}
-
+
@SerialVersionUID(0L)
final case class ApplyTree(value: dotty.tools.dotc.semanticdb.ApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual {
type ValueType = dotty.tools.dotc.semanticdb.ApplyTree
@@ -327,7 +327,7 @@ final case class ApplyTree(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function)
if (__value.serializedSize != 0) {
@@ -347,7 +347,7 @@ final case class ApplyTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -370,10 +370,10 @@ final case class ApplyTree(
def addArguments(__vs: dotty.tools.dotc.semanticdb.Tree *): ApplyTree = addAllArguments(__vs)
def addAllArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = arguments ++ __vs)
def withArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ApplyTree])
}
@@ -399,12 +399,12 @@ object ApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
arguments = __arguments.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ApplyTree(
function = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
arguments = _root_.scala.Seq.empty
@@ -438,7 +438,7 @@ final case class FunctionTree(
val __value = __item
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
-
+
{
val __value = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body)
if (__value.serializedSize != 0) {
@@ -454,7 +454,7 @@ final case class FunctionTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
parameters.foreach { __v =>
@@ -477,10 +477,10 @@ final case class FunctionTree(
def addAllParameters(__vs: Iterable[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = parameters ++ __vs)
def withParameters(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = __v)
def withBody(__v: dotty.tools.dotc.semanticdb.Tree): FunctionTree = copy(body = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FunctionTree])
}
@@ -506,12 +506,12 @@ object FunctionTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.FunctionTree(
parameters = _root_.scala.Seq.empty,
body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
@@ -538,7 +538,7 @@ final case class IdTree(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = symbol
if (!__value.isEmpty) {
@@ -554,7 +554,7 @@ final case class IdTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -565,10 +565,10 @@ final case class IdTree(
};
}
def withSymbol(__v: _root_.scala.Predef.String): IdTree = copy(symbol = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IdTree])
}
@@ -590,12 +590,12 @@ object IdTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sema
symbol = __symbol
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.IdTree(
symbol = ""
)
@@ -616,7 +616,7 @@ final case class LiteralTree(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant)
if (__value.serializedSize != 0) {
@@ -632,7 +632,7 @@ final case class LiteralTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -645,10 +645,10 @@ final case class LiteralTree(
};
}
def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): LiteralTree = copy(constant = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LiteralTree])
}
@@ -670,12 +670,12 @@ object LiteralTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.LiteralTree(
constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
)
@@ -699,14 +699,14 @@ final case class MacroExpansionTree(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion)
if (__value.serializedSize != 0) {
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -722,7 +722,7 @@ final case class MacroExpansionTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -744,10 +744,10 @@ final case class MacroExpansionTree(
}
def withBeforeExpansion(__v: dotty.tools.dotc.semanticdb.Tree): MacroExpansionTree = copy(beforeExpansion = __v)
def withTpe(__v: dotty.tools.dotc.semanticdb.Type): MacroExpansionTree = copy(tpe = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MacroExpansionTree])
}
@@ -773,12 +773,12 @@ object MacroExpansionTree extends SemanticdbGeneratedMessageCompanion[dotty.too
tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.MacroExpansionTree(
beforeExpansion = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -820,7 +820,7 @@ final case class OriginalTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
range.foreach { __v =>
@@ -833,10 +833,10 @@ final case class OriginalTree(
def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
def clearRange: OriginalTree = copy(range = _root_.scala.None)
def withRange(__v: dotty.tools.dotc.semanticdb.Range): OriginalTree = copy(range = Option(__v))
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.OriginalTree])
}
@@ -858,12 +858,12 @@ object OriginalTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
range = __range
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.OriginalTree(
range = _root_.scala.None
)
@@ -885,7 +885,7 @@ final case class SelectTree(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier)
if (__value.serializedSize != 0) {
@@ -905,7 +905,7 @@ final case class SelectTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -927,10 +927,10 @@ final case class SelectTree(
def getId: dotty.tools.dotc.semanticdb.IdTree = id.getOrElse(dotty.tools.dotc.semanticdb.IdTree.defaultInstance)
def clearId: SelectTree = copy(id = _root_.scala.None)
def withId(__v: dotty.tools.dotc.semanticdb.IdTree): SelectTree = copy(id = Option(__v))
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SelectTree])
}
@@ -956,12 +956,12 @@ object SelectTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
id = __id
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.SelectTree(
qualifier = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
id = _root_.scala.None
@@ -989,7 +989,7 @@ final case class TypeApplyTree(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function)
if (__value.serializedSize != 0) {
@@ -1009,7 +1009,7 @@ final case class TypeApplyTree(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1032,10 +1032,10 @@ final case class TypeApplyTree(
def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeApplyTree = addAllTypeArguments(__vs)
def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = typeArguments ++ __vs)
def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeApplyTree])
}
@@ -1061,12 +1061,12 @@ object TypeApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
typeArguments = __typeArguments.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeApplyTree(
function = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
typeArguments = _root_.scala.Seq.empty
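
Since the tree nodes above are ordinary case classes, synthetic trees compose directly; the mapper handles wrapping into `TreeMessage` only at (de)serialization time. A small sketch — the symbol strings are made up for illustration, not taken from a real document:

```scala
import dotty.tools.dotc.semanticdb.{ApplyTree, IdTree, Tree}

// A synthetic application `f(x)` expressed as semanticdb trees; the
// semanticdb symbol strings below are hypothetical.
val app: Tree = ApplyTree(
  function  = IdTree(symbol = "example/Main.f()."),
  arguments = Seq(IdTree(symbol = "local0"))
)
```
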
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
index be9cc6034f2c..8f675e82b802 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala
@@ -5,7 +5,7 @@
// Protofile syntax: PROTO3
package dotty.tools.dotc.semanticdb
-import dotty.tools.dotc.semanticdb.internal._
+import dotty.tools.dotc.semanticdb.internal.*
import scala.annotation.internal.sharable
sealed trait Type extends SemanticdbGeneratedSealedOneof derives CanEqual {
@@ -18,10 +18,10 @@ sealed trait Type extends SemanticdbGeneratedSealedOneof derives CanEqual {
object Type {
case object Empty extends dotty.tools.dotc.semanticdb.Type
-
+
sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Type
def defaultInstance: dotty.tools.dotc.semanticdb.Type = Empty
-
+
implicit val TypeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] {
override def toCustom(__base: dotty.tools.dotc.semanticdb.TypeMessage): dotty.tools.dotc.semanticdb.Type = __base.sealedValue match {
case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef => __v.value
@@ -144,7 +144,7 @@ final case class TypeMessage(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
sealedValue.typeRef.foreach { __v =>
@@ -278,10 +278,10 @@ final case class TypeMessage(
def withLambdaType(__v: dotty.tools.dotc.semanticdb.LambdaType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v))
def clearSealedValue: TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty)
def withSealedValue(__v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue): TypeMessage = copy(sealedValue = __v)
-
-
-
-
+
+
+
+
def toType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Type.TypeTypeMapper.toCustom(this)
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Type])
}
@@ -334,12 +334,12 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
sealedValue = __sealedValue
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeMessage(
sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
)
@@ -388,7 +388,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc
override def number: _root_.scala.Int = 0
override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
}
-
+
@SerialVersionUID(0L)
final case class TypeRef(value: dotty.tools.dotc.semanticdb.TypeRef) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual {
type ValueType = dotty.tools.dotc.semanticdb.TypeRef
@@ -536,14 +536,14 @@ final case class TypeRef(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix)
if (__value.serializedSize != 0) {
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
};
-
+
{
val __value = symbol
if (!__value.isEmpty) {
@@ -563,7 +563,7 @@ final case class TypeRef(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -593,10 +593,10 @@ final case class TypeRef(
def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeRef = addAllTypeArguments(__vs)
def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = typeArguments ++ __vs)
def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeRef])
}
@@ -626,12 +626,12 @@ object TypeRef extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sem
typeArguments = __typeArguments.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeRef(
prefix = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
symbol = "",
@@ -665,14 +665,14 @@ final case class SingleType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix)
if (__value.serializedSize != 0) {
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
};
-
+
{
val __value = symbol
if (!__value.isEmpty) {
@@ -688,7 +688,7 @@ final case class SingleType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -708,10 +708,10 @@ final case class SingleType(
}
def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SingleType = copy(prefix = __v)
def withSymbol(__v: _root_.scala.Predef.String): SingleType = copy(symbol = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SingleType])
}
@@ -737,12 +737,12 @@ object SingleType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
symbol = __symbol
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.SingleType(
prefix = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
symbol = ""
@@ -769,7 +769,7 @@ final case class ThisType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = symbol
if (!__value.isEmpty) {
@@ -785,7 +785,7 @@ final case class ThisType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -796,10 +796,10 @@ final case class ThisType(
};
}
def withSymbol(__v: _root_.scala.Predef.String): ThisType = copy(symbol = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ThisType])
}
@@ -821,12 +821,12 @@ object ThisType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se
symbol = __symbol
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ThisType(
symbol = ""
)
@@ -848,14 +848,14 @@ final case class SuperType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix)
if (__value.serializedSize != 0) {
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
};
-
+
{
val __value = symbol
if (!__value.isEmpty) {
@@ -871,7 +871,7 @@ final case class SuperType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -891,10 +891,10 @@ final case class SuperType(
}
def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SuperType = copy(prefix = __v)
def withSymbol(__v: _root_.scala.Predef.String): SuperType = copy(symbol = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SuperType])
}
@@ -920,12 +920,12 @@ object SuperType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
symbol = __symbol
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.SuperType(
prefix = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
symbol = ""
@@ -952,7 +952,7 @@ final case class ConstantType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant)
if (__value.serializedSize != 0) {
@@ -968,7 +968,7 @@ final case class ConstantType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -981,10 +981,10 @@ final case class ConstantType(
};
}
def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): ConstantType = copy(constant = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ConstantType])
}
@@ -1006,12 +1006,12 @@ object ConstantType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantType(
constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
)
@@ -1047,7 +1047,7 @@ final case class IntersectionType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
types.foreach { __v =>
@@ -1061,10 +1061,10 @@ final case class IntersectionType(
def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): IntersectionType = addAllTypes(__vs)
def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = types ++ __vs)
def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntersectionType])
}
@@ -1086,12 +1086,12 @@ object IntersectionType extends SemanticdbGeneratedMessageCompanion[dotty.tools
types = __types.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntersectionType(
types = _root_.scala.Seq.empty
)
@@ -1127,7 +1127,7 @@ final case class UnionType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
types.foreach { __v =>
@@ -1141,10 +1141,10 @@ final case class UnionType(
def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): UnionType = addAllTypes(__vs)
def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = types ++ __vs)
def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnionType])
}
@@ -1166,12 +1166,12 @@ object UnionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
types = __types.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnionType(
types = _root_.scala.Seq.empty
)
@@ -1207,7 +1207,7 @@ final case class WithType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
types.foreach { __v =>
@@ -1221,10 +1221,10 @@ final case class WithType(
def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): WithType = addAllTypes(__vs)
def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = types ++ __vs)
def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.WithType])
}
@@ -1246,12 +1246,12 @@ object WithType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se
types = __types.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.WithType(
types = _root_.scala.Seq.empty
)
@@ -1275,7 +1275,7 @@ final case class StructuralType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -1295,7 +1295,7 @@ final case class StructuralType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1317,10 +1317,10 @@ final case class StructuralType(
def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
def clearDeclarations: StructuralType = copy(declarations = _root_.scala.None)
def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): StructuralType = copy(declarations = Option(__v))
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StructuralType])
}
@@ -1346,12 +1346,12 @@ object StructuralType extends SemanticdbGeneratedMessageCompanion[dotty.tools.d
declarations = __declarations
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.StructuralType(
tpe = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
declarations = _root_.scala.None
@@ -1383,7 +1383,7 @@ final case class AnnotatedType(
val __value = __item
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
-
+
{
val __value = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -1399,7 +1399,7 @@ final case class AnnotatedType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1422,10 +1422,10 @@ final case class AnnotatedType(
def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = annotations ++ __vs)
def withAnnotations(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = __v)
def withTpe(__v: dotty.tools.dotc.semanticdb.Type): AnnotatedType = copy(tpe = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.AnnotatedType])
}
@@ -1451,12 +1451,12 @@ object AnnotatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.AnnotatedType(
annotations = _root_.scala.Seq.empty,
tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -1484,7 +1484,7 @@ final case class ExistentialType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -1504,7 +1504,7 @@ final case class ExistentialType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1526,10 +1526,10 @@ final case class ExistentialType(
def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
def clearDeclarations: ExistentialType = copy(declarations = _root_.scala.None)
def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ExistentialType = copy(declarations = Option(__v))
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ExistentialType])
}
@@ -1555,12 +1555,12 @@ object ExistentialType extends SemanticdbGeneratedMessageCompanion[dotty.tools.
declarations = __declarations
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ExistentialType(
tpe = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
declarations = _root_.scala.None
@@ -1592,7 +1592,7 @@ final case class UniversalType(
val __value = typeParameters.get
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -1608,7 +1608,7 @@ final case class UniversalType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1630,10 +1630,10 @@ final case class UniversalType(
def clearTypeParameters: UniversalType = copy(typeParameters = _root_.scala.None)
def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): UniversalType = copy(typeParameters = Option(__v))
def withTpe(__v: dotty.tools.dotc.semanticdb.Type): UniversalType = copy(tpe = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UniversalType])
}
@@ -1659,12 +1659,12 @@ object UniversalType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do
tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.UniversalType(
typeParameters = _root_.scala.None,
tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -1691,7 +1691,7 @@ final case class ByNameType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -1707,7 +1707,7 @@ final case class ByNameType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1720,10 +1720,10 @@ final case class ByNameType(
};
}
def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ByNameType = copy(tpe = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByNameType])
}
@@ -1745,12 +1745,12 @@ object ByNameType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByNameType(
tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
)
@@ -1773,7 +1773,7 @@ final case class RepeatedType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe)
if (__value.serializedSize != 0) {
@@ -1789,7 +1789,7 @@ final case class RepeatedType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1802,10 +1802,10 @@ final case class RepeatedType(
};
}
def withTpe(__v: dotty.tools.dotc.semanticdb.Type): RepeatedType = copy(tpe = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.RepeatedType])
}
@@ -1827,12 +1827,12 @@ object RepeatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot
tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.RepeatedType(
tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
)
@@ -1856,7 +1856,7 @@ final case class MatchType(
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toBase(scrutinee)
if (__value.serializedSize != 0) {
@@ -1876,7 +1876,7 @@ final case class MatchType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1899,10 +1899,10 @@ final case class MatchType(
def addCases(__vs: dotty.tools.dotc.semanticdb.MatchType.CaseType *): MatchType = addAllCases(__vs)
def addAllCases(__vs: Iterable[dotty.tools.dotc.semanticdb.MatchType.CaseType]): MatchType = copy(cases = cases ++ __vs)
def withCases(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.MatchType.CaseType]): MatchType = copy(cases = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType])
}
@@ -1928,12 +1928,12 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
cases = __cases.result()
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.MatchType(
scrutinee = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
cases = _root_.scala.Seq.empty
@@ -1947,14 +1947,14 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
-
+
{
val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toBase(key)
if (__value.serializedSize != 0) {
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toBase(body)
if (__value.serializedSize != 0) {
@@ -1970,7 +1970,7 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
{
@@ -1992,13 +1992,13 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
}
def withKey(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(key = __v)
def withBody(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(body = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType.CaseType])
}
-
+
object CaseType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] {
implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] = this
def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MatchType.CaseType = {
@@ -2021,12 +2021,12 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
body = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.MatchType.CaseType(
key = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
body = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
@@ -2046,7 +2046,7 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s
)
// @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType])
}
-
+
final val SCRUTINEE_FIELD_NUMBER = 1
final val CASES_FIELD_NUMBER = 2
@transient @sharable
@@ -2074,7 +2074,7 @@ final case class LambdaType(
val __value = parameters.get
__size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
-
+
{
val __value = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType)
if (__value.serializedSize != 0) {
@@ -2090,7 +2090,7 @@ final case class LambdaType(
__serializedSizeMemoized = __size
}
__size - 1
-
+
}
def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
parameters.foreach { __v =>
@@ -2112,10 +2112,10 @@ final case class LambdaType(
def clearParameters: LambdaType = copy(parameters = _root_.scala.None)
def withParameters(__v: dotty.tools.dotc.semanticdb.Scope): LambdaType = copy(parameters = Option(__v))
def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): LambdaType = copy(returnType = __v)
-
-
-
-
+
+
+
+
// @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LambdaType])
}
@@ -2141,12 +2141,12 @@ object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.
returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
)
}
-
-
-
-
-
-
+
+
+
+
+
+
lazy val defaultInstance = dotty.tools.dotc.semanticdb.LambdaType(
parameters = _root_.scala.None,
returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
index 8aed9e5b9771..699f85c0e303 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
@@ -7,7 +7,7 @@ import java.io.InputStream
import java.util.Arrays
import java.nio.charset.StandardCharsets
-import SemanticdbInputStream._
+import SemanticdbInputStream.*
import scala.collection.mutable
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala
index 37da7f868e25..359e861225b0 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala
@@ -7,7 +7,7 @@ import java.io.OutputStream
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
-import SemanticdbOutputStream._
+import SemanticdbOutputStream.*
object SemanticdbOutputStream {
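
// Aside: the computeUInt32SizeNoTag calls that dominate the generated code
// above measure standard protobuf varints — 7 payload bits per byte, with the
// high bit marking continuation. A sketch of the size computation, assuming
// the standard encoding:
def varintSizeNoTag(value: Int): Int =
  if (value & ~0x7f) == 0 then 1
  else if (value & ~0x3fff) == 0 then 2
  else if (value & ~0x1fffff) == 0 then 3
  else if (value & ~0xfffffff) == 0 then 4
  else 5                              // negative values always take 5 bytes
// varintSizeNoTag(127) == 1, varintSizeNoTag(128) == 2
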
diff --git a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala
index 89d8f9a80ee3..85f2e84429c3 100644
--- a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala
+++ b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala
@@ -2,19 +2,19 @@ package dotty.tools.dotc
package staging
import dotty.tools.dotc.ast.{tpd, untpd}
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.NameKinds._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.NameKinds.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.quoted.QuotePatterns
import dotty.tools.dotc.staging.QuoteTypeTags.*
import dotty.tools.dotc.staging.StagingLevel.*
import dotty.tools.dotc.util.Property
-import dotty.tools.dotc.util.Spans._
+import dotty.tools.dotc.util.Spans.*
import dotty.tools.dotc.util.SrcPos
/** Checks that staging level consistency holds and heals staged types.
@@ -52,7 +52,7 @@ import dotty.tools.dotc.util.SrcPos
*
*/
class CrossStageSafety extends TreeMapWithStages {
- import tpd._
+ import tpd.*
private val InAnnotation = Property.Key[Unit]()
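
// Aside: the level discipline CrossStageSafety enforces, seen from ordinary
// user-level quoted code (self-contained example, not compiler internals):
import scala.quoted.*

def plusOne(x: Expr[Int])(using Quotes): Expr[Int] =
  '{ $x + 1 }                  // ok: x (level 0) is spliced back into the quote

def lifted(n: Int)(using Quotes): Expr[Int] =
  '{ ${ Expr(n) } + 1 }        // ok: n is lifted to an Expr before being spliced
// '{ n + 1 } would be rejected: n is defined at level 0 but used at level 1
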
diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala
index 7d3ca0ad2f63..2469bd73bdcb 100644
--- a/compiler/src/dotty/tools/dotc/staging/HealType.scala
+++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala
@@ -1,15 +1,15 @@
package dotty.tools.dotc
package staging
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.staging.StagingLevel.*
import dotty.tools.dotc.staging.QuoteTypeTags.*
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.typer.Implicits.SearchFailureType
import dotty.tools.dotc.util.SrcPos
diff --git a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala
index 0b5032ea5a6d..467f1f440fd6 100644
--- a/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala
+++ b/compiler/src/dotty/tools/dotc/staging/QuoteTypeTags.scala
@@ -1,10 +1,10 @@
package dotty.tools.dotc.staging
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.staging.StagingLevel.*
import dotty.tools.dotc.util.Property
diff --git a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala
index 05b3efab408c..0a229881804e 100644
--- a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala
+++ b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala
@@ -1,11 +1,11 @@
package dotty.tools.dotc
package staging
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.util.Property
import dotty.tools.dotc.util.SrcPos
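
// Aside: a toy model of the bookkeeping StagingLevel provides — quotes raise
// the staging level by one, splices lower it, and top-level code is level 0
// (sketch only; the real implementation tracks the level in the Context):
def levelAfter(path: List[String]): Int =
  path.foldLeft(0) {
    case (lvl, "quote")  => lvl + 1
    case (lvl, "splice") => lvl - 1
    case (lvl, _)        => lvl
  }
// levelAfter(List("quote"))           == 1
// levelAfter(List("quote", "splice")) == 0
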
diff --git a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala
index 674dfff2f642..c2607f3daa68 100644
--- a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala
+++ b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala
@@ -3,16 +3,16 @@ package staging
import dotty.tools.dotc.ast.{TreeMapWithImplicits, tpd}
import dotty.tools.dotc.config.Printers.staging
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.staging.StagingLevel.*
import scala.collection.mutable
/** TreeMap that keeps track of staging levels using StagingLevel. */
abstract class TreeMapWithStages extends TreeMapWithImplicits {
- import tpd._
+ import tpd.*
override def transform(tree: Tree)(using Context): Tree =
if (tree.source != ctx.source && tree.source.exists)
diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala
index 3175ffceae49..c5ffde140bd6 100644
--- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala
+++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala
@@ -2,15 +2,15 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._
-import Symbols._
-import Flags._
-import Names._
-import NameOps._
-import Decorators._
-import TypeUtils._
-import Types._
+import core.*
+import Contexts.*
+import Symbols.*
+import Flags.*
+import Names.*
+import NameOps.*
+import Decorators.*
+import TypeUtils.*
+import Types.*
import util.Spans.Span
import config.Printers.transforms
@@ -18,8 +18,8 @@ import config.Printers.transforms
* inline accessors and protected accessors.
*/
abstract class AccessProxies {
- import ast.tpd._
- import AccessProxies._
+ import ast.tpd.*
+ import AccessProxies.*
/** accessor -> accessed */
private val accessedBy = MutableSymbolMap[Symbol]()
@@ -64,7 +64,7 @@ abstract class AccessProxies {
}
trait Insert {
- import ast.tpd._
+ import ast.tpd.*
/** The name of the accessor for definition with given `name` in given `site` */
def accessorNameOf(name: TermName, site: Symbol)(using Context): TermName
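
// Aside: what makes such accessors necessary, in user-level terms. An inline
// method is expanded at the call site, so a private member referenced in its
// body must stay reachable from other classes; the compiler routes the access
// through a synthetic accessor instead of widening the member's visibility.
// Hypothetical example:
class Secrets:
  private val seed = 42
  inline def twice: Int = seed * 2   // expansion sites reach `seed` via a
                                     // generated inline accessor
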
diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala
index 872c7cc897de..6ece8ad63808 100644
--- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
package transform
-import core._
-import MegaPhase._
-import Contexts._
-import Symbols._
-import Flags._
-import StdNames._
+import core.*
+import MegaPhase.*
+import Contexts.*
+import Symbols.*
+import Flags.*
+import StdNames.*
import dotty.tools.dotc.ast.tpd
@@ -16,7 +16,7 @@ import dotty.tools.dotc.ast.tpd
* Transforms `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]`
*/
class ArrayApply extends MiniPhase {
- import tpd._
+ import tpd.*
override def phaseName: String = ArrayApply.name
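
// Aside: the rewrite in source-level terms. `Array(1, 2, 3)` is a varargs
// call that would otherwise wrap its arguments in a Seq and copy them into a
// fresh array; the phase keeps only the underlying array literal (conceptual
// before/after in the comments):
val xs: Array[Int] = Array(1, 2, 3)
// before: scala.Array.apply(1, 2, 3)  -- builds a wrapped varargs Seq, then copies
// after:  the JVM int array {1, 2, 3} is created directly
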
diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
index e783961649dd..b0106f0d2ff3 100644
--- a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
package transform
-import core._
-import MegaPhase._
-import Contexts._
-import Symbols._
-import Types._
-import StdNames._
+import core.*
+import MegaPhase.*
+import Contexts.*
+import Symbols.*
+import Types.*
+import StdNames.*
import dotty.tools.dotc.ast.tpd
@@ -19,7 +19,7 @@ import scala.collection.immutable.::
 * Additionally, it optimizes calls to the scala.Array.ofDim functions by replacing them with calls to newArray with specific dimensions
*/
class ArrayConstructors extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ArrayConstructors.name
diff --git a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala
index 0c1f40d4f2bd..7e21703f67ee 100644
--- a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala
+++ b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala
@@ -1,19 +1,19 @@
package dotty.tools.dotc
package transform
-import core._
-import ast.tpd._
-import Annotations._
-import Contexts._
+import core.*
+import ast.tpd.*
+import Annotations.*
+import Contexts.*
import Symbols.*
import SymUtils.*
-import Decorators._
-import Flags._
-import Names._
-import Types._
-import util.Spans._
+import Decorators.*
+import Flags.*
+import Names.*
+import Types.*
+import util.Spans.*
-import DenotTransformers._
+import DenotTransformers.*
class BeanProperties(thisPhase: DenotTransformer):
def addBeanMethods(impl: Template)(using Context): Template =
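
// Aside: the bean methods this phase synthesizes, seen from user code
// (scala.beans.BeanProperty is the standard library annotation):
import scala.beans.BeanProperty

class Person(@BeanProperty var name: String)
// generated, in Java-bean style:
//   def getName(): String        = name
//   def setName(x: String): Unit = { name = x }
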
diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala
index ad36544cdec0..653a5e17990f 100644
--- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala
+++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala
@@ -2,10 +2,10 @@ package dotty.tools
package dotc
package transform
-import core._
-import Flags._
-import MegaPhase._
-import Symbols._, Contexts._, Types._, Decorators._
+import core.*
+import Flags.*
+import MegaPhase.*
+import Symbols.*, Contexts.*, Types.*, Decorators.*
import StdNames.nme
import ast.TreeTypeMap
@@ -33,7 +33,7 @@ import scala.collection.mutable.ListBuffer
* return context functions. See i6375.scala.
*/
class BetaReduce extends MiniPhase:
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = BetaReduce.name
@@ -45,7 +45,7 @@ class BetaReduce extends MiniPhase:
app1
object BetaReduce:
- import ast.tpd._
+ import ast.tpd.*
val name: String = "betaReduce"
val description: String = "reduce closure applications"
diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala
index 94f7b405c027..2110ac1464c2 100644
--- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala
@@ -2,9 +2,9 @@ package dotty.tools
package dotc
package transform
-import core._
-import Symbols._, Types._, Contexts._, Decorators._, Flags._, Scopes._, Phases._
-import DenotTransformers._
+import core.*
+import Symbols.*, Types.*, Contexts.*, Decorators.*, Flags.*, Scopes.*, Phases.*
+import DenotTransformers.*
import ast.untpd
import collection.{mutable, immutable}
import util.SrcPos
@@ -16,7 +16,7 @@ import Erasure.Boxing.adaptClosure
/** A helper class for generating bridge methods in class `root`. */
class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) {
- import ast.tpd._
+ import ast.tpd.*
assert(ctx.phase == erasurePhase.next)
private val preErasureCtx = ctx.withPhase(erasurePhase)
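
// Aside: why bridges are needed, with a plain erasure example. After erasure
// the override relation below only holds if a forwarder with the erased
// signature is added:
class Box[T](value: T):
  def get: T = value                       // erases to  get(): Object
class StringBox(s: String) extends Box[String](s):
  override def get: String = s             // erases to  get(): String
// the compiler emits a bridge in StringBox:  def get(): Object = this.get: String
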
diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
index 8964beb26051..c1725cbd0255 100644
--- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
@@ -1,35 +1,32 @@
package dotty.tools.dotc
package transform
-import MegaPhase._
-import core.DenotTransformers._
-import core.Symbols._
-import core.Contexts._
-import core.Flags._
-import core.Decorators._
+import MegaPhase.*
+import core.DenotTransformers.*
+import core.Symbols.*
+import core.Contexts.*
+import core.Flags.*
+import core.Decorators.*
import core.StdNames.nme
-import core.Names._
+import core.Names.*
import core.NameKinds.TempResultName
-import core.Constants._
+import core.Constants.*
import util.Store
import dotty.tools.uncheckedNN
+import ast.tpd.*
+import compiletime.uninitialized
/** This phase translates variables that are captured in closures to
* heap-allocated refs.
*/
class CapturedVars extends MiniPhase with IdentityDenotTransformer:
thisPhase =>
- import ast.tpd._
override def phaseName: String = CapturedVars.name
override def description: String = CapturedVars.description
- private[this] var Captured: Store.Location[util.ReadOnlySet[Symbol]] = _
- private def captured(using Context) = ctx.store(Captured)
-
- override def initContext(ctx: FreshContext): Unit =
- Captured = ctx.addLocation(util.ReadOnlySet.empty)
+ private val captured = util.HashSet[Symbol]()
private class RefInfo(using Context) {
/** The classes for which a Ref type exists. */
@@ -55,33 +52,10 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer:
myRefInfo.uncheckedNN
}
- private class CollectCaptured extends TreeTraverser {
- private val captured = util.HashSet[Symbol]()
- def traverse(tree: Tree)(using Context) = tree match {
- case id: Ident =>
- val sym = id.symbol
- if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) {
- val enclMeth = ctx.owner.enclosingMethod
- if (sym.enclosingMethod != enclMeth) {
- report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth")
- captured += sym
- }
- }
- case _ =>
- traverseChildren(tree)
- }
- def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = {
- traverse(tree)
- captured
- }
- }
-
- override def prepareForUnit(tree: Tree)(using Context): Context = {
- val captured = atPhase(thisPhase) {
- CollectCaptured().runOver(ctx.compilationUnit.tpdTree)
- }
- ctx.fresh.updateStore(Captured, captured)
- }
+ override def prepareForUnit(tree: Tree)(using Context): Context =
+ captured.clear()
+ atPhase(thisPhase)(CapturedVars.collect(captured)).traverse(tree)
+ ctx
/** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`,
* depending on whether the reference should be @volatile
@@ -141,3 +115,16 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer:
object CapturedVars:
val name: String = "capturedVars"
val description: String = "represent vars captured by closures as heap objects"
+
+ def collect(captured: util.HashSet[Symbol]): TreeTraverser = new:
+ def traverse(tree: Tree)(using Context) = tree match
+ case id: Ident =>
+ val sym = id.symbol
+ if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then
+ val enclMeth = ctx.owner.enclosingMethod
+ if sym.enclosingMethod != enclMeth then
+ report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth")
+ captured += sym
+ case _ =>
+ traverseChildren(tree)
+end CapturedVars
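
// Aside: the translation this phase performs, sketched at source level. The
// runtime Ref classes are real (scala.runtime.IntRef et al.); the lowered
// form below is conceptual, not literal compiler output:
def counter(): () => Int =
  var n = 0                                // mutable local captured by a closure
  () => { n += 1; n }
// after capturedVars, conceptually:
def counterLowered(): () => Int =
  val n = scala.runtime.IntRef.create(0)   // heap cell replaces the captured var
  () => { n.elem += 1; n.elem }
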
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala
index 7c8082265161..8625d2dbb289 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala
@@ -30,7 +30,7 @@ object CheckLoopingImplicits:
*/
class CheckLoopingImplicits extends MiniPhase:
thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = CheckLoopingImplicits.name
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala
index f43d000bbf44..127bd57b1bf2 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala
@@ -15,7 +15,7 @@ object CheckNoSuperThis:
/** Checks that super and this calls do not pass `this` as (part of) an argument. */
class CheckNoSuperThis extends MiniPhase:
thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = CheckNoSuperThis.name
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
index b63773687f74..073086ac5e2c 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
package transform
-import core._
-import dotty.tools.dotc.transform.MegaPhase._
-import Flags._
-import Contexts._
-import Symbols._
-import Decorators._
+import core.*
+import dotty.tools.dotc.transform.MegaPhase.*
+import Flags.*
+import Contexts.*
+import Symbols.*
+import Decorators.*
/** A no-op transform that checks whether the compiled sources are re-entrant.
* If -Ycheck:reentrant is set, the phase makes sure that there are no variables
@@ -27,7 +27,7 @@ import Decorators._
* for immutable array.
*/
class CheckReentrant extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = CheckReentrant.name
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala
index ae69c1596009..fdc055df9ac4 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala
@@ -138,7 +138,7 @@ class CheckShadowing extends MiniPhase:
}
private def nestedTypeTraverser(parent: Symbol) = new TreeTraverser:
- import tpd._
+ import tpd.*
override def traverse(tree: tpd.Tree)(using Context): Unit =
tree match
@@ -154,7 +154,7 @@ class CheckShadowing extends MiniPhase:
// To reach the imports during a miniphase traversal
private def importTraverser = new TreeTraverser:
- import tpd._
+ import tpd.*
override def traverse(tree: tpd.Tree)(using Context): Unit =
tree match
@@ -173,7 +173,7 @@ object CheckShadowing:
val description = "check for elements shadowing other elements in scope"
private class ShadowingData:
- import dotty.tools.dotc.transform.CheckShadowing.ShadowingData._
+ import dotty.tools.dotc.transform.CheckShadowing.ShadowingData.*
import collection.mutable.{Set => MutSet, Map => MutMap, Stack => MutStack}
private val rootImports = MutSet[SingleDenotation]()
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
index 0d5154e212ee..2b616bad0a01 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
@@ -1,15 +1,15 @@
package dotty.tools.dotc
package transform
-import core._
-import dotty.tools.dotc.transform.MegaPhase._
-import Flags._
-import Contexts._
-import Symbols._
+import core.*
+import dotty.tools.dotc.transform.MegaPhase.*
+import Flags.*
+import Contexts.*
+import Symbols.*
import dotty.tools.dotc.ast.tpd
-import reporting._
+import reporting.*
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
/** A transformer that checks that the requirements of `@static` fields/methods are implemented:
* 1. Only objects can have members annotated with `@static`
@@ -24,7 +24,7 @@ import dotty.tools.dotc.transform.SymUtils._
* 6. `@static` Lazy vals are currently unsupported.
*/
class CheckStatic extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = CheckStatic.name
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala
index 073626b4b5c6..7cff6fa5f1f0 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala
@@ -206,7 +206,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke
* corresponding context property
*/
private def traverser = new TreeTraverser:
- import tpd._
+ import tpd.*
import UnusedData.ScopeType
    /* Register every import, definition and usage */
@@ -607,14 +607,14 @@ object CheckUnused:
* package a:
* val x: Int = 0
* package b:
- * import a._ // no warning
+ * import a.* // no warning
* }}}
* --- WITH OBJECT : OK ---
* {{{
* object a:
* val x: Int = 0
* object b:
- * import a._ // unused warning
+ * import a.* // unused warning
* }}}
*/
private def isConstructorOfSynth(sym: Symbol)(using Context): Boolean =
@@ -643,8 +643,8 @@ object CheckUnused:
sel.isWildcard ||
imp.expr.tpe.member(sel.name.toTermName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit)) ||
imp.expr.tpe.member(sel.name.toTypeName).alternatives.exists(_.symbol.isOneOf(GivenOrImplicit))
- )
-
+ )
+
/**
* Ignore CanEqual imports
*/
@@ -655,7 +655,7 @@ object CheckUnused:
/**
* Ignore definitions of CanEqual given
- */
+ */
private def isDefIgnored(memDef: tpd.MemberDef)(using Context): Boolean =
memDef.symbol.isOneOf(GivenOrImplicit) && memDef.symbol.typeRef.baseClasses.exists(_.derivesFrom(defn.CanEqualClass))
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
index 179625759b10..5534947c6799 100644
--- a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import ast.tpd
-import MegaPhase._
-import Contexts._
-import Symbols._
-import Phases._
+import MegaPhase.*
+import Contexts.*
+import Symbols.*
+import Phases.*
import dotty.tools.io.JarArchive
import dotty.tools.backend.jvm.GenBCode
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
index 7b89c8785e05..22739dc528c8 100644
--- a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
@@ -1,11 +1,11 @@
package dotty.tools.dotc.transform
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.Symbols.Symbol
import dotty.tools.dotc.transform.MegaPhase.MiniPhase
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import scala.collection.mutable
@@ -40,7 +40,7 @@ object CollectNullableFields {
* - defined in the same class as the lazy val
*/
class CollectNullableFields extends MiniPhase {
- import tpd._
+ import tpd.*
override def phaseName: String = CollectNullableFields.name
diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala
index b7e8ccf4e7e1..eac0b9f05c60 100644
--- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala
@@ -2,18 +2,18 @@ package dotty.tools
package dotc
package transform
-import core._
-import Names._
+import core.*
+import Names.*
import StdNames.nme
-import Types._
-import dotty.tools.dotc.transform.MegaPhase._
-import Flags._
-import Contexts._
-import Symbols._
-import Constants._
-import Decorators._
-import DenotTransformers._
-import SymUtils._
+import Types.*
+import dotty.tools.dotc.transform.MegaPhase.*
+import Flags.*
+import Contexts.*
+import Symbols.*
+import Constants.*
+import Decorators.*
+import DenotTransformers.*
+import SymUtils.*
object CompleteJavaEnums {
@@ -29,8 +29,8 @@ object CompleteJavaEnums {
* case to the java.lang.Enum class.
*/
class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase =>
- import CompleteJavaEnums._
- import ast.tpd._
+ import CompleteJavaEnums.*
+ import ast.tpd.*
override def phaseName: String = CompleteJavaEnums.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala
index 4dd7205e4ee0..9df98292fe84 100644
--- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala
@@ -1,19 +1,19 @@
package dotty.tools.dotc
package transform
-import core._
-import MegaPhase._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.StdNames._
-import ast._
-import Flags._
+import core.*
+import MegaPhase.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.StdNames.*
+import ast.*
+import Flags.*
import Names.Name
-import NameOps._
+import NameOps.*
import NameKinds.{FieldName, ExplicitFieldName}
-import SymUtils._
-import Symbols._
-import Decorators._
-import DenotTransformers._
+import SymUtils.*
+import Symbols.*
+import Decorators.*
+import DenotTransformers.*
import collection.mutable
object Constructors {
@@ -28,7 +28,7 @@ object Constructors {
* into the constructor if possible.
*/
class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = Constructors.name
@@ -352,7 +352,7 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase =
val expandedConstr =
if (cls.isAllOf(NoInitsTrait)) {
assert(finalConstrStats.isEmpty || {
- import dotty.tools.dotc.transform.sjs.JSSymUtils._
+ import dotty.tools.dotc.transform.sjs.JSSymUtils.*
ctx.settings.scalajs.value && cls.isJSType
})
constr
diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
index 01a77427698a..4719b5f5f6f3 100644
--- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
@@ -2,11 +2,11 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._, Symbols._, Types._, Annotations._, Constants._, Phases._
+import core.*
+import Contexts.*, Symbols.*, Types.*, Annotations.*, Constants.*, Phases.*
import StdNames.nme
import ast.untpd
-import ast.tpd._
+import ast.tpd.*
import config.Config
object ContextFunctionResults:
diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala
index 27f34891fc2c..d443e31fdc39 100644
--- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc.transform
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.ContextOps._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.ContextOps.*
import dotty.tools.dotc.typer.Docstrings
class CookComments extends MegaPhase.MiniPhase {
diff --git a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala
index 91b5bc6a3de4..b5c02347d5d2 100644
--- a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import MegaPhase.MiniPhase
-import dotty.tools.dotc.core.Contexts._
-import ast._
-import Flags._
-import Symbols._
+import dotty.tools.dotc.core.Contexts.*
+import ast.*
+import Flags.*
+import Symbols.*
import ExplicitOuter.isOuterParamAccessor
import collection.mutable
@@ -33,7 +33,7 @@ object CountOuterAccesses:
*/
class CountOuterAccesses extends MiniPhase:
thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = CountOuterAccesses.name
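
// Aside: the kind of access being counted, in user-level terms. An inner
// class that touches a member of its enclosing instance needs an outer
// pointer; one that does not can have the pointer (and its accessor) dropped:
class Outer:
  private val label = "out"
  class NeedsOuter:
    def show: String = label     // reads Outer.this.label => outer pointer kept
  class Standalone:
    def show: String = "inner"   // no outer access => pointer is droppable
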
diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala
index 808cf928ecc2..89161cc8c013 100644
--- a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala
@@ -1,7 +1,9 @@
package dotty.tools.dotc
package transform
-import core.Contexts._
+import core.Contexts.*
+
+import scala.compiletime.uninitialized
/** Utility class for lazy values whose evaluation depends on a context.
* This should be used whenever the evaluation of a lazy expression
@@ -12,7 +14,7 @@ import core.Contexts._
 * the expression initializing the lazy val depends only on the root context, and not on any changes afterwards.
*/
class CtxLazy[T](expr: Context ?=> T) {
- private var myValue: T = _
+ private var myValue: T = uninitialized
private var forced = false
def apply()(using Context): T = {
if (!forced) {
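
// Aside: the `uninitialized` change recurs throughout this patch — Scala 3
// deprecates `var x: T = _`, and scala.compiletime.uninitialized is the
// explicit replacement with the same default-initialized-field semantics.
// A hypothetical standalone example:
import scala.compiletime.uninitialized

class OnceCell[T]:
  private var value: T = uninitialized     // was written `= _` in Scala 2 style
  private var filled = false
  def getOrElseUpdate(compute: => T): T =
    if !filled then
      value = compute
      filled = true
    value
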
diff --git a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala
index 08189e107d94..9b1b931e0320 100644
--- a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala
@@ -6,6 +6,7 @@ import Symbols.*, Contexts.*, Types.*, Flags.*, Decorators.*
import SymUtils.*
import collection.mutable.{LinkedHashMap, LinkedHashSet}
import annotation.constructorOnly
+import scala.compiletime.uninitialized
import dotty.tools.backend.sjs.JSDefinitions.jsdefn
@@ -13,7 +14,7 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn
* `freeVars`, `tracked`, and `logicalOwner`.
*/
abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Context):
- import ast.tpd._
+ import ast.tpd.*
/** The symbol is a method or a lazy val that will be mapped to a method */
protected def isExpr(sym: Symbol)(using Context): Boolean
@@ -51,10 +52,10 @@ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Co
private val logicOwner = new LinkedHashMap[Symbol, Symbol]
/** A flag to indicate whether new free variables have been found */
- private var changedFreeVars: Boolean = _
+ private var changedFreeVars: Boolean = uninitialized
/** A flag to indicate whether lifted owners have changed */
- private var changedLogicOwner: Boolean = _
+ private var changedLogicOwner: Boolean = uninitialized
private def newSymSet: LinkedHashSet[Symbol] = new LinkedHashSet[Symbol]
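
// Aside: the free-variable information this class computes, at source level —
// a nested method referring to bindings of its enclosing method has those
// bindings as free variables, which lambda lifting must thread through:
def outer(x: Int): Int =
  val y = x * 2
  def inner(z: Int): Int = x + y + z   // freeVars(inner) = {x, y}
  inner(1)
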
diff --git a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala
index 9f94a8c13a52..5f26a6af6c3c 100644
--- a/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala
+++ b/compiler/src/dotty/tools/dotc/transform/DropBreaks.scala
@@ -6,9 +6,9 @@ import ast.{Trees, tpd}
import core.*
import Decorators.*
import NameKinds.BoundaryName
-import MegaPhase._
-import Types._, Contexts._, Flags._, DenotTransformers._
-import Symbols._, StdNames._, Trees._
+import MegaPhase.*
+import Types.*, Contexts.*, Flags.*, DenotTransformers.*
+import Symbols.*, StdNames.*, Trees.*
import util.Property
import Constants.Constant
import Flags.MethodOrLazy
@@ -41,7 +41,7 @@ object DropBreaks:
class DropBreaks extends MiniPhase:
import DropBreaks.*
- import tpd._
+ import tpd.*
override def phaseName: String = DropBreaks.name
diff --git a/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
index 13adcf5c3f76..1a5cb0dfba47 100644
--- a/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
+++ b/compiler/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
@@ -1,17 +1,17 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import DenotTransformers.SymTransformer
import Phases.Phase
-import Contexts._
-import Flags._
-import Symbols._
+import Contexts.*
+import Flags.*
+import Symbols.*
import SymDenotations.SymDenotation
-import ast.Trees._
+import ast.Trees.*
import collection.mutable
-import Decorators._
-import NameOps._
+import Decorators.*
+import NameOps.*
import MegaPhase.MiniPhase
import dotty.tools.dotc.transform.MegaPhase.TransformerInfo
@@ -29,7 +29,7 @@ import dotty.tools.dotc.transform.MegaPhase.TransformerInfo
* at their destination.
*/
class DropEmptyCompanions extends MiniPhase { thisTransform =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName = "dropEmptyCompanions"
override def runsAfter = Set(Flatten.name)
diff --git a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala
index a363ccaeb0d0..b3bd1ab8dd26 100644
--- a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala
@@ -1,15 +1,15 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import MegaPhase.MiniPhase
-import dotty.tools.dotc.core.Contexts._
-import ast._
-import Flags._
-import Symbols._
-import Contexts._
-import Decorators._
-import DenotTransformers._
+import dotty.tools.dotc.core.Contexts.*
+import ast.*
+import Flags.*
+import Symbols.*
+import Contexts.*
+import Decorators.*
+import DenotTransformers.*
import ExplicitOuter.isOuterParamAccessor
import CountOuterAccesses.mightBeDropped
import collection.mutable
@@ -24,7 +24,7 @@ object DropOuterAccessors:
*/
class DropOuterAccessors extends MiniPhase with IdentityDenotTransformer:
thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = DropOuterAccessors.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
index 151e841f0e48..eca3928569f1 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
@@ -2,11 +2,11 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._
-import Symbols._
-import Types._
-import Flags._
+import core.*
+import Contexts.*
+import Symbols.*
+import Types.*
+import Flags.*
import SymDenotations.*
import DenotTransformers.InfoTransformer
import NameKinds.SuperArgName
@@ -53,7 +53,7 @@ import dotty.tools.dotc.core.Names.Name
class ElimByName extends MiniPhase, InfoTransformer:
thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ElimByName.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
index 503561915040..0b0906148ba1 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
@@ -3,12 +3,12 @@ package dotc
package transform
import ast.{Trees, tpd}
-import core._, core.Decorators._
-import MegaPhase._
-import Types._, Contexts._, Flags._, DenotTransformers._, Phases._
-import Symbols._, StdNames._, Trees._
-import TypeErasure.ErasedValueType, ValueClasses._
-import reporting._
+import core.*, core.Decorators.*
+import MegaPhase.*
+import Types.*, Contexts.*, Flags.*, DenotTransformers.*, Phases.*
+import Symbols.*, StdNames.*, Trees.*
+import TypeErasure.ErasedValueType, ValueClasses.*
+import reporting.*
import NameKinds.SuperAccessorName
object ElimErasedValueType {
@@ -36,7 +36,7 @@ object ElimErasedValueType {
*/
class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase =>
- import tpd._
+ import tpd.*
import ElimErasedValueType.elimEVT
override def phaseName: String = ElimErasedValueType.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala
index 2f55826ec2a3..0ee8781b6b70 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala
@@ -2,16 +2,16 @@ package dotty.tools
package dotc
package transform
-import core._
-import dotty.tools.dotc.transform.MegaPhase._
-import Flags._
-import Types._
-import Contexts._
-import Symbols._
+import core.*
+import dotty.tools.dotc.transform.MegaPhase.*
+import Flags.*
+import Types.*
+import Contexts.*
+import Symbols.*
import Denotations.{SingleDenotation, NonSymSingleDenotation}
import SymDenotations.SymDenotation
-import DenotTransformers._
-import Names._
+import DenotTransformers.*
+import Names.*
object ElimOpaque {
val name: String = "elimOpaque"
@@ -21,7 +21,7 @@ object ElimOpaque {
/** Rewrites opaque type aliases to normal alias types */
class ElimOpaque extends MiniPhase with DenotTransformer {
thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ElimOpaque.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala
index 3ddc8b614bae..8527ad26e51b 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala
@@ -1,17 +1,17 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import MegaPhase.MiniPhase
-import Contexts._
-import Types._
+import Contexts.*
+import Types.*
import NameKinds.OuterSelectName
/** This phase rewrites outer selects `E.n_`, which were introduced by
 * inlining, to outer paths.
*/
class ElimOuterSelect extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ElimOuterSelect.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala
index 83349f1f6199..6c577a872c56 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala
@@ -1,10 +1,10 @@
package dotty.tools.dotc
package transform
-import core._
-import Decorators._, Flags._, Types._, Contexts._, Symbols._
-import ast.tpd._
-import Flags._
+import core.*
+import Decorators.*, Flags.*, Types.*, Contexts.*, Symbols.*
+import ast.tpd.*
+import Flags.*
import MegaPhase.MiniPhase
/** Eliminates syntactic references to package terms as prefixes of classes, so that there's no chance
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala
index 756ddd9bf0eb..3ed337ee3a4a 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala
@@ -2,11 +2,11 @@ package dotty.tools.dotc
package transform
import ast.{Trees, tpd}
-import core._, core.Decorators._
-import MegaPhase._, Phases.Phase
-import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTransformers._
-import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._
-import TypeErasure.ErasedValueType, ValueClasses._
+import core.*, core.Decorators.*
+import MegaPhase.*, Phases.Phase
+import Types.*, Contexts.*, Constants.*, Names.*, NameOps.*, Flags.*, DenotTransformers.*
+import SymDenotations.*, Symbols.*, StdNames.*, Annotations.*, Trees.*, Scopes.*, Denotations.*
+import TypeErasure.ErasedValueType, ValueClasses.*
/** This phase rewrites PolyFunction subclasses to FunctionN subclasses
*
@@ -20,7 +20,7 @@ import TypeErasure.ErasedValueType, ValueClasses._
*/
class ElimPolyFunction extends MiniPhase with DenotTransformer {
- import tpd._
+ import tpd.*
override def phaseName: String = ElimPolyFunction.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
index 359b882ef26b..b98d7d525089 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
@@ -2,17 +2,17 @@ package dotty.tools
package dotc
package transform
-import core._
+import core.*
import StdNames.nme
-import Types._
-import transform.MegaPhase._
-import Flags._
-import Contexts._
-import Symbols._
-import Decorators._
-import Denotations._, SymDenotations._
-import DenotTransformers._
-import NullOpsDecorator._
+import Types.*
+import transform.MegaPhase.*
+import Flags.*
+import Contexts.*
+import Symbols.*
+import Decorators.*
+import Denotations.*, SymDenotations.*
+import DenotTransformers.*
+import NullOpsDecorator.*
object ElimRepeated {
val name: String = "elimRepeated"
@@ -24,7 +24,7 @@ object ElimRepeated {
* the transformed type if needed.
*/
class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ElimRepeated.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
index 02612253c735..e2940532d463 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package transform
-import core._
-import Contexts._
-import Flags._
+import core.*
+import Contexts.*
+import Flags.*
import dotty.tools.dotc.ast.tpd
import MegaPhase.MiniPhase
import dotty.tools.dotc.core.Types.{ThisType, TermRef}
@@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Types.{ThisType, TermRef}
* corresponding modules.
*/
class ElimStaticThis extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ElimStaticThis.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
index 0b52b1725c3e..5f0854d31455 100644
--- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
@@ -2,39 +2,39 @@ package dotty.tools
package dotc
package transform
-import core.Phases._
-import core.DenotTransformers._
-import core.Denotations._
-import core.SymDenotations._
-import core.Symbols._
-import core.Contexts._
-import core.Types._
-import core.Names._
-import core.StdNames._
-import core.NameOps._
+import core.Phases.*
+import core.DenotTransformers.*
+import core.Denotations.*
+import core.SymDenotations.*
+import core.Symbols.*
+import core.Contexts.*
+import core.Types.*
+import core.Names.*
+import core.StdNames.*
+import core.NameOps.*
import core.NameKinds.{AdaptedClosureName, BodyRetainerName, DirectMethName}
import core.Scopes.newScopeWith
-import core.Decorators._
-import core.Constants._
-import core.Definitions._
+import core.Decorators.*
+import core.Constants.*
+import core.Definitions.*
import core.Annotations.BodyAnnotation
import typer.NoChecking
import inlines.Inlines
-import typer.ProtoTypes._
+import typer.ProtoTypes.*
import typer.ErrorReporting.errorTree
import typer.Checking.checkValue
-import core.TypeErasure._
-import core.Decorators._
+import core.TypeErasure.*
+import core.Decorators.*
import dotty.tools.dotc.ast.{tpd, untpd}
import ast.TreeTypeMap
import dotty.tools.dotc.core.{Constants, Flags}
-import ValueClasses._
-import TypeUtils._
-import ContextFunctionResults._
-import ExplicitOuter._
+import ValueClasses.*
+import TypeUtils.*
+import ContextFunctionResults.*
+import ExplicitOuter.*
import core.Mode
import util.Property
-import reporting._
+import reporting.*
class Erasure extends Phase with DenotTransformer {
@@ -202,8 +202,8 @@ class Erasure extends Phase with DenotTransformer {
}
object Erasure {
- import tpd._
- import TypeTestsCasts._
+ import tpd.*
+ import TypeTestsCasts.*
val name: String = "erasure"
val description: String = "rewrite types to JVM model"
@@ -541,7 +541,7 @@ object Erasure {
end Boxing
class Typer(erasurePhase: DenotTransformer) extends typer.ReTyper with NoChecking {
- import Boxing._
+ import Boxing.*
def isErased(tree: Tree)(using Context): Boolean = tree match {
case TypeApply(Select(qual, _), _) if tree.symbol == defn.Any_typeCast =>
diff --git a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
index cf62cffd4cdb..b8b10d355ede 100644
--- a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
+++ b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
@@ -28,7 +28,7 @@ import NameKinds.AdaptedClosureName
* to performance degradation, and in some cases, stack overflows.
*/
class EtaReduce extends MiniPhase:
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = EtaReduce.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
index 41e5b76ca874..fa2492a261d5 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
@@ -1,18 +1,18 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer
-import Contexts._
-import Symbols._
-import Flags._
-import SymDenotations._
+import Contexts.*
+import Symbols.*
+import Flags.*
+import SymDenotations.*
-import Decorators._
-import MegaPhase._
+import Decorators.*
+import MegaPhase.*
import java.io.File.separatorChar
-import ValueClasses._
+import ValueClasses.*
/** Make private term members that are accessed from another class
* non-private by resetting the Private flag and expanding their name.
@@ -29,7 +29,7 @@ import ValueClasses._
* and https://github.com/lampepfl/dotty/issues/783
*/
class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ExpandPrivate.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
index 6dae564041ee..4347cca7f9d9 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
@@ -2,13 +2,13 @@ package dotty.tools
package dotc
package transform
-import core._
+import core.*
import Scopes.newScope
-import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._
-import MegaPhase._
+import Contexts.*, Symbols.*, Types.*, Flags.*, Decorators.*, StdNames.*, Constants.*
+import MegaPhase.*
import Names.TypeName
-import SymUtils._
-import NullOpsDecorator._
+import SymUtils.*
+import NullOpsDecorator.*
import ast.untpd
/** Expand SAM closures that cannot be represented by the JVM as lambdas to anonymous classes.
@@ -38,7 +38,7 @@ object ExpandSAMs:
case _ => false
class ExpandSAMs extends MiniPhase:
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ExpandSAMs.name
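For orientation, a source-level example of what ExpandSAMs deals with (plain Scala, not compiler internals; the expansion shown is a simplified sketch): closures against single-abstract-method types are normally emitted as JVM lambdas, but targets such as `PartialFunction` cannot be, and are turned into anonymous classes.

```scala
// A SAM closure the JVM can represent directly as a lambda:
val r: Runnable = () => println("hi")

// A PartialFunction literal cannot stay a plain lambda; it is expanded
// into an anonymous class, roughly like this (simplified sketch):
val pf: PartialFunction[Int, String] = { case 1 => "one" }
// ~> new PartialFunction[Int, String] {
//      def isDefinedAt(x: Int): Boolean = x == 1
//      def apply(x: Int): String = x match { case 1 => "one" }
//    }
```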
diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
index deb1f665c022..b197d23f0b94 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -2,19 +2,19 @@ package dotty.tools
package dotc
package transform
-import MegaPhase._
-import core.DenotTransformers._
-import core.Symbols._
-import core.Contexts._
-import core.Phases._
-import core.Types._
-import core.Flags._
-import core.Decorators._
+import MegaPhase.*
+import core.DenotTransformers.*
+import core.Symbols.*
+import core.Contexts.*
+import core.Phases.*
+import core.Types.*
+import core.Flags.*
+import core.Decorators.*
import core.StdNames.nme
-import core.Names._
-import core.NameOps._
+import core.Names.*
+import core.NameOps.*
import core.NameKinds.SuperArgName
-import SymUtils._
+import SymUtils.*
import dotty.tools.dotc.ast.tpd
import collection.mutable
@@ -35,8 +35,8 @@ import scala.annotation.tailrec
 * needs to run after the pattern matcher, as it can add outer checks and force creation of $outer
*/
class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase =>
- import ExplicitOuter._
- import ast.tpd._
+ import ExplicitOuter.*
+ import ast.tpd.*
override def phaseName: String = ExplicitOuter.name
@@ -122,7 +122,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase =>
}
object ExplicitOuter {
- import ast.tpd._
+ import ast.tpd.*
val name: String = "explicitOuter"
val description: String = "add accessors to outer classes from nested ones"
diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
index a6f7a29accd7..cd62a55cb8dc 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc
package transform
-import core._
-import Contexts._, Types._, MegaPhase._, ast.Trees._, Symbols._, Decorators._, Flags._
+import core.*
+import Contexts.*, Types.*, MegaPhase.*, ast.Trees.*, Symbols.*, Decorators.*, Flags.*
import SymUtils.*
/** Transform references of the form
@@ -20,7 +20,7 @@ import SymUtils.*
* Also replaces idents referring to the self type with ThisTypes.
*/
class ExplicitSelf extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ExplicitSelf.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
index 19124357a0bd..8b3a783745fb 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -5,17 +5,17 @@
package dotty.tools.dotc
package transform
-import dotty.tools.dotc.transform.MegaPhase._
-import ValueClasses._
+import dotty.tools.dotc.transform.MegaPhase.*
+import ValueClasses.*
import dotty.tools.dotc.ast.tpd
import scala.collection.mutable
-import core._
-import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._
-import SymDenotations._, Symbols._, StdNames._, Denotations._
+import core.*
+import Types.*, Contexts.*, Names.*, Flags.*, DenotTransformers.*, Phases.*
+import SymDenotations.*, Symbols.*, StdNames.*, Denotations.*
import TypeErasure.{ valueErasure, ErasedValueType }
import NameKinds.{ExtMethName, BodyRetainerName}
-import Decorators._
-import TypeUtils._
+import Decorators.*
+import TypeUtils.*
/**
* Perform Step 1 in the inline classes SIP: Creates extension methods for all
@@ -38,8 +38,8 @@ import TypeUtils._
*/
class ExtensionMethods extends MiniPhase with DenotTransformer with FullParameterization { thisPhase =>
- import tpd._
- import ExtensionMethods._
+ import tpd.*
+ import ExtensionMethods.*
override def phaseName: String = ExtensionMethods.name
diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
index 03639c8af689..ce14d3d3c457 100644
--- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
+++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
@@ -1,21 +1,21 @@
package dotty.tools.dotc
package transform
-import core._
-import Names._
-import dotty.tools.dotc.transform.MegaPhase._
+import core.*
+import Names.*
+import dotty.tools.dotc.transform.MegaPhase.*
import ast.untpd
-import Flags._
-import Types._
+import Flags.*
+import Types.*
import Constants.Constant
-import Contexts._
-import Symbols._
-import Decorators._
+import Contexts.*
+import Symbols.*
+import Decorators.*
import scala.collection.mutable
-import DenotTransformers._
-import NameOps._
+import DenotTransformers.*
+import NameOps.*
import NameKinds.OuterSelectName
-import StdNames._
+import StdNames.*
import TypeUtils.isErasedValueType
import config.Feature
import inlines.Inlines.inInlineMethod
@@ -37,7 +37,7 @@ object FirstTransform {
* if (false) A else B ==> B
*/
class FirstTransform extends MiniPhase with InfoTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = FirstTransform.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala
index 25df51d0916d..31c31a0f16ed 100644
--- a/compiler/src/dotty/tools/dotc/transform/Flatten.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala
@@ -1,18 +1,20 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import DenotTransformers.SymTransformer
-import Contexts._
-import Flags._
+import Contexts.*
+import Flags.*
import SymDenotations.SymDenotation
import collection.mutable
import MegaPhase.MiniPhase
import util.Store
+import scala.compiletime.uninitialized
+
/** Lift nested classes to toplevel */
class Flatten extends MiniPhase with SymTransformer {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = Flatten.name
@@ -24,7 +26,7 @@ class Flatten extends MiniPhase with SymTransformer {
override def changesMembers: Boolean = true // the phase removes inner classes
- private var LiftedDefs: Store.Location[mutable.ListBuffer[Tree] | Null] = _
+ private var LiftedDefs: Store.Location[mutable.ListBuffer[Tree] | Null] = uninitialized
private def liftedDefs(using Context) = ctx.store(LiftedDefs)
override def initContext(ctx: FreshContext): Unit =
diff --git a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala
index bf8a6fa6c7bf..afe78ce1296d 100644
--- a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala
@@ -10,6 +10,8 @@ import collection.immutable
import ast.tpd
import MegaPhase.MiniPhase
+import scala.compiletime.uninitialized
+
object ForwardDepChecks:
import tpd.*
@@ -37,8 +39,8 @@ object ForwardDepChecks:
(m1, idx + 1)
}._1
var maxIndex: Int = Int.MinValue
- var refSpan: Span = _
- var refSym: Symbol = _
+ var refSpan: Span = uninitialized
+ var refSym: Symbol = uninitialized
override def enterReference(sym: Symbol, span: Span): Unit =
if (sym.exists && sym.owner.isTerm)
@@ -63,7 +65,7 @@ class ForwardDepChecks extends MiniPhase:
override def runsAfter: Set[String] = Set(ElimByName.name)
- private var LevelInfo: Store.Location[OptLevelInfo] = _
+ private var LevelInfo: Store.Location[OptLevelInfo] = uninitialized
private def currentLevel(using Context): OptLevelInfo = ctx.store(LevelInfo)
override def initContext(ctx: FreshContext): Unit =
diff --git a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
index 8ca600577244..7258b532d87a 100644
--- a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
+++ b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala
@@ -1,14 +1,14 @@
package dotty.tools.dotc
package transform
-import core._
-import Types._
-import Contexts._
-import Symbols._
-import Decorators._
-import TypeUtils._
+import core.*
+import Types.*
+import Contexts.*
+import Symbols.*
+import Decorators.*
+import TypeUtils.*
import StdNames.nme
-import ast._
+import ast.*
/** Provides methods to produce fully parameterized versions of instance methods,
* where the `this` of the enclosing class is abstracted out in an extra leading
@@ -49,7 +49,7 @@ import ast._
*/
trait FullParameterization {
- import tpd._
+ import tpd.*
/** If references to original symbol `referenced` from within fully parameterized method
* `derived` should be rewired to some fully parameterized method, the rewiring target symbol,
@@ -207,7 +207,7 @@ trait FullParameterization {
.subst(origLeadingTypeParamSyms ++ origOtherParamSyms, (trefs ++ argRefs).tpes)
.substThisUnlessStatic(origClass, thisRef.tpe),
treeMap = {
- case tree: This if tree.symbol == origClass => thisRef
+ case tree: This if tree.symbol == origClass => thisRef.withSpan(tree.span)
case tree => rewireTree(tree, Nil) orElse tree
},
oldOwners = origMeth :: Nil,
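This hunk also carries the one behavioral change in the file: the substituted `this` reference now keeps the span of the tree it replaces, so positions in diagnostics and debug info still point at the original source. A hypothetical, self-contained illustration of the pattern (not the actual compiler code):

```scala
import dotty.tools.dotc.ast.tpd.*
import dotty.tools.dotc.core.Contexts.*

// Hypothetical helper: replace `this` references while anchoring each
// replacement at the position of the tree it replaces.
def replaceThis(tree: Tree, thisRef: Tree)(using Context): Tree = tree match
  case t: This => thisRef.withSpan(t.span) // preserve the original position
  case t       => t
```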
diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala
index cc1c0048b68f..4cf176cfda3a 100644
--- a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala
+++ b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala
@@ -1,16 +1,16 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import Constants.Constant
-import Contexts._
-import Flags._
-import Definitions._
-import DenotTransformers._
-import StdNames._
-import Symbols._
-import MegaPhase._
-import Types._
+import Contexts.*
+import Flags.*
+import Definitions.*
+import DenotTransformers.*
+import StdNames.*
+import Symbols.*
+import MegaPhase.*
+import Types.*
/** This phase adds forwarders for XXL functions' `apply` methods that are implemented with a method
@@ -23,7 +23,7 @@ import Types._
* is generated.
*/
class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = FunctionXXLForwarders.name
diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala
index 21c212e2a28a..77f4f76c33ba 100644
--- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala
+++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala
@@ -2,20 +2,20 @@ package dotty.tools
package dotc
package transform
-import core.Annotations._
-import core.Contexts._
-import core.Phases._
+import core.Annotations.*
+import core.Contexts.*
+import core.Phases.*
import core.Decorators.*
import core.Definitions
-import core.Flags._
+import core.Flags.*
import core.Names.Name
-import core.Symbols._
+import core.Symbols.*
import core.TypeApplications.{EtaExpansion, TypeParamInfo}
import core.TypeErasure.{erasedGlb, erasure, fullErasure, isGenericArrayElement, tupleArity}
-import core.Types._
+import core.Types.*
import core.classfile.ClassfileConstants
-import SymUtils._
-import TypeUtils._
+import SymUtils.*
+import TypeUtils.*
import config.Printers.transforms
import reporting.trace
import java.lang.StringBuilder
diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala
index ad06bfb0a504..eeb2e868ddc8 100644
--- a/compiler/src/dotty/tools/dotc/transform/Getters.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala
@@ -1,17 +1,17 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import DenotTransformers.SymTransformer
-import Contexts._
+import Contexts.*
import SymDenotations.SymDenotation
-import Types._
-import Symbols._
-import MegaPhase._
-import Flags._
-import ValueClasses._
-import SymUtils._
-import NameOps._
+import Types.*
+import Symbols.*
+import MegaPhase.*
+import Flags.*
+import ValueClasses.*
+import SymUtils.*
+import NameOps.*
/** Performs the following rewritings for fields of a class:
@@ -57,7 +57,7 @@ import NameOps._
* This allows subsequent code motions in Flatten.
*/
class Getters extends MiniPhase with SymTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = Getters.name
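As a reminder of the rewriting named in the (elided) doc comment above, conceptually a concrete `val` becomes an accessor method at this phase, with the backing field reintroduced later by Memoize. An illustrative before/after (not actual phase output):

```scala
class Before { val x: Int = 1 } // concrete field
class After  { def x: Int = 1 } // accessor only; Memoize later adds the
                                // underlying field behind the getter
```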
diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala
index 9a36d65babe8..190150ca8a81 100644
--- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala
@@ -1,18 +1,18 @@
package dotty.tools.dotc
package transform
-import MegaPhase._
-import core.DenotTransformers._
-import core.Symbols._
-import core.Contexts._
+import MegaPhase.*
+import core.DenotTransformers.*
+import core.Symbols.*
+import core.Contexts.*
import ast.TreeTypeMap
-import core.Types._
-import core.Flags._
-import core.Decorators._
+import core.Types.*
+import core.Flags.*
+import core.Decorators.*
import collection.mutable
-import ast.Trees._
+import ast.Trees.*
import core.NameKinds.SuperArgName
-import SymUtils._
+import SymUtils.*
import core.Decorators.*
object HoistSuperArgs {
@@ -43,7 +43,7 @@ object HoistSuperArgs {
* or, if that is a package, it is made a static method of the class itself.
*/
class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = HoistSuperArgs.name
diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala
index 798f34757b35..18333ae506fd 100644
--- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala
@@ -2,11 +2,11 @@ package dotty.tools
package dotc
package transform
-import core._
-import MegaPhase._
-import Symbols._, Contexts._, Types._, Decorators._
-import NameOps._
-import Names._
+import core.*
+import MegaPhase.*
+import Symbols.*, Contexts.*, Types.*, Decorators.*
+import NameOps.*
+import Names.*
import scala.collection.mutable.ListBuffer
@@ -26,7 +26,7 @@ import scala.collection.mutable.ListBuffer
* This removes placeholders added by inline `unapply`/`unapplySeq` patterns.
*/
class InlinePatterns extends MiniPhase:
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = InlinePatterns.name
diff --git a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala
index 047a187bad68..cff1632ffcd2 100644
--- a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala
@@ -2,17 +2,17 @@ package dotty.tools
package dotc
package transform
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.transform.MegaPhase.MiniPhase
import dotty.tools.dotc.inlines.Inlines
/** Check that `tree.rhs` can be the right-hand side of an `inline` value definition. */
class InlineVals extends MiniPhase:
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = InlineVals.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala
index e36061a79820..a51ba93ab9ac 100644
--- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala
@@ -1,14 +1,14 @@
package dotty.tools.dotc
package transform
-import core._
-import Flags._
-import Contexts._
-import Symbols._
-import SymUtils._
+import core.*
+import Flags.*
+import Contexts.*
+import Symbols.*
+import SymUtils.*
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.ast.Trees._
-import dotty.tools.dotc.quoted._
+import dotty.tools.dotc.ast.Trees.*
+import dotty.tools.dotc.quoted.*
import dotty.tools.dotc.inlines.Inlines
import dotty.tools.dotc.ast.TreeMapWithImplicits
import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer
@@ -20,7 +20,7 @@ import scala.collection.mutable.ListBuffer
class Inlining extends MacroTransform, IdentityDenotTransformer {
self =>
- import tpd._
+ import tpd.*
override def phaseName: String = Inlining.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala
index 046147f20d82..9f99e7a6fbd3 100644
--- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala
@@ -2,23 +2,25 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._
-import Symbols._
-import Flags._
-
-import Decorators._
-import MegaPhase._
-import Names._
+import core.*
+import Contexts.*
+import Symbols.*
+import Flags.*
+
+import Decorators.*
+import MegaPhase.*
+import Names.*
import Constants.Constant
+import scala.compiletime.uninitialized
+
/** The phase is enabled if the -Yinstrument option is set.
* If enabled, it counts the number of closures or allocations for each source position.
* It does this by generating a call to dotty.tools.dotc.util.Stats.doRecord.
*/
class Instrumentation extends MiniPhase { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = Instrumentation.name
@@ -40,11 +42,11 @@ class Instrumentation extends MiniPhase { thisPhase =>
"::", "+=", "toString", "newArray", "box", "toCharArray", "termName", "typeName",
"slice", "staticRef", "requiredClass")
- private var namesToRecord: Set[Name] = _
- private var collectionNamesToRecord: Set[Name] = _
- private var Stats_doRecord: Symbol = _
- private var Stats_doRecordSize: Symbol = _
- private var CollectionIterableClass: ClassSymbol = _
+ private var namesToRecord: Set[Name] = uninitialized
+ private var collectionNamesToRecord: Set[Name] = uninitialized
+ private var Stats_doRecord: Symbol = uninitialized
+ private var Stats_doRecordSize: Symbol = uninitialized
+ private var CollectionIterableClass: ClassSymbol = uninitialized
override def prepareForUnit(tree: Tree)(using Context): Context =
namesToRecord = namesOfInterest.map(_.toTermName).toSet
diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
index c95500d856be..c2fdccc2861e 100644
--- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
@@ -3,11 +3,11 @@ package transform
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Constants.Constant
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.Names.TermName
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.transform.MegaPhase.MiniPhase
object InterceptedMethods {
@@ -23,7 +23,7 @@ object InterceptedMethods {
* using the most precise overload available
*/
class InterceptedMethods extends MiniPhase {
- import tpd._
+ import tpd.*
override def phaseName: String = InterceptedMethods.name
diff --git a/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled
index 68b493a0b9db..f6df2be19dc4 100644
--- a/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled
+++ b/compiler/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala.disabled
@@ -1,11 +1,11 @@
package dotty.tools.dotc
package transform
-import dotty.tools.dotc.util.Positions._
+import dotty.tools.dotc.util.Positions.*
import MegaPhase.MiniPhase
-import core._
-import Contexts._, Types._, Constants._, Decorators._, Symbols._
-import TypeUtils._, TypeErasure._, Flags._
+import core.*
+import Contexts.*, Types.*, Constants.*, Decorators.*, Symbols.*
+import TypeUtils.*, TypeErasure.*, Flags.*
/** Implements partial evaluation of `sc.isInstanceOf[Sel]` according to:
*
@@ -31,7 +31,7 @@ import TypeUtils._, TypeErasure._, Flags._
*/
class IsInstanceOfEvaluator extends MiniPhase {
- import dotty.tools.dotc.ast.tpd._
+ import dotty.tools.dotc.ast.tpd.*
val phaseName = "isInstanceOfEvaluator"
diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
index 6ec0f330efff..84f90e289e43 100644
--- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
@@ -1,25 +1,27 @@
package dotty.tools.dotc
package transform
-import MegaPhase._
+import MegaPhase.*
import core.Denotations.NonSymSingleDenotation
-import core.DenotTransformers._
-import core.Symbols._
-import core.Contexts._
-import core.Types._
-import core.Flags._
-import core.Decorators._
+import core.DenotTransformers.*
+import core.Symbols.*
+import core.Contexts.*
+import core.Types.*
+import core.Flags.*
+import core.Decorators.*
import core.StdNames.nme
-import core.Names._
-import core.NameOps._
+import core.Names.*
+import core.NameOps.*
import core.NameKinds.ExpandPrefixName
-import SymUtils._
+import SymUtils.*
import ExplicitOuter.outer
import util.Store
import collection.mutable.{HashMap, LinkedHashMap, ListBuffer}
+import scala.compiletime.uninitialized
+
object LambdaLift:
- import ast.tpd._
+ import ast.tpd.*
val name: String = "lambdaLift"
val description: String = "lifts out nested functions to class scope"
@@ -249,8 +251,8 @@ end LambdaLift
* }
*/
class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase =>
- import LambdaLift._
- import ast.tpd._
+ import LambdaLift.*
+ import ast.tpd.*
override def phaseName: String = LambdaLift.name
@@ -266,7 +268,7 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase =>
// lambda lift for super calls right. Witness the implementation restrictions to
// this effect in scalac.
- private var Lifter: Store.Location[Lifter] = _
+ private var Lifter: Store.Location[Lifter] = uninitialized
private def lifter(using Context) = ctx.store(Lifter)
override def initContext(ctx: FreshContext): Unit =
diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
index b433e37e39c0..0aaecd261387 100644
--- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
@@ -19,10 +19,11 @@ import transform.MegaPhase.MiniPhase
import transform.SymUtils.*
import scala.collection.mutable
+import scala.compiletime.uninitialized
class LazyVals extends MiniPhase with IdentityDenotTransformer {
- import LazyVals._
- import tpd._
+ import LazyVals.*
+ import tpd.*
/**
* The map contains the list of the offset trees.
@@ -47,7 +48,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer {
val containerFlagsMask: FlagSet = Method | Lazy | Accessor | Module
/** A map of lazy values to the fields they should null after initialization. */
- private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = _
+ private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = uninitialized
private def nullableFor(sym: Symbol)(using Context) = {
// optimisation: value only used once, we can remove the value from the map
val nullables = lazyValNullables.nn.remove(sym)
@@ -455,8 +456,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer {
}
def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = {
- import dotty.tools.dotc.core.Types._
- import dotty.tools.dotc.core.Flags._
+ import dotty.tools.dotc.core.Types.*
+ import dotty.tools.dotc.core.Flags.*
val claz = x.symbol.owner.asClass
val thizClass = Literal(Constant(claz.info))
diff --git a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala
index e7ff6d10c222..302001347d67 100644
--- a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala
@@ -2,9 +2,9 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._, Symbols._, Decorators._
-import MegaPhase._
+import core.*
+import Contexts.*, Symbols.*, Decorators.*
+import MegaPhase.*
/** Rewrite `{ stats; expr }.f(args)` to `{ stats; expr.f(args) }` and
* `{ stats; expr }(args)` to `{ stats; expr(args) }` before proceeding,
@@ -12,7 +12,7 @@ import MegaPhase._
* collapse applies of IFTs (this is done in Erasure).
*/
class LetOverApply extends MiniPhase:
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = LetOverApply.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled
index 626cb9687df4..b9e6efe1b06b 100644
--- a/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled
+++ b/compiler/src/dotty/tools/dotc/transform/Literalize.scala.disabled
@@ -1,17 +1,17 @@
package dotty.tools.dotc
package transform
-import MegaPhase._
-import core.DenotTransformers._
-import core.Symbols._
-import core.Contexts._
-import core.Types._
-import core.Flags._
-import core.Decorators._
+import MegaPhase.*
+import core.DenotTransformers.*
+import core.Symbols.*
+import core.Contexts.*
+import core.Types.*
+import core.Flags.*
+import core.Decorators.*
import core.StdNames.nme
-import ast.Trees._
+import ast.Trees.*
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Constants._
+import dotty.tools.dotc.core.Constants.*
/** This phase rewrites idempotent expressions with constant types to Literals.
* The constant types are eliminated by erasure, so we need to keep
@@ -21,7 +21,7 @@ import dotty.tools.dotc.core.Constants._
* in the type of the literal.
*/
class Literalize extends MiniPhase { thisTransform =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = "literalize"
diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala
index 464f73fc486c..7e5f5dbace72 100644
--- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala
@@ -13,7 +13,7 @@ import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer
import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.MacroClassLoader
import dotty.tools.dotc.core.Symbols.*
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.quoted.*
import dotty.tools.dotc.util.SrcPos
import scala.quoted.runtime.impl.{QuotesImpl, SpliceScope}
diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
index 7bb7ed365ebe..887a962f7a65 100644
--- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
@@ -1,17 +1,17 @@
package dotty.tools.dotc
package transform
-import core._
-import Phases._
-import ast.Trees._
-import Contexts._
+import core.*
+import Phases.*
+import ast.Trees.*
+import Contexts.*
/** A base class for transforms.
* A transform contains a compiler phase which applies a tree transformer.
*/
abstract class MacroTransform extends Phase {
- import ast.tpd._
+ import ast.tpd.*
override def run(using Context): Unit = {
val unit = ctx.compilationUnit
diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala
index fe70a1659036..252babe7058f 100644
--- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala
@@ -2,8 +2,10 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._, Phases._, Symbols._, Decorators._
+import scala.compiletime.uninitialized
+
+import core.*
+import Contexts.*, Phases.*, Symbols.*, Decorators.*
import Flags.PackageVal
import staging.StagingLevel.*
@@ -14,7 +16,7 @@ import staging.StagingLevel.*
* is described in his thesis.
*/
object MegaPhase {
- import ast.tpd._
+ import ast.tpd.*
/** The base class of tree transforms. For each kind of tree K, there are
* two methods which can be overridden:
@@ -31,8 +33,8 @@ object MegaPhase {
*/
abstract class MiniPhase extends Phase {
- private[MegaPhase] var superPhase: MegaPhase = _
- private[MegaPhase] var idxInGroup: Int = _
+ private[MegaPhase] var superPhase: MegaPhase = uninitialized
+ private[MegaPhase] var idxInGroup: Int = uninitialized
/** List of names of phases that should have finished their processing of all compilation units
* before this phase starts
@@ -136,10 +138,10 @@ object MegaPhase {
singletonGroup.run
}
}
-import MegaPhase._
+import MegaPhase.*
class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase {
- import ast.tpd._
+ import ast.tpd.*
override val phaseName: String =
if (miniPhases.length == 1) miniPhases(0).phaseName
@@ -151,7 +153,7 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase {
else
s"MegaPhase{${miniPhases.head.phaseName},...,${miniPhases.last.phaseName}}"
- private var relaxedTypingCache: Boolean = _
+ private var relaxedTypingCache: Boolean = uninitialized
private var relaxedTypingKnown = false
override final def relaxedTyping: Boolean = {
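For context on the machinery being edited here: a `MiniPhase` overrides per-tree-kind hooks, and a `MegaPhase` fuses a group of mini-phases into a single tree traversal. A minimal hypothetical mini-phase (illustrative only):

```scala
import dotty.tools.dotc.ast.tpd.*
import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.transform.MegaPhase.MiniPhase

// Hypothetical example: the MegaPhase driver calls transformIdent on each
// Ident node after its children have been transformed.
class DemoPhase extends MiniPhase:
  override def phaseName: String = "demoPhase"
  override def transformIdent(tree: Ident)(using Context): Tree =
    tree // identity; a real phase would rewrite the tree here
```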
diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
index af6533cfc17f..120f2f66cd80 100644
--- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
@@ -1,24 +1,25 @@
package dotty.tools.dotc
package transform
-import core._
-import DenotTransformers._
-import Contexts._
+import core.*
+import DenotTransformers.*
+import Contexts.*
import Phases.*
import SymDenotations.SymDenotation
-import Denotations._
-import Symbols._
-import SymUtils._
-import Constants._
-import MegaPhase._
-import NameOps._
-import Flags._
-import Decorators._
+import Denotations.*
+import Symbols.*
+import SymUtils.*
+import Constants.*
+import MegaPhase.*
+import NameOps.*
+import Flags.*
+import Decorators.*
import StdNames.nme
-import sjs.JSSymUtils._
+import sjs.JSSymUtils.*
import util.Store
+import scala.compiletime.uninitialized
object Memoize {
val name: String = "memoize"
@@ -46,13 +47,13 @@ object Memoize {
*/
class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase =>
import Memoize.MyState
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = Memoize.name
override def description: String = Memoize.description
- private var MyState: Store.Location[MyState] = _
+ private var MyState: Store.Location[MyState] = uninitialized
private def myState(using Context): MyState = ctx.store(MyState)
override def initContext(ctx: FreshContext): Unit =
diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala
index 5ca09dd6188f..33864a33a047 100644
--- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala
@@ -2,21 +2,21 @@ package dotty.tools
package dotc
package transform
-import core._
-import MegaPhase._
-import Contexts._
-import Flags._
-import SymUtils._
-import Symbols._
-import SymDenotations._
-import Types._
-import Decorators._
-import DenotTransformers._
-import StdNames._
-import Names._
-import NameKinds._
-import NameOps._
-import ast.Trees._
+import core.*
+import MegaPhase.*
+import Contexts.*
+import Flags.*
+import SymUtils.*
+import Symbols.*
+import SymDenotations.*
+import Types.*
+import Decorators.*
+import DenotTransformers.*
+import StdNames.*
+import Names.*
+import NameKinds.*
+import NameOps.*
+import ast.Trees.*
object Mixin {
val name: String = "mixin"
@@ -111,7 +111,7 @@ object Mixin {
* are symbolic.
*/
class Mixin extends MiniPhase with SymTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = Mixin.name
@@ -184,7 +184,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase =>
override def transformTemplate(impl: Template)(using Context): Template = {
val cls = impl.symbol.owner.asClass
val ops = new MixinOps(cls, thisPhase)
- import ops._
+ import ops.*
def traitDefs(stats: List[Tree]): List[Tree] = {
stats.flatMap {
diff --git a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala
index fa1c09806893..d40a2a7eb17e 100644
--- a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala
@@ -1,15 +1,15 @@
package dotty.tools.dotc
package transform
-import core._
-import Symbols._, Types._, Contexts._, DenotTransformers._, Flags._
-import util.Spans._
-import SymUtils._
-import StdNames._, NameOps._
+import core.*
+import Symbols.*, Types.*, Contexts.*, DenotTransformers.*, Flags.*
+import util.Spans.*
+import SymUtils.*
+import StdNames.*, NameOps.*
import typer.Nullables
class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) {
- import ast.tpd._
+ import ast.tpd.*
val superCls: Symbol = cls.superClass
val mixins: List[ClassSymbol] = cls.mixins
diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
index db96aeefe231..a417d41ffd56 100644
--- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
@@ -1,26 +1,26 @@
package dotty.tools.dotc
package transform
-import core._
-import Flags._
-import Contexts._
-import Symbols._
+import core.*
+import Flags.*
+import Contexts.*
+import Symbols.*
import DenotTransformers.SymTransformer
import Types.MethodType
import Annotations.Annotation
import SymDenotations.SymDenotation
import Names.Name
import StdNames.nme
-import NameOps._
+import NameOps.*
-import ast._
+import ast.*
-import SymUtils._
-import MegaPhase._
+import SymUtils.*
+import MegaPhase.*
/** Move static methods from companion to the class itself */
class MoveStatics extends MiniPhase with SymTransformer {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = MoveStatics.name
diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
index a75d6da9dd6a..4bdcc8d9606d 100644
--- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
+++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
@@ -1,15 +1,15 @@
package dotty.tools.dotc
package transform
-import core._
-import Contexts._, Symbols._, Types._, Flags._, StdNames._
-import MegaPhase._
+import core.*
+import Contexts.*, Symbols.*, Types.*, Flags.*, StdNames.*
+import MegaPhase.*
import NameKinds.NonLocalReturnKeyName
import config.SourceVersion.*
import Decorators.em
object NonLocalReturns {
- import ast.tpd._
+ import ast.tpd.*
val name: String = "nonLocalReturns"
val description: String = "expand non-local returns"
@@ -26,8 +26,8 @@ class NonLocalReturns extends MiniPhase {
override def description: String = NonLocalReturns.description
- import NonLocalReturns._
- import ast.tpd._
+ import NonLocalReturns.*
+ import ast.tpd.*
override def runsAfter: Set[String] = Set(ElimByName.name)
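For orientation, a source-level example of what this phase expands (plain Scala, not compiler code): a `return` inside a closure exits the enclosing method, which the phase implements via a key-tagged throw/catch; such returns are deprecated under recent source versions, hence the `SourceVersion` import above.

```scala
// The `return` exits firstNegative itself, not just the lambda passed
// to foreach; NonLocalReturns rewrites it into a throw that the method
// catches at its boundary.
def firstNegative(xs: List[Int]): Option[Int] =
  xs.foreach(x => if x < 0 then return Some(x))
  None
```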
diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
index 0e38e9c074cd..4020291dded0 100644
--- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
@@ -2,14 +2,16 @@ package dotty.tools
package dotc
package transform
-import core._
-import Flags._, Symbols._, Contexts._, Scopes._, Decorators._, Types.Type
+import core.*
+import Flags.*, Symbols.*, Contexts.*, Scopes.*, Decorators.*, Types.Type
import NameKinds.DefaultGetterName
-import NullOpsDecorator._
+import NullOpsDecorator.*
import collection.immutable.BitSet
import scala.annotation.tailrec
import cc.isCaptureChecking
+import scala.compiletime.uninitialized
+
/** A module that can produce a kind of iterator (`Cursor`),
* which yields all pairs of overriding/overridden symbols
* that are visible in some baseclass, unless there's a parent class
@@ -118,10 +120,10 @@ object OverridingPairs:
private var nextEntry = curEntry
/** The current candidate symbol for overriding */
- var overriding: Symbol = _
+ var overriding: Symbol = uninitialized
/** If not null: The symbol overridden by overriding */
- var overridden: Symbol = _
+ var overridden: Symbol = uninitialized
//@M: note that next is called once during object initialization
final def hasNext: Boolean = nextEntry != null
diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
index 8c93ffb90232..82ba3b7a1b7f 100644
--- a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
@@ -2,10 +2,10 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._, Types._, Symbols._, Flags._, TypeUtils._, DenotTransformers._, StdNames._
-import Decorators._
-import MegaPhase._
+import core.*
+import Contexts.*, Types.*, Symbols.*, Flags.*, TypeUtils.*, DenotTransformers.*, StdNames.*
+import Decorators.*
+import MegaPhase.*
import NameKinds.ParamAccessorName
/** For all private parameter accessors
diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
index a648a419d594..8f5eec693609 100644
--- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -2,22 +2,22 @@ package dotty.tools
package dotc
package transform
-import core._
-import MegaPhase._
-import Symbols._, Contexts._, Types._, StdNames._, NameOps._
+import core.*
+import MegaPhase.*
+import Symbols.*, Contexts.*, Types.*, StdNames.*, NameOps.*
import patmat.SpaceEngine
-import util.Spans._
+import util.Spans.*
import typer.Applications.*
-import SymUtils._
+import SymUtils.*
import TypeUtils.*
import Annotations.*
-import Flags._, Constants._
-import Decorators._
+import Flags.*, Constants.*
+import Decorators.*
import NameKinds.{PatMatStdBinderName, PatMatAltsName, PatMatResultName}
import config.Printers.patmatch
-import reporting._
-import ast._
-import util.Property._
+import reporting.*
+import ast.*
+import util.Property.*
import scala.annotation.tailrec
import scala.collection.mutable
@@ -27,8 +27,8 @@ import scala.collection.mutable
* where every pattern is an integer or string constant
*/
class PatternMatcher extends MiniPhase {
- import ast.tpd._
- import PatternMatcher._
+ import ast.tpd.*
+ import PatternMatcher.*
override def phaseName: String = PatternMatcher.name
@@ -56,7 +56,7 @@ class PatternMatcher extends MiniPhase {
}
object PatternMatcher {
- import ast.tpd._
+ import ast.tpd.*
val name: String = "patternMatcher"
val description: String = "compile pattern matches"
diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala
index b368e47bf0b3..3e3b14b6d0f3 100644
--- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala
@@ -1,25 +1,25 @@
package dotty.tools.dotc
package transform
-import core._
-import Decorators._
-import Flags._
-import Types._
-import Contexts._
-import Symbols._
-import Constants._
-import ast.Trees._
+import core.*
+import Decorators.*
+import Flags.*
+import Types.*
+import Contexts.*
+import Symbols.*
+import Constants.*
+import ast.Trees.*
import ast.untpd
import ast.TreeTypeMap
-import SymUtils._
-import NameKinds._
+import SymUtils.*
+import NameKinds.*
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.ast.untpd
import dotty.tools.dotc.config.ScalaRelease.*
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.quoted._
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.quoted.*
import dotty.tools.dotc.inlines.Inlines
import scala.annotation.constructorOnly
@@ -69,8 +69,8 @@ import scala.collection.mutable
*
*/
class PickleQuotes extends MacroTransform {
- import PickleQuotes._
- import tpd._
+ import PickleQuotes.*
+ import tpd.*
override def phaseName: String = PickleQuotes.name
@@ -207,7 +207,7 @@ class PickleQuotes extends MacroTransform {
}
object PickleQuotes {
- import tpd._
+ import tpd.*
val name: String = "pickleQuotes"
val description: String = "turn quoted trees into explicit run-time data structures"
diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
index 252bb6daeae5..4aea14fed2fc 100644
--- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
@@ -2,15 +2,15 @@ package dotty.tools
package dotc
package transform
-import core._
-import Contexts._
-import Decorators._
-import tasty._
+import core.*
+import Contexts.*
+import Decorators.*
+import tasty.*
import config.Printers.{noPrinter, pickling}
import java.io.PrintStream
-import Periods._
-import Phases._
-import Symbols._
+import Periods.*
+import Phases.*
+import Symbols.*
import Flags.Module
import reporting.{ThrowingReporter, Profile, Message}
import collection.mutable
@@ -30,7 +30,7 @@ object Pickler {
/** This phase pickles trees */
class Pickler extends Phase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = Pickler.name
diff --git a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala
index 0cfd3650ad0b..26c956d85d18 100644
--- a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import Contexts.*
import DenotTransformers.IdentityDenotTransformer
import SyntheticMembers.*
diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
index ff322dad9ab6..90a3523561a9 100644
--- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
@@ -4,20 +4,20 @@ package transform
import dotty.tools.dotc.ast.{Trees, tpd, untpd, desugar}
import scala.collection.mutable
-import core._
+import core.*
import dotty.tools.dotc.typer.Checking
import dotty.tools.dotc.inlines.Inlines
import dotty.tools.dotc.typer.VarianceChecker
import typer.ErrorReporting.errorTree
-import Types._, Contexts._, Names._, Flags._, DenotTransformers._, Phases._
-import SymDenotations._, StdNames._, Annotations._, Trees._, Scopes._
-import Decorators._
-import Symbols._, SymUtils._, NameOps._
+import Types.*, Contexts.*, Names.*, Flags.*, DenotTransformers.*, Phases.*
+import SymDenotations.*, StdNames.*, Annotations.*, Trees.*, Scopes.*
+import Decorators.*
+import Symbols.*, SymUtils.*, NameOps.*
import ContextFunctionResults.annotateContextResults
import config.Printers.typr
import config.Feature
import util.SrcPos
-import reporting._
+import reporting.*
import NameKinds.WildcardParamName
object PostTyper {
@@ -61,7 +61,7 @@ object PostTyper {
* they do not warrant their own group of miniphases before pickling.
*/
class PostTyper extends MacroTransform with InfoTransformer { thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = PostTyper.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala
index 6d8f7bdb32cb..b6df581beee2 100644
--- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala
@@ -1,11 +1,11 @@
package dotty.tools.dotc
package transform
-import core.Contexts._
-import core.NameKinds._
-import core.Symbols._
-import core.Flags._
-import core.Decorators._
+import core.Contexts.*
+import core.NameKinds.*
+import core.Symbols.*
+import core.Flags.*
+import core.Decorators.*
import core.Names.TermName
import MegaPhase.MiniPhase
import config.Printers.transforms
@@ -48,7 +48,7 @@ object ProtectedAccessors {
}
class ProtectedAccessors extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ProtectedAccessors.name
diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala
index 17f2d11ccfec..f0de71dfc239 100644
--- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala
@@ -1,16 +1,16 @@
package dotty.tools.dotc
package transform
-import core._
-import Contexts._
+import core.*
+import Contexts.*
import DenotTransformers.SymTransformer
-import Flags._
-import SymDenotations._
-import Symbols._
+import Flags.*
+import SymDenotations.*
+import Symbols.*
import typer.RefChecks
import MegaPhase.MiniPhase
import ast.tpd
-import SymUtils._
+import SymUtils.*
import config.Feature
import Decorators.*
import dotty.tools.dotc.core.Types.MethodType
@@ -23,8 +23,8 @@ import dotty.tools.dotc.core.Types.MethodType
* as IsInstanceOfChecker don't give false negatives.
*/
class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform =>
- import tpd._
- import PruneErasedDefs._
+ import tpd.*
+ import PruneErasedDefs.*
override def phaseName: String = PruneErasedDefs.name
@@ -66,7 +66,7 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform =>
}
object PruneErasedDefs {
- import tpd._
+ import tpd.*
val name: String = "pruneErasedDefs"
val description: String = "drop erased definitions and simplify erased expressions"
diff --git a/compiler/src/dotty/tools/dotc/transform/PureStats.scala b/compiler/src/dotty/tools/dotc/transform/PureStats.scala
index b747d7d6b9e4..22fdef3aaab6 100644
--- a/compiler/src/dotty/tools/dotc/transform/PureStats.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PureStats.scala
@@ -2,10 +2,10 @@ package dotty.tools.dotc
package transform
import ast.{Trees, tpd}
-import core._, core.Decorators._
-import MegaPhase._
-import Types._, Contexts._, Flags._, DenotTransformers._
-import Symbols._, StdNames._, Trees._
+import core.*, core.Decorators.*
+import MegaPhase.*
+import Types.*, Contexts.*, Flags.*, DenotTransformers.*
+import Symbols.*, StdNames.*, Trees.*
object PureStats {
val name: String = "pureStats"
@@ -15,7 +15,7 @@ object PureStats {
/** Remove pure statements in blocks */
class PureStats extends MiniPhase {
- import tpd._
+ import tpd.*
override def phaseName: String = PureStats.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala
index 9833b3cf177f..b15a58b98b6f 100644
--- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala
@@ -596,9 +596,9 @@ abstract class Recheck extends Phase, SymTransformer:
/** Show tree with rechecked types instead of the types stored in the `.tpe` field */
override def show(tree: untpd.Tree)(using Context): String =
- atPhase(thisPhase) {
- super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree]))
- }
+ atPhase(thisPhase):
+ withMode(Mode.Printing):
+ super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree]))
end Recheck
/** A class that can be used to test basic rechecking without any customization */
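Besides the import rewrites, the Recheck hunk above switches to Scala 3's optional-braces style, where a trailing colon on an argument list opens an indented block, and it additionally threads `Mode.Printing` through the nested call. A minimal sketch of the colon form, with hypothetical stand-ins for the compiler's `atPhase` and `withMode` combinators:

```scala
// Hypothetical by-name combinators standing in for the compiler's own:
def atPhase[T](phase: String)(body: => T): T = body
def withMode[T](mode: String)(body: => T): T = body

def show(tree: String): String =
  atPhase("thisPhase"):      // colon plus indentation replaces { ... }
    withMode("Printing"):
      tree.trim
```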
diff --git a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala
index 6e73d683fa2c..2fd9f923d98e 100644
--- a/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ReifiedReflect.scala
@@ -1,22 +1,22 @@
package dotty.tools.dotc
package transform
-import core._
-import Decorators._
-import Flags._
-import Types._
-import Contexts._
-import Symbols._
-import SymUtils._
-import NameKinds._
+import core.*
+import Decorators.*
+import Flags.*
+import Types.*
+import Contexts.*
+import Symbols.*
+import SymUtils.*
+import NameKinds.*
import dotty.tools.dotc.ast.tpd
-import tpd._
+import tpd.*
import scala.collection.mutable
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.Names._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.quoted._
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.quoted.*
import scala.annotation.constructorOnly
diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala
index d6c11fe36748..8ed1edcd0308 100644
--- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala
+++ b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala
@@ -1,16 +1,16 @@
package dotty.tools.dotc
package transform
-import core._
-import ast.tpd._
-import Contexts._
-import MegaPhase._
-import Annotations._
+import core.*
+import ast.tpd.*
+import Contexts.*
+import MegaPhase.*
+import Annotations.*
import Symbols.defn
-import Constants._
-import Types._
-import Decorators._
-import Flags._
+import Constants.*
+import Types.*
+import Decorators.*
+import Flags.*
import scala.collection.mutable
diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
index 99b6be1eea8a..e864178af658 100644
--- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
@@ -1,19 +1,19 @@
package dotty.tools.dotc
package transform
-import core._
-import MegaPhase._
-import Contexts._
-import Flags._
-import SymUtils._
-import Symbols._
-import Decorators._
-import DenotTransformers._
-import Names._
-import NameOps._
-import NameKinds._
-import NullOpsDecorator._
-import ResolveSuper._
+import core.*
+import MegaPhase.*
+import Contexts.*
+import Flags.*
+import SymUtils.*
+import Symbols.*
+import Decorators.*
+import DenotTransformers.*
+import Names.*
+import NameOps.*
+import NameKinds.*
+import NullOpsDecorator.*
+import ResolveSuper.*
import reporting.IllegalSuperAccessor
/** This phase implements super accessors in classes that need them.
@@ -31,7 +31,7 @@ import reporting.IllegalSuperAccessor
* Mixin, which runs after erasure.
*/
class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = ResolveSuper.name
@@ -45,7 +45,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase =
override def transformTemplate(impl: Template)(using Context): Template = {
val cls = impl.symbol.owner.asClass
val ops = new MixinOps(cls, thisPhase)
- import ops._
+ import ops.*
def superAccessors(mixin: ClassSymbol): List[Tree] =
for superAcc <- mixin.info.decls.filter(_.isSuperAccessor)
@@ -66,7 +66,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase =
assert(ddef.rhs.isEmpty, ddef.symbol)
val cls = meth.owner.asClass
val ops = new MixinOps(cls, thisPhase)
- import ops._
+ import ops.*
DefDef(meth, forwarderRhsFn(rebindSuper(cls, meth)))
}
else ddef
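The two `import ops.*` lines above import the members of a runtime value: `ops` is a stable local `val`, so its members become callable unqualified inside the method. A self-contained sketch of that idiom, with a hypothetical, much-simplified `MixinOps`:

```scala
class MixinOps(cls: String): // hypothetical stand-in for the real MixinOps
  def mkForwarder(member: String): String = s"def $member = super[$cls].$member"

def superForwarder(): String =
  val ops = new MixinOps("Mixin")
  import ops.*               // members of the stable value `ops`
  mkForwarder("toString")    // resolves to ops.mkForwarder
```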
diff --git a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
index d01be0419a4d..e66f5e4b37b8 100644
--- a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
+++ b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
@@ -1,11 +1,11 @@
package dotty.tools.dotc
package transform
-import core._
+import core.*
import DenotTransformers.IdentityDenotTransformer
-import Contexts._
-import Symbols._
-import Scopes._
+import Contexts.*
+import Symbols.*
+import Scopes.*
import MegaPhase.MiniPhase
/** The preceding lambda lift and flatten phases move symbols to different scopes
@@ -13,7 +13,7 @@ import MegaPhase.MiniPhase
* class scopes contain the symbols defined in them.
*/
class RestoreScopes extends MiniPhase with IdentityDenotTransformer { thisPhase =>
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = RestoreScopes.name
diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
index 1df9809c2f62..6177e5d0839d 100644
--- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
@@ -2,13 +2,13 @@ package dotty.tools.dotc
package transform
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core._
-import dotty.tools.dotc.transform.MegaPhase._
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.*
+import dotty.tools.dotc.transform.MegaPhase.*
+import dotty.tools.dotc.transform.SymUtils.*
/** Removes `Select`s that would be compiled into `GetStatic`.
*
@@ -43,7 +43,7 @@ import dotty.tools.dotc.transform.SymUtils._
* @author Dmytro Petrashko
*/
class SelectStatic extends MiniPhase with IdentityDenotTransformer {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = SelectStatic.name
diff --git a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
index 2f586104c4e3..20f4e6d85daa 100644
--- a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package transform
-import core._
-import dotty.tools.dotc.transform.MegaPhase._
-import Contexts._
+import core.*
+import dotty.tools.dotc.transform.MegaPhase.*
+import Contexts.*
/** A transformer that eliminates SeqLiteral's, transforming `SeqLiteral(elems)` to an operation
* equivalent to
@@ -15,7 +15,7 @@ import Contexts._
* keep a precise type after erasure, whereas SeqLiterals only get the erased type `Seq`,
*/
class SeqLiterals extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = SeqLiterals.name
diff --git a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala
index d17dbbecc555..f62b1f5f01f2 100644
--- a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc.transform
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.Phases.Phase
/** Set the `rootTreeOrProvider` property of class symbols. */
diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala
index 5c5c02c1bc75..fd314b94e50c 100644
--- a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package transform
-import ast.Trees._, ast.tpd, core._
-import Contexts._, Types._, Decorators._, Symbols._, DenotTransformers._
-import SymDenotations._, Scopes._, StdNames._, NameOps._, Names._
+import ast.Trees.*, ast.tpd, core.*
+import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.*
+import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.*
import MegaPhase.MiniPhase
import scala.collection.mutable
@@ -18,7 +18,7 @@ import scala.collection.mutable
* different standard library.
*/
class SpecializeApplyMethods extends MiniPhase with InfoTransformer {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = SpecializeApplyMethods.name
diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
index c50eaddd3213..f41900d31b66 100644
--- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package transform
-import ast.Trees._, ast.tpd, core._
-import Contexts._, Types._, Decorators._, Symbols._, DenotTransformers._
-import SymDenotations._, Scopes._, StdNames._, NameOps._, Names._
+import ast.Trees.*, ast.tpd, core.*
+import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.*
+import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.*
import MegaPhase.MiniPhase
@@ -11,7 +11,7 @@ import MegaPhase.MiniPhase
* specialized form.
*/
class SpecializeFunctions extends MiniPhase {
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = SpecializeFunctions.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala
index 46a8571b4f30..1f7b65ba66a6 100644
--- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala
@@ -7,14 +7,14 @@ import java.io.{PrintWriter, StringWriter}
import java.lang.reflect.{InvocationTargetException, Method => JLRMethod}
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.NameKinds.FlatName
import dotty.tools.dotc.core.Names.Name
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Types._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.core.Denotations.staticRef
import dotty.tools.dotc.core.TypeErasure
import dotty.tools.dotc.core.Constants.Constant
@@ -30,7 +30,7 @@ import scala.reflect.ClassTag
import dotty.tools.dotc.quoted.{PickledQuotes, QuoteUtils}
import scala.quoted.Quotes
-import scala.quoted.runtime.impl._
+import scala.quoted.runtime.impl.*
import dotty.tools.dotc.core.NameKinds
/** Utility class to splice quoted expressions */
diff --git a/compiler/src/dotty/tools/dotc/transform/Splicing.scala b/compiler/src/dotty/tools/dotc/transform/Splicing.scala
index dd95d5a9ca1e..0c64a366686d 100644
--- a/compiler/src/dotty/tools/dotc/transform/Splicing.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Splicing.scala
@@ -1,25 +1,25 @@
package dotty.tools.dotc
package transform
-import core._
-import Decorators._
-import Flags._
-import Types._
-import Contexts._
-import Symbols._
-import Constants._
-import ast.Trees._
+import core.*
+import Decorators.*
+import Flags.*
+import Types.*
+import Contexts.*
+import Symbols.*
+import Constants.*
+import ast.Trees.*
import ast.{TreeTypeMap, untpd}
-import util.Spans._
-import SymUtils._
-import NameKinds._
+import util.Spans.*
+import SymUtils.*
+import NameKinds.*
import dotty.tools.dotc.ast.tpd
import scala.collection.mutable
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.Names._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.quoted._
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.quoted.*
import dotty.tools.dotc.config.ScalaRelease.*
import dotty.tools.dotc.staging.StagingLevel.*
import dotty.tools.dotc.staging.QuoteTypeTags
@@ -72,7 +72,7 @@ object Splicing:
*
*/
class Splicing extends MacroTransform:
- import tpd._
+ import tpd.*
override def phaseName: String = Splicing.name
diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala
index 43cbe80ce8c4..f7fac1981fb2 100644
--- a/compiler/src/dotty/tools/dotc/transform/Staging.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala
@@ -2,15 +2,15 @@ package dotty.tools.dotc
package transform
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Phases._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Phases.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.inlines.Inlines
import dotty.tools.dotc.util.SrcPos
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.staging.StagingLevel.*
import dotty.tools.dotc.staging.CrossStageSafety
import dotty.tools.dotc.staging.HealType
@@ -20,7 +20,7 @@ import dotty.tools.dotc.staging.HealType
* See `CrossStageSafety`
*/
class Staging extends MacroTransform {
- import tpd._
+ import tpd.*
override def phaseName: String = Staging.name
diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
index b15c50158b75..2d8d51b4059f 100644
--- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala
@@ -5,13 +5,13 @@ package transform
import dotty.tools.dotc.ast.{Trees, tpd}
import scala.collection.mutable
import ValueClasses.isMethodWithExtension
-import core._
-import Contexts._, Flags._, Symbols._, Names._, StdNames._, NameOps._, Trees._
-import TypeUtils._, SymUtils._
+import core.*
+import Contexts.*, Flags.*, Symbols.*, Names.*, StdNames.*, NameOps.*, Trees.*
+import TypeUtils.*, SymUtils.*
import DenotTransformers.DenotTransformer
-import Symbols._
-import util.Spans._
-import Decorators._
+import Symbols.*
+import util.Spans.*
+import Decorators.*
import NameKinds.{ SuperAccessorName, ExpandPrefixName }
/** This class adds super accessors for all super calls that either
@@ -32,7 +32,7 @@ import NameKinds.{ SuperAccessorName, ExpandPrefixName }
*/
class SuperAccessors(thisPhase: DenotTransformer) {
- import tpd._
+ import tpd.*
/** Some parts of trees will get a new owner in subsequent phases.
* These are value class methods, which will become extension methods.
diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
index 79b5ecbf30c7..ddee2588b152 100644
--- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
@@ -1,21 +1,21 @@
package dotty.tools.dotc
package transform
-import core._
-import Types._
-import Contexts._
-import Symbols._
-import SymDenotations._
-import Names._
-import NameOps._
-import StdNames._
-import NameKinds._
-import Flags._
+import core.*
+import Types.*
+import Contexts.*
+import Symbols.*
+import SymDenotations.*
+import Names.*
+import NameOps.*
+import StdNames.*
+import NameKinds.*
+import Flags.*
import ValueClasses.isDerivedValueClass
-import Decorators._
+import Decorators.*
import Constants.Constant
import Annotations.Annotation
-import Phases._
+import Phases.*
import ast.tpd.Literal
import dotty.tools.dotc.transform.sjs.JSSymUtils.sjsNeedsField
diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala
index 200e7eb2c215..9d19251638db 100644
--- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala
@@ -1,21 +1,21 @@
package dotty.tools.dotc
package transform
-import core._
-import Symbols._, Types._, Contexts._, Names._, StdNames._, Constants._, SymUtils._
-import Flags._
-import DenotTransformers._
-import Decorators._
-import NameOps._
+import core.*
+import Symbols.*, Types.*, Contexts.*, Names.*, StdNames.*, Constants.*, SymUtils.*
+import Flags.*
+import DenotTransformers.*
+import Decorators.*
+import NameOps.*
import Annotations.Annotation
import typer.ProtoTypes.constrained
import ast.untpd
import ValueClasses.isDerivedValueClass
-import SymUtils._
+import SymUtils.*
import util.Property
import util.Spans.Span
import config.Printers.derive
-import NullOpsDecorator._
+import NullOpsDecorator.*
object SyntheticMembers {
@@ -53,8 +53,8 @@ object SyntheticMembers {
* def hashCode(): Int
*/
class SyntheticMembers(thisPhase: DenotTransformer) {
- import SyntheticMembers._
- import ast.tpd._
+ import SyntheticMembers.*
+ import ast.tpd.*
private var myValueSymbols: List[Symbol] = Nil
private var myCaseSymbols: List[Symbol] = Nil
diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala
index 741b9d1627fe..43c740ce7d38 100644
--- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala
@@ -13,6 +13,8 @@ import transform.MegaPhase.MiniPhase
import util.LinearSet
import dotty.tools.uncheckedNN
+import scala.compiletime.uninitialized
+
/** A Tail Rec Transformer.
*
* What it does:
@@ -105,7 +107,7 @@ import dotty.tools.uncheckedNN
* moved after erasure and adapted to emit `Labeled` blocks by Sébastien Doeraene
*/
class TailRec extends MiniPhase {
- import tpd._
+ import tpd.*
override def phaseName: String = TailRec.name
@@ -232,7 +234,7 @@ class TailRec extends MiniPhase {
var failureReported: Boolean = false
/** The `tailLabelN` label symbol, used to encode a `continue` from the infinite `while` loop. */
- private var myContinueLabel: Symbol | Null = _
+ private var myContinueLabel: Symbol | Null = uninitialized
def continueLabel(using Context): Symbol = {
if (myContinueLabel == null)
myContinueLabel = newSymbol(method, TailLabelName.fresh(), Label, defn.UnitType)
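Alongside the new `scala.compiletime.uninitialized` import at the top of the file, this hunk replaces the deprecated default-initializer `var x: T = _` with the explicit `uninitialized` marker; the same change appears in ExplicitJSClasses further down. A minimal sketch of the migration on a hypothetical class:

```scala
import scala.compiletime.uninitialized

class ContinueLabel:
  // Scala 2 wrote `private var label: String = _`; Scala 3 deprecates
  // that form in favour of the explicit marker:
  private var label: String = uninitialized

  def get(): String =
    if label == null then label = "tailLabel" // reference vars start out null
    label
```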
diff --git a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala
index ffed65f7676e..2be41ba208f1 100644
--- a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala
@@ -1,9 +1,9 @@
package dotty.tools.dotc
package transform
-import MegaPhase._
-import core.DenotTransformers._
-import core.Contexts._
+import MegaPhase.*
+import core.DenotTransformers.*
+import core.Contexts.*
import ast.tpd
/** This phase transforms wildcards in valdefs with their default value.
@@ -12,7 +12,7 @@ import ast.tpd
*
*/
class TransformWildcards extends MiniPhase with IdentityDenotTransformer {
- import tpd._
+ import tpd.*
override def phaseName: String = TransformWildcards.name
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
index 2badb4cfc1c3..8b7fdaf301d0 100644
--- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -3,24 +3,24 @@ package dotc
package transform
import core.Names.Name
-import core.DenotTransformers._
-import core.SymDenotations._
-import core.Contexts._
-import core.Symbols._
-import core.Types._
-import core.Flags._
-import core.StdNames._
+import core.DenotTransformers.*
+import core.SymDenotations.*
+import core.Contexts.*
+import core.Symbols.*
+import core.Types.*
+import core.Flags.*
+import core.StdNames.*
import core.NameKinds.{DocArtifactName, OuterSelectName}
-import core.Decorators._
-import core.Phases._
+import core.Decorators.*
+import core.Phases.*
import core.Mode
-import typer._
-import reporting._
-import ast.Trees._
+import typer.*
+import reporting.*
+import ast.Trees.*
import ast.{tpd, untpd}
-import util.Chars._
+import util.Chars.*
import collection.mutable
-import ProtoTypes._
+import ProtoTypes.*
import staging.StagingLevel
import inlines.Inlines.inInlineMethod
@@ -39,8 +39,8 @@ import scala.util.control.NonFatal
* represented as TypeTrees then).
*/
class TreeChecker extends Phase with SymTransformer {
- import ast.tpd._
- import TreeChecker._
+ import ast.tpd.*
+ import TreeChecker.*
private val seenClasses = collection.mutable.HashMap[String, Symbol]()
private val seenModuleVals = collection.mutable.HashMap[String, Symbol]()
@@ -186,7 +186,7 @@ object TreeChecker {
* tpt, SeqLiteral elemtpt, ValDef tpt, DefDef tpt, and TypeDef rhs.
*/
object TreeNodeChecker extends untpd.TreeTraverser:
- import untpd._
+ import untpd.*
def traverse(tree: Tree)(using Context) = tree match
case t: TypeTree => assert(assertion = false, i"TypeTree not expected: $t")
case t @ TypeApply(fun, _targs) => traverse(fun)
@@ -207,7 +207,7 @@ object TreeChecker {
class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking {
- import ast.tpd._
+ import ast.tpd.*
protected val nowDefinedSyms = util.HashSet[Symbol]()
private val patBoundSyms = util.HashSet[Symbol]()
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala
index aec44d5987bf..8d5b7c28bbbc 100644
--- a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala
@@ -2,12 +2,12 @@ package dotty.tools.dotc
package transform
import ast.{Trees, tpd}
-import core._
-import Contexts._, Trees._, Types._, StdNames._, Symbols._
-import ValueClasses._
+import core.*
+import Contexts.*, Trees.*, Types.*, StdNames.*, Symbols.*
+import ValueClasses.*
object TreeExtractors {
- import tpd._
+ import tpd.*
/** Match arg1.op(arg2) and extract (arg1, op.symbol, arg2) */
object BinaryOp {
diff --git a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
index 92d22b1cc57e..095c6af60766 100644
--- a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
package transform
-import core.Symbols._
-import core.StdNames._
-import core.Types._
+import core.Symbols.*
+import core.StdNames.*
+import core.Types.*
import core.NameKinds.ExceptionBinderName
import dotty.tools.dotc.core.Flags
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.transform.MegaPhase.MiniPhase
import dotty.tools.dotc.util.Spans.Span
@@ -39,7 +39,7 @@ import dotty.tools.dotc.util.Spans.Span
*
*/
class TryCatchPatterns extends MiniPhase {
- import dotty.tools.dotc.ast.tpd._
+ import dotty.tools.dotc.ast.tpd.*
override def phaseName: String = TryCatchPatterns.name
diff --git a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala
index 6fba0bca4ce3..fee7bb19e0be 100644
--- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala
@@ -1,21 +1,21 @@
package dotty.tools.dotc
package transform
-import core._
-import Contexts._
-import Decorators._
-import Definitions._
-import DenotTransformers._
-import StdNames._
-import Symbols._
-import MegaPhase._
-import Types._
+import core.*
+import Contexts.*
+import Decorators.*
+import Definitions.*
+import DenotTransformers.*
+import StdNames.*
+import Symbols.*
+import MegaPhase.*
+import Types.*
import dotty.tools.dotc.ast.tpd
/** Optimize generic operations on tuples */
class TupleOptimizations extends MiniPhase with IdentityDenotTransformer {
- import tpd._
+ import tpd.*
override def phaseName: String = TupleOptimizations.name
diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
index 556204ab89ab..f8092ba51c2a 100644
--- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
@@ -4,16 +4,16 @@ package transform
import scala.language.unsafeNulls as _
-import core._
-import Contexts._, Symbols._, Types._, Constants._, StdNames._, Decorators._
+import core.*
+import Contexts.*, Symbols.*, Types.*, Constants.*, StdNames.*, Decorators.*
import ast.untpd
-import Erasure.Boxing._
-import TypeErasure._
-import ValueClasses._
-import SymUtils._
-import core.Flags._
-import util.Spans._
-import reporting._
+import Erasure.Boxing.*
+import TypeErasure.*
+import ValueClasses.*
+import SymUtils.*
+import core.Flags.*
+import util.Spans.*
+import reporting.*
import config.Printers.{ transforms => debug }
import patmat.Typ
@@ -29,7 +29,7 @@ import patmat.Typ
* cannot be rewritten before erasure. That's why TypeTestsCasts is called from Erasure.
*/
object TypeTestsCasts {
- import ast.tpd._
+ import ast.tpd.*
import typer.Inferencing.maximizeType
import typer.ProtoTypes.constrained
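Note that the renaming selector `{ transforms => debug }` kept above still uses the Scala 2 `=>` arrow; this patch rewrites only wildcards. Scala 3's spelling of a rename is `as`, sketched here with a hypothetical `Printers` object:

```scala
object Printers:
  val transforms: String = "transforms printer" // hypothetical stand-in

import Printers.{transforms => debug}  // Scala 2 spelling, left untouched here
import Printers.{transforms as debug2} // Scala 3 spelling of the same rename

val both = (debug, debug2)
```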
diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
index 779552a3d46f..9528e683cc55 100644
--- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
@@ -2,11 +2,11 @@ package dotty.tools
package dotc
package transform
-import core._
+import core.*
import TypeErasure.ErasedValueType
-import Types._
-import Contexts._
-import Symbols._
+import Types.*
+import Contexts.*
+import Symbols.*
import Names.Name
import dotty.tools.dotc.core.Decorators.*
diff --git a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala
index 95d40102c5a7..6e02ea7c227c 100644
--- a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala
+++ b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala
@@ -1,12 +1,12 @@
package dotty.tools.dotc
package transform
-import MegaPhase._
+import MegaPhase.*
import core.DenotTransformers.{IdentityDenotTransformer}
-import core.Symbols._
-import core.Contexts._
-import core.Types._
-import core.Flags._
+import core.Symbols.*
+import core.Contexts.*
+import core.Types.*
+import core.Flags.*
import ast.tpd
object UncacheGivenAliases:
@@ -23,7 +23,7 @@ object UncacheGivenAliases:
*/
class UncacheGivenAliases extends MiniPhase with IdentityDenotTransformer:
thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = UncacheGivenAliases.name
diff --git a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala
index a7ccaa19d90a..f22fc53e9b6e 100644
--- a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala
@@ -1,10 +1,10 @@
package dotty.tools.dotc
package transform
-import core._
-import Contexts._
-import Flags._
-import Symbols._
+import core.*
+import Contexts.*
+import Flags.*
+import Symbols.*
import MegaPhase.MiniPhase
import StdNames.nme
import ast.tpd
@@ -19,7 +19,7 @@ import ast.tpd
* @syntax markdown
*/
class UninitializedDefs extends MiniPhase:
- import tpd._
+ import tpd.*
override def phaseName: String = UninitializedDefs.name
diff --git a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
index 879a885d626e..6430dd7248b1 100644
--- a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
+++ b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
@@ -2,10 +2,10 @@ package dotty.tools.dotc
package transform
import ast.tpd
-import core._
-import Contexts._, Symbols._, Types._, Flags._, Phases._
-import DenotTransformers._, MegaPhase._
-import TreeExtractors._, ValueClasses._
+import core.*
+import Contexts.*, Symbols.*, Types.*, Flags.*, Phases.*
+import DenotTransformers.*, MegaPhase.*
+import TreeExtractors.*, ValueClasses.*
/** This phase elides unnecessary value class allocations
*
@@ -16,7 +16,7 @@ import TreeExtractors._, ValueClasses._
* (new V(u)).underlying() => u
*/
class VCElideAllocations extends MiniPhase with IdentityDenotTransformer {
- import tpd._
+ import tpd.*
override def phaseName: String = VCElideAllocations.name
diff --git a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
index 219945d4ebb1..fb1dd04bd6ad 100644
--- a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
@@ -3,10 +3,10 @@ package dotc
package transform
import ast.{Trees, tpd}
-import core._
-import Contexts._, Trees._, Types._
-import DenotTransformers._, MegaPhase._
-import ExtensionMethods._, ValueClasses._
+import core.*
+import Contexts.*, Trees.*, Types.*
+import DenotTransformers.*, MegaPhase.*
+import ExtensionMethods.*, ValueClasses.*
/** This phase inlines calls to methods of value classes.
@@ -40,7 +40,7 @@ import ExtensionMethods._, ValueClasses._
* need to have any knowledge of the name mangling done by other phases.
*/
class VCInlineMethods extends MiniPhase with IdentityDenotTransformer {
- import tpd._
+ import tpd.*
override def phaseName: String = VCInlineMethods.name
diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
index 28d1255eaa72..d0c012322fce 100644
--- a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala
@@ -1,14 +1,14 @@
package dotty.tools.dotc
package transform
-import core._
-import Types._
-import Symbols._
-import Contexts._
-import Phases._
-import Flags._
-import StdNames._
-import SymUtils._
+import core.*
+import Types.*
+import Symbols.*
+import Contexts.*
+import Phases.*
+import Flags.*
+import StdNames.*
+import SymUtils.*
/** Methods that apply to user-defined value classes */
object ValueClasses {
diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala
index 3cf74ee3fdb3..1365bc36e958 100644
--- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala
+++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala
@@ -2,16 +2,16 @@ package dotty.tools.dotc
package transform
import dotty.tools.dotc.ast.{tpd, untpd}
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.Phases.{Phase, postTyperPhase}
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.util.SourceFile
/** Ycheck inlined positions */
class YCheckPositions extends Phase {
- import tpd._
+ import tpd.*
override def phaseName: String = YCheckPositions.name
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala
index 8478cefbc764..7cf028c95064 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala
@@ -2,22 +2,22 @@ package dotty.tools.dotc
package transform
package init
-import dotty.tools.dotc._
+import dotty.tools.dotc.*
import ast.tpd
-import tpd._
+import tpd.*
-import dotty.tools.dotc.core._
-import Contexts._
-import Types._
-import Symbols._
-import StdNames._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Types.*
+import Symbols.*
+import StdNames.*
-import dotty.tools.dotc.transform._
-import Phases._
+import dotty.tools.dotc.transform.*
+import Phases.*
import scala.collection.mutable
-import Semantic._
+import Semantic.*
import dotty.tools.unsupported
class Checker extends Phase:
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala
index 366fd6be96a2..85feb609c90a 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala
@@ -3,11 +3,11 @@ package dotc
package transform
package init
-import ast.tpd._
-import core._
+import ast.tpd.*
+import core.*
import util.Property
import util.SourcePosition
-import Types._, Symbols._, Contexts._
+import Types.*, Symbols.*, Contexts.*
import Trace.Trace
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala
index 8556ced63008..53c210a0f0e4 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala
@@ -1418,7 +1418,7 @@ object Objects:
val applyDenot = getMemberMethod(scrutineeType, nme.apply, applyType(elemType))
val applyRes = call(scrutinee, applyDenot.symbol, TraceValue(Bottom, summon[Trace]) :: Nil, scrutineeType, superType = NoType, needResolve = true)
- if isWildcardStarArg(pats.last) then
+ if isWildcardStarArgList(pats) then
if pats.size == 1 then
// call .toSeq
val toSeqDenot = scrutineeType.member(nme.toSeq).suchThat(_.info.isParameterless)
@@ -1433,7 +1433,8 @@ object Objects:
end if
else
// no patterns like `xs*`
- for pat <- pats do evalPattern(applyRes, pat)
+ for pat <- pats do evalPattern(applyRes, pat)
+ end if
end evalSeqPatterns
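The fix above replaces a check on only the last pattern with `isWildcardStarArgList`, which presumably inspects the whole pattern list, and closes the branch with an explicit `end if` marker. A toy sketch of the two Scala 3 features involved, the `rest*` sequence-wildcard binder and `end` markers:

```scala
def describe(xs: Seq[Int]): String =
  xs match
    case Seq(a, b, rest*) => s"$a, $b, then ${rest.size} more" // Scala 2: rest @ _*
    case short            => s"only ${short.size} elements"

def sign(n: Int): String =
  if n > 0 then
    "positive"
  else
    "non-positive"
  end if // explicit end marker, as added in the hunk above
```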
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala
index b75a688d6e6c..499c2d289783 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala
@@ -1383,11 +1383,11 @@ object Semantic:
case tpl: Template =>
init(tpl, thisV, klass)
- case _: Import | _: Export =>
+ case _: Import | _: Export | _: Quote | _: Splice | _: QuotePattern | _: SplicePattern =>
Hot
case _ =>
- report.warning("[Internal error] unexpected tree" + Trace.show, expr)
+ report.warning("[Internal error] unexpected tree: " + expr.getClass + ", trace:\n" + Trace.show, expr)
Hot
/** Handle semantics of leaf nodes
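The widened alternative above lets the initialization checker treat quote and splice trees as harmless (`Hot`) rather than falling through to the internal-error case, and that fallback now also reports the class of the unexpected tree. A toy sketch of the shape of the fix, with hypothetical tree classes:

```scala
sealed trait Tree
final class Imp extends Tree  // hypothetical stand-ins for the Import/Export
final class Quo extends Tree  // and Quote/Splice tree nodes
final class Oops extends Tree

def eval(t: Tree): String = t match
  case _: Imp | _: Quo => "Hot" // nothing to initialize in these trees
  case other => s"[Internal error] unexpected tree: ${other.getClass}"
```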
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala
index 98380686c6aa..dc9ab3bfc7a1 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala
@@ -7,7 +7,7 @@ import Contexts.*
import ast.tpd.*
import util.SourcePosition
-import Decorators._, printing.SyntaxHighlighting
+import Decorators.*, printing.SyntaxHighlighting
import scala.collection.mutable
diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala
index ff8d89920791..9e40792895c0 100644
--- a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala
+++ b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala
@@ -10,9 +10,9 @@ import scala.util.matching.Regex.Match
import PartialFunction.cond
import dotty.tools.dotc.ast.tpd.{Match => _, *}
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.core.Phases.typerPhase
import dotty.tools.dotc.util.Spans.Span
diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala
index 5cad7ba72831..7743054f5487 100644
--- a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala
+++ b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala
@@ -4,12 +4,12 @@ package transform.localopt
import scala.language.unsafeNulls
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Decorators.*
import dotty.tools.dotc.core.Constants.Constant
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.transform.MegaPhase.MiniPhase
import dotty.tools.dotc.typer.ConstFold
diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
index 88c09eb4a521..50f6a6becef6 100644
--- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
+++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
@@ -3,23 +3,23 @@ package dotc
package transform
package patmat
-import core._
-import Types._
-import TypeUtils._
-import Contexts._
-import Flags._
-import ast._
+import core.*
+import Types.*
+import TypeUtils.*
+import Contexts.*
+import Flags.*
+import ast.*
import Decorators.{ show => _, * }
-import Symbols._
-import StdNames._
-import NameOps._
-import Constants._
-import typer._
-import Applications._
-import Inferencing._
-import ProtoTypes._
-import transform.SymUtils._
-import reporting._
+import Symbols.*
+import StdNames.*
+import NameOps.*
+import Constants.*
+import typer.*
+import Applications.*
+import Inferencing.*
+import ProtoTypes.*
+import transform.SymUtils.*
+import reporting.*
import config.Printers.{exhaustivity => debug}
import util.{SrcPos, NoSourcePosition}
@@ -116,7 +116,7 @@ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space
case class Or(spaces: Seq[Space]) extends Space
object SpaceEngine {
- import tpd._
+ import tpd.*
def simplify(space: Space)(using Context): Space = space.simplify
def isSubspace(a: Space, b: Space)(using Context): Boolean = a.isSubspace(b)
@@ -149,7 +149,7 @@ object SpaceEngine {
if (spaces.lengthCompare(1) <= 0 || spaces.lengthCompare(10) >= 0) spaces
else {
val res = spaces.map(sp => (sp, spaces.filter(_ ne sp))).find {
- case (sp, sps) => isSubspace(sp, Or(LazyList(sps: _*)))
+ case (sp, sps) => isSubspace(sp, Or(LazyList(sps*)))
}
if (res.isEmpty) spaces
else res.get._2
@@ -158,7 +158,7 @@ object SpaceEngine {
/** Flatten space to get rid of `Or` for pretty print */
def flatten(space: Space)(using Context): Seq[Space] = space match {
case Prod(tp, fun, spaces) =>
- val ss = LazyList(spaces: _*).map(flatten)
+ val ss = LazyList(spaces*).map(flatten)
ss.foldLeft(LazyList(Nil : List[Space])) { (acc, flat) =>
for { sps <- acc; s <- flat }
@@ -168,7 +168,7 @@ object SpaceEngine {
}
case Or(spaces) =>
- LazyList(spaces: _*).flatMap(flatten)
+ LazyList(spaces*).flatMap(flatten)
case _ =>
List(space)
@@ -272,7 +272,7 @@ object SpaceEngine {
else if cache.forall(sub => isSubspace(sub.nn, Empty)) then Empty
else
// `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)`
- val spaces = LazyList(range: _*).flatMap { i =>
+ val spaces = LazyList(range*).flatMap { i =>
flatten(sub(i)).map(s => Prod(tp1, fun1, ss1.updated(i, s)))
}
Or(spaces)
@@ -483,7 +483,7 @@ object SpaceEngine {
case _ => tp
})
- /** Space of the pattern: unapplySeq(a, b, c: _*)
+ /** Space of the pattern: unapplySeq(a, b, c*)
*/
def projectSeq(pats: List[Tree])(using Context): Space = {
if (pats.isEmpty) return Typ(defn.NilType, false)
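The hunks above also migrate vararg splices from Scala 2's `xs: _*` type ascription to Scala 3's postfix `xs*`, both in expressions such as `LazyList(spaces*)` and in the doc comment. A one-line sketch:

```scala
val spaces = Vector("sp1", "sp2", "sp3")

// Scala 2 spelled the splice `LazyList(spaces: _*)`; Scala 3 writes:
val lazily: LazyList[String] = LazyList(spaces*)
```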
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala
index 6471e58d4ddc..951024f3d4db 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala
@@ -3,12 +3,12 @@ package dotc
package transform
package sjs
-import MegaPhase._
+import MegaPhase.*
import core.Constants
-import core.Contexts._
-import core.Decorators._
+import core.Contexts.*
+import core.Decorators.*
import core.StdNames.nme
-import core.Symbols._
+import core.Symbols.*
import dotty.tools.backend.sjs.JSDefinitions.jsdefn
@@ -46,7 +46,7 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn
*/
class AddLocalJSFakeNews extends MiniPhase { thisPhase =>
import ExplicitOuter.outer
- import ast.tpd._
+ import ast.tpd.*
override def phaseName: String = AddLocalJSFakeNews.name
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala
index 705b3cc404a8..4d27ecee12fa 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala
@@ -3,28 +3,29 @@ package dotc
package transform
package sjs
-
-import MegaPhase._
-import core.Annotations._
-import core.Constants._
-import core.Denotations._
-import core.DenotTransformers._
-import core.Symbols._
-import core.Contexts._
-import core.Types._
-import core.Flags._
-import core.Decorators._
+import scala.compiletime.uninitialized
+
+import MegaPhase.*
+import core.Annotations.*
+import core.Constants.*
+import core.Denotations.*
+import core.DenotTransformers.*
+import core.Symbols.*
+import core.Contexts.*
+import core.Types.*
+import core.Flags.*
+import core.Decorators.*
import core.StdNames.nme
import core.SymDenotations.SymDenotation
-import core.Names._
-import core.NameKinds._
-import SymUtils._
+import core.Names.*
+import core.NameKinds.*
+import SymUtils.*
import util.Store
import dotty.tools.backend.sjs.JSDefinitions.jsdefn
-import JSSymUtils._
+import JSSymUtils.*
/** This phase makes all JS classes explicit (their definitions and references to them).
*
@@ -229,14 +230,14 @@ import JSSymUtils._
* created by step (C).
*/
class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase =>
- import ExplicitJSClasses._
- import ast.tpd._
+ import ExplicitJSClasses.*
+ import ast.tpd.*
override def phaseName: String = ExplicitJSClasses.name
override def description: String = ExplicitJSClasses.description
- private var MyState: Store.Location[MyState] = _
+ private var MyState: Store.Location[MyState] = uninitialized
private def myState(using Context) = ctx.store(MyState)
override def initContext(ctx: FreshContext): Unit =
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala
index 9abf9a919d6d..2b0ed3c4880e 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala
@@ -4,9 +4,9 @@ package sjs
import scala.language.unsafeNulls
-import core._
+import core.*
import NameKinds.DefaultGetterName
-import Names._
+import Names.*
/** Utilities for JS exports handling. */
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala
index ae6635bce622..fafa1eb3cf79 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala
@@ -2,18 +2,18 @@ package dotty.tools.dotc
package transform
package sjs
-import core._
-import Constants._
-import Contexts._
-import Flags._
-import NameOps._
-import Names._
-import Phases._
-import StdNames._
-import Symbols._
-import SymUtils._
-import ast.Trees._
-import Types._
+import core.*
+import Constants.*
+import Contexts.*
+import Flags.*
+import NameOps.*
+import Names.*
+import Phases.*
+import StdNames.*
+import Symbols.*
+import SymUtils.*
+import ast.Trees.*
+import Types.*
import dotty.tools.backend.sjs.JSDefinitions.jsdefn
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala
index b911d7dfab96..7655eb79d6d4 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala
@@ -4,18 +4,18 @@ package sjs
import scala.annotation.tailrec
-import dotty.tools.dotc.core._
-import Constants._
-import Contexts._
-import Flags._
-import Names._
-import Scopes._
-import Symbols._
-import StdNames._
-import Types._
+import dotty.tools.dotc.core.*
+import Constants.*
+import Contexts.*
+import Flags.*
+import Names.*
+import Scopes.*
+import Symbols.*
+import StdNames.*
+import Types.*
import Decorators.em
-import dotty.tools.dotc.transform.MegaPhase._
+import dotty.tools.dotc.transform.MegaPhase.*
import dotty.tools.backend.sjs.JSDefinitions.jsdefn
@@ -106,8 +106,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn
* some point in the future.
*/
class JUnitBootstrappers extends MiniPhase {
- import JUnitBootstrappers._
- import ast.tpd._
+ import JUnitBootstrappers.*
+ import ast.tpd.*
override def phaseName: String = JUnitBootstrappers.name
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala
index 25ab46712e70..d7073ac2e261 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala
@@ -3,29 +3,29 @@ package transform
package sjs
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core._
-import Contexts._
-import Decorators._
-import Denotations._
-import Flags._
+import dotty.tools.dotc.core.*
+import Contexts.*
+import Decorators.*
+import Denotations.*
+import Flags.*
import NameKinds.DefaultGetterName
-import StdNames._
-import Symbols._
-import SymUtils._
-import Types._
+import StdNames.*
+import Symbols.*
+import SymUtils.*
+import Types.*
import util.Spans.Span
import util.SrcPos
import dotty.tools.backend.sjs.JSDefinitions.jsdefn
-import JSExportUtils._
-import JSSymUtils._
+import JSExportUtils.*
+import JSSymUtils.*
import org.scalajs.ir.Names.DefaultModuleID
import org.scalajs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName
object PrepJSExports {
- import tpd._
+ import tpd.*
import PrepJSInterop.{checkSetterSignature, isJSAny, isPrivateMaybeWithin}
private sealed abstract class ExportDestination
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
index a2f9a0fb45a3..2da2a98837c7 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
@@ -5,23 +5,23 @@ package sjs
import scala.collection.mutable
import ast.tpd
-import core._
+import core.*
import typer.Checking
import util.SrcPos
-import Annotations._
-import Constants._
-import Contexts._
-import Decorators._
-import DenotTransformers._
-import Flags._
+import Annotations.*
+import Constants.*
+import Contexts.*
+import Decorators.*
+import DenotTransformers.*
+import Flags.*
import NameKinds.{DefaultGetterName, ModuleClassName}
-import NameOps._
-import StdNames._
-import Symbols._
-import SymUtils._
-import Types._
+import NameOps.*
+import StdNames.*
+import Symbols.*
+import SymUtils.*
+import Types.*
-import JSSymUtils._
+import JSSymUtils.*
import org.scalajs.ir.Trees.JSGlobalRef
@@ -52,8 +52,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn
* pickling.
*/
class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisPhase =>
- import PrepJSInterop._
- import tpd._
+ import PrepJSInterop.*
+ import tpd.*
override def phaseName: String = PrepJSInterop.name
@@ -68,7 +68,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
new ScalaJSPrepJSInteropTransformer
class ScalaJSPrepJSInteropTransformer extends Transformer with Checking {
- import PrepJSExports._
+ import PrepJSExports.*
/** Kind of the directly enclosing (most nested) owner. */
private var enclosingOwner: OwnerKind = OwnerKind.None
diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala
index fb05af087a19..21f245da9485 100644
--- a/compiler/src/dotty/tools/dotc/typer/Applications.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala
@@ -2,33 +2,33 @@ package dotty.tools
package dotc
package typer
-import core._
+import core.*
import ast.{Trees, tpd, untpd, desugar}
import util.Stats.record
import util.{SrcPos, NoSourcePosition}
-import Contexts._
-import Flags._
-import Symbols._
+import Contexts.*
+import Flags.*
+import Symbols.*
import Denotations.Denotation
-import Types._
-import Decorators._
-import ErrorReporting._
-import Trees._
-import Names._
-import StdNames._
-import ContextOps._
+import Types.*
+import Decorators.*
+import ErrorReporting.*
+import Trees.*
+import Names.*
+import StdNames.*
+import ContextOps.*
import NameKinds.DefaultGetterName
-import ProtoTypes._
-import Inferencing._
-import reporting._
-import transform.TypeUtils._
-import transform.SymUtils._
-import Nullables._, NullOpsDecorator.*
+import ProtoTypes.*
+import Inferencing.*
+import reporting.*
+import transform.TypeUtils.*
+import transform.SymUtils.*
+import Nullables.*, NullOpsDecorator.*
import config.Feature
import collection.mutable
import config.Printers.{overload, typr, unapp}
-import TypeApplications._
+import TypeApplications.*
import Annotations.Annotation
import Constants.{Constant, IntTag}
@@ -38,7 +38,7 @@ import annotation.threadUnsafe
import scala.util.control.NonFatal
object Applications {
- import tpd._
+ import tpd.*
def extractorMember(tp: Type, name: Name)(using Context): SingleDenotation =
tp.member(name).suchThat(sym => sym.info.isParameterless && sym.info.widenExpr.isValueType)
@@ -352,7 +352,7 @@ object Applications {
trait Applications extends Compatibility {
self: Typer & Dynamic =>
- import Applications._
+ import Applications.*
import tpd.{ cpy => _, _ }
import untpd.cpy
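`import tpd.{ cpy => _, _ }` above is an exclusion import, everything from `tpd` except `cpy` (whose untyped variant is imported on the next line), still written with Scala 2 selectors. The Scala 3 spelling, sketched with a hypothetical object:

```scala
object trees:                    // hypothetical stand-in for tpd
  def cpy: String = "typed copier"
  def Apply: String = "apply node"

// Scala 2:  import trees.{ cpy => _, _ }
import trees.{cpy as _, *}       // Scala 3: everything except cpy

@main def demoExclusion(): Unit = println(Apply) // cpy is not in scope here
```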
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala
index c8026ad5784b..75871f2ab16a 100644
--- a/compiler/src/dotty/tools/dotc/typer/Checking.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala
@@ -2,18 +2,18 @@ package dotty.tools
package dotc
package typer
-import core._
-import ast._
-import Contexts._
-import Types._
-import Flags._
-import Names._
-import StdNames._
-import Symbols._
-import Trees._
-import ProtoTypes._
-import Scopes._
-import CheckRealizable._
+import core.*
+import ast.*
+import Contexts.*
+import Types.*
+import Flags.*
+import Names.*
+import StdNames.*
+import Symbols.*
+import Trees.*
+import ProtoTypes.*
+import Scopes.*
+import CheckRealizable.*
import ErrorReporting.errorTree
import util.Spans.Span
import Phases.refchecksPhase
@@ -23,29 +23,29 @@ import util.SrcPos
import util.Spans.Span
import rewrites.Rewrites.patch
import inlines.Inlines
-import transform.SymUtils._
-import transform.ValueClasses._
-import Decorators._
+import transform.SymUtils.*
+import transform.ValueClasses.*
+import Decorators.*
import ErrorReporting.{err, errorType}
import config.Printers.{typr, patmatch}
import NameKinds.DefaultGetterName
-import NameOps._
+import NameOps.*
import SymDenotations.{NoCompleter, NoDenotation}
import Applications.unapplyArgs
import Inferencing.isFullyDefined
import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern}
import config.Feature
import config.Feature.sourceVersion
-import config.SourceVersion._
+import config.SourceVersion.*
import printing.Formatting.hlAsKeyword
import transform.TypeUtils.*
import cc.isCaptureChecking
import collection.mutable
-import reporting._
+import reporting.*
object Checking {
- import tpd._
+ import tpd.*
/** Add further information for error messages involving applied types if the
* type is inferred:
@@ -853,7 +853,7 @@ object Checking {
trait Checking {
- import tpd._
+ import tpd.*
def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type =
Checking.checkNonCyclic(sym, info, reportErrors)
@@ -884,7 +884,7 @@ trait Checking {
case NonConforming, RefutableExtractor
def fail(pat: Tree, pt: Type, reason: Reason): Boolean = {
- import Reason._
+ import Reason.*
val message = reason match
case NonConforming =>
var reportedPt = pt.dropAnnot(defn.UncheckedAnnot)
@@ -1579,7 +1579,7 @@ trait Checking {
}
trait ReChecking extends Checking {
- import tpd._
+ import tpd.*
override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = ()
override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = ()
override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = ()
@@ -1595,7 +1595,7 @@ trait ReChecking extends Checking {
}
trait NoChecking extends ReChecking {
- import tpd._
+ import tpd.*
override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info
override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = ()
override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = ()
diff --git a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
index b55c8c64e3b1..75894d2dd5b9 100644
--- a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
@@ -3,19 +3,19 @@ package typer
import java.lang.ArithmeticException
-import ast._
-import core._
-import Symbols._
-import Types._
-import Constants._
-import Names._
-import StdNames._
-import Contexts._
-import transform.TypeUtils._
+import ast.*
+import core.*
+import Symbols.*
+import Types.*
+import Constants.*
+import Names.*
+import StdNames.*
+import Contexts.*
+import transform.TypeUtils.*
object ConstFold:
- import tpd._
+ import tpd.*
private val foldedBinops = Set[Name](
nme.ZOR, nme.OR, nme.XOR, nme.ZAND, nme.AND, nme.EQ, nme.NE,
diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala
index 8fdc468780ba..818781ae7ccb 100644
--- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala
@@ -2,13 +2,13 @@ package dotty.tools
package dotc
package typer
-import core._
-import ast._
-import ast.Trees._
-import StdNames._
-import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._, Decorators._
-import ProtoTypes._, ContextOps._
-import util.Spans._
+import core.*
+import ast.*
+import ast.Trees.*
+import StdNames.*
+import Contexts.*, Symbols.*, Types.*, SymDenotations.*, Names.*, NameOps.*, Flags.*, Decorators.*
+import ProtoTypes.*, ContextOps.*
+import util.Spans.*
import util.SrcPos
import collection.mutable
import ErrorReporting.errorTree
@@ -266,7 +266,7 @@ trait Deriving {
/** The synthesized type class instance definitions */
def syntheticDefs: List[tpd.Tree] = {
- import tpd._
+ import tpd.*
/** The type class instance definition with symbol `sym` */
def typeclassInstance(sym: Symbol)(using Context): List[List[tpd.Tree]] => tpd.Tree =
diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala
index d819528ff556..33ef3e85e14e 100644
--- a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala
@@ -2,8 +2,8 @@ package dotty.tools
package dotc
package typer
-import core._
-import Contexts._, Symbols._, Decorators._, Comments.{_, given}
+import core.*
+import Contexts.*, Symbols.*, Decorators.*, Comments.{_, given}
import ast.tpd
object Docstrings {
diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
index 51734e1a5d4b..71b32b639997 100644
--- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
@@ -67,8 +67,8 @@ object DynamicUnapply {
trait Dynamic {
self: Typer & Applications =>
- import Dynamic._
- import tpd._
+ import Dynamic.*
+ import tpd.*
/** Translate a selection that does not typecheck according to the normal rules into an applyDynamic/applyDynamicNamed.
* foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...)
diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
index 499d57e0518e..68143dfd2ba0 100644
--- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
@@ -2,22 +2,22 @@ package dotty.tools
package dotc
package typer
-import ast._
-import core._
-import Types._, ProtoTypes._, Contexts._, Decorators._, Denotations._, Symbols._
-import Implicits._, Flags._, Constants.Constant
-import Trees._
-import NameOps._
+import ast.*
+import core.*
+import Types.*, ProtoTypes.*, Contexts.*, Decorators.*, Denotations.*, Symbols.*
+import Implicits.*, Flags.*, Constants.Constant
+import Trees.*
+import NameOps.*
import util.Spans.NoSpan
import util.SrcPos
import config.Feature
-import reporting._
+import reporting.*
import collection.mutable
object ErrorReporting {
- import tpd._
+ import tpd.*
def errorTree(tree: untpd.Tree, msg: Message, pos: SrcPos)(using Context): tpd.Tree =
tree.withType(errorType(msg, pos))
diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
index 8e5ec7525d48..2c441c2f915e 100644
--- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
+++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
@@ -2,18 +2,18 @@ package dotty.tools
package dotc
package typer
-import core._
+import core.*
import ast.{Trees, untpd, tpd}
-import Contexts._
-import Types._
-import Flags._
-import Symbols._
-import Names._
+import Contexts.*
+import Types.*
+import Flags.*
+import Symbols.*
+import Names.*
import NameKinds.UniqueName
-import util.Spans._
+import util.Spans.*
import util.Property
import collection.mutable
-import Trees._
+import Trees.*
/** A class that handles argument lifting. Argument lifting is needed in the following
* scenarios:
@@ -25,7 +25,7 @@ import Trees._
* arguments can be duplicated as arguments to default argument methods.
*/
abstract class Lifter {
- import tpd._
+ import tpd.*
/** Test indicating `expr` does not need lifting */
def noLift(expr: Tree)(using Context): Boolean
@@ -208,7 +208,7 @@ object LiftToDefs extends LiftComplex {
/** Lifter for eta expansion */
object EtaExpansion extends LiftImpure {
- import tpd._
+ import tpd.*
/** Eta-expanding a tree means converting a method reference to a function value.
* @param tree The tree to expand
@@ -264,7 +264,7 @@ object EtaExpansion extends LiftImpure {
* But see comment on the `ExprType` case in function `prune` in class `ConstraintHandling`.
*/
def etaExpand(tree: Tree, mt: MethodType, xarity: Int)(using Context): untpd.Tree = {
- import untpd._
+ import untpd.*
assert(!ctx.isAfterTyper || (ctx.phase eq ctx.base.inliningPhase), ctx.phase)
val defs = new mutable.ListBuffer[tpd.Tree]
val lifted: Tree = TypedSplice(liftApp(defs, tree))
diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
index 5cba406a302e..83b92f3b2342 100644
--- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
@@ -3,44 +3,45 @@ package dotc
package typer
import backend.sjs.JSDefinitions
-import core._
+import core.*
import ast.{TreeTypeMap, untpd, tpd}
-import util.Spans._
+import util.Spans.*
import util.Stats.{record, monitored}
import printing.{Showable, Printer}
-import printing.Texts._
-import Contexts._
-import Types._
-import Flags._
+import printing.Texts.*
+import Contexts.*
+import Types.*
+import Flags.*
import Mode.ImplicitsEnabled
import NameKinds.{LazyImplicitName, ContextBoundParamName}
-import Symbols._
-import Types._
-import Decorators._
-import Names._
-import StdNames._
-import ProtoTypes._
-import ErrorReporting._
+import Symbols.*
+import Types.*
+import Decorators.*
+import Names.*
+import StdNames.*
+import ProtoTypes.*
+import ErrorReporting.*
import Inferencing.{fullyDefinedType, isFullyDefined}
import Scopes.newScope
import Typer.BindingPrec, BindingPrec.*
-import transform.TypeUtils._
-import Hashable._
+import transform.TypeUtils.*
+import Hashable.*
import util.{EqHashMap, Stats}
import config.{Config, Feature}
import Feature.migrateTo3
import config.Printers.{implicits, implicitsDetailed}
import collection.mutable
-import reporting._
+import reporting.*
import transform.Splicer
import annotation.tailrec
import scala.annotation.internal.sharable
import scala.annotation.threadUnsafe
+import scala.compiletime.uninitialized
/** Implicit resolution */
object Implicits:
- import tpd._
+ import tpd.*
/** An implicit definition `implicitRef` that is visible under a different name, `alias`.
* Gets generated if an implicit ref is imported via a renaming import.
@@ -431,6 +432,7 @@ object Implicits:
/** A failed search */
case class SearchFailure(tree: Tree) extends SearchResult {
+ require(tree.tpe.isInstanceOf[SearchFailureType], s"unexpected type for ${tree}")
final def isAmbiguous: Boolean = tree.tpe.isInstanceOf[AmbiguousImplicits | TooUnspecific]
final def reason: SearchFailureType = tree.tpe.asInstanceOf[SearchFailureType]
}
@@ -595,7 +597,7 @@ object Implicits:
}
end Implicits
-import Implicits._
+import Implicits.*
/** Info relating to implicits that is kept for one run */
trait ImplicitRunInfo:
@@ -620,7 +622,7 @@ trait ImplicitRunInfo:
object collectParts extends TypeTraverser:
- private var parts: mutable.LinkedHashSet[Type] = _
+ private var parts: mutable.LinkedHashSet[Type] = uninitialized
private val partSeen = util.HashSet[Type]()
def traverse(t: Type) = try
@@ -843,7 +845,7 @@ end ImplicitRunInfo
trait Implicits:
self: Typer =>
- import tpd._
+ import tpd.*
override def viewExists(from: Type, to: Type)(using Context): Boolean =
!from.isError
@@ -1840,7 +1842,7 @@ final class SearchRoot extends SearchHistory:
result match {
case failure: SearchFailure => failure
case success: SearchSuccess =>
- import tpd._
+ import tpd.*
// We might have accumulated dictionary entries for by name implicit arguments
// which are not in fact used recursively either directly in the outermost result
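
Besides the import rewrite, Implicits.scala (and several files below) migrates default-initialized fields: the Scala 2 idiom `var x: T = _` is deprecated in Scala 3, and `scala.compiletime.uninitialized` is the replacement marker with the same default-initialization semantics. A minimal sketch with hypothetical names:

    import scala.compiletime.uninitialized

    class PartsCollector:
      // was: private var parts: mutable.LinkedHashSet[...] = _
      // the field is default-initialized and filled in before first use
      private var parts: scala.collection.mutable.LinkedHashSet[String] = uninitialized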
diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
index ba05cba229ae..78cba674bfff 100644
--- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
@@ -3,15 +3,17 @@ package dotc
package typer
import ast.{tpd, untpd}
-import core._
+import core.*
import printing.{Printer, Showable}
import util.SimpleIdentityMap
-import Symbols._, Names._, Types._, Contexts._, StdNames._, Flags._
+import Symbols.*, Names.*, Types.*, Contexts.*, StdNames.*, Flags.*
import Implicits.RenamedImplicitRef
import StdNames.nme
import printing.Texts.Text
import NameKinds.QualifiedName
+import scala.compiletime.uninitialized
+
object ImportInfo {
case class RootRef(refFn: () => TermRef, isPredef: Boolean = false)
@@ -66,7 +68,7 @@ class ImportInfo(symf: Context ?=> Symbol,
}
mySym.uncheckedNN
}
- private var mySym: Symbol | Null = _
+ private var mySym: Symbol | Null = uninitialized
/** The (TermRef) type of the qualifier of the import clause */
def site(using Context): Type = importSym.info match {
@@ -180,7 +182,7 @@ class ImportInfo(symf: Context ?=> Symbol,
private val isLanguageImport: Boolean = untpd.languageImport(qualifier).isDefined
- private var myUnimported: Symbol | Null = _
+ private var myUnimported: Symbol | Null = uninitialized
private var featureCache: SimpleIdentityMap[TermName, java.lang.Boolean] = SimpleIdentityMap.empty
diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala
index 66d4a803494d..7615fbda9f0a 100644
--- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala
@@ -3,11 +3,11 @@ package dotc
package typer
import backend.sjs.JSDefinitions
-import core._
-import Contexts._, Types._, Symbols._, Names._, Decorators._, ProtoTypes._
-import Flags._, SymDenotations._
+import core.*
+import Contexts.*, Types.*, Symbols.*, Names.*, Decorators.*, ProtoTypes.*
+import Flags.*, SymDenotations.*
import NameKinds.FlatName
-import StdNames._
+import StdNames.*
import config.Printers.{implicits, implicitsDetailed}
import ast.{untpd, tpd}
import Implicits.{hasExtMethod, Candidate}
@@ -25,7 +25,7 @@ trait ImportSuggestions:
/** The maximal number of suggested imports to make */
inline val MaxSuggestions = 10
- import tpd._
+ import tpd.*
/** Timeout to test a single implicit value as a suggestion, in ms */
private inline val testOneImplicitTimeOut = 500
diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
index 89368d948448..2f43792efe8b 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -2,23 +2,23 @@ package dotty.tools
package dotc
package typer
-import core._
-import ast._
-import Contexts._, Types._, Flags._, Symbols._
-import ProtoTypes._
+import core.*
+import ast.*
+import Contexts.*, Types.*, Flags.*, Symbols.*
+import ProtoTypes.*
import NameKinds.UniqueName
-import util.Spans._
+import util.Spans.*
import util.{Stats, SimpleIdentityMap, SimpleIdentitySet, SrcPos}
-import Decorators._
+import Decorators.*
import config.Printers.{gadts, typr}
import annotation.tailrec
-import reporting._
+import reporting.*
import collection.mutable
import scala.annotation.internal.sharable
object Inferencing {
- import tpd._
+ import tpd.*
/** Is type fully defined, meaning the type does not contain wildcard types
* or uninstantiated type variables. As a side effect, this will minimize
@@ -550,8 +550,8 @@ object Inferencing {
}
trait Inferencing { this: Typer =>
- import Inferencing._
- import tpd._
+ import Inferencing.*
+ import tpd.*
/** Interpolate undetermined type variables in the widened type of this tree.
* @param tree the tree whose type is interpolated
diff --git a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala
index 89caf5e1c474..bbc34bc692f9 100644
--- a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala
+++ b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala
@@ -1,8 +1,8 @@
package dotty.tools.dotc
package typer
-import core.Contexts._
-import ast.tpd._
+import core.Contexts.*
+import ast.tpd.*
/** PostTyper doesn't run on java sources,
* but some checks still need to be applied.
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
index 44d8077e44db..1e8460764b9b 100644
--- a/compiler/src/dotty/tools/dotc/typer/Namer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -2,15 +2,15 @@ package dotty.tools
package dotc
package typer
-import core._
-import ast._
-import Trees._, StdNames._, Scopes._, Denotations._, NamerOps._, ContextOps._
-import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._
-import Decorators._, Comments.{_, given}
+import core.*
+import ast.*
+import Trees.*, StdNames.*, Scopes.*, Denotations.*, NamerOps.*, ContextOps.*
+import Contexts.*, Symbols.*, Types.*, SymDenotations.*, Names.*, NameOps.*, Flags.*
+import Decorators.*, Comments.{_, given}
import NameKinds.DefaultGetterName
-import ast.desugar, ast.desugar._
-import ProtoTypes._
-import util.Spans._
+import ast.desugar, ast.desugar.*
+import ProtoTypes.*
+import util.Spans.*
import util.Property
import collection.mutable
import tpd.tpes
@@ -20,16 +20,17 @@ import config.Printers.typr
import inlines.{Inlines, PrepareInlineable}
import parsing.JavaParsers.JavaParser
import parsing.Parsers.Parser
-import Annotations._
-import Inferencing._
-import transform.ValueClasses._
-import transform.TypeUtils._
-import transform.SymUtils._
+import Annotations.*
+import Inferencing.*
+import transform.ValueClasses.*
+import transform.TypeUtils.*
+import transform.SymUtils.*
import TypeErasure.erasure
-import reporting._
+import reporting.*
import config.Feature.sourceVersion
-import config.SourceVersion._
+import config.SourceVersion.*
+import scala.compiletime.uninitialized
/** This class creates symbols from definitions and imports and gives them
* lazy types.
@@ -53,7 +54,7 @@ import config.SourceVersion._
*/
class Namer { typer: Typer =>
- import untpd._
+ import untpd.*
val TypedAhead : Property.Key[tpd.Tree] = new Property.Key
val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key
@@ -1041,7 +1042,14 @@ class Namer { typer: Typer =>
tp
val rhs1 = typedAheadType(rhs)
- val rhsBodyType: TypeBounds = addVariances(rhs1.tpe).toBounds
+ val rhsBodyType: TypeBounds =
+ val bounds = addVariances(rhs1.tpe).toBounds
+ if sym.is(Sealed) then
+ sym.resetFlag(Sealed)
+ bounds.derivedTypeBounds(bounds.lo,
+ AnnotatedType(bounds.hi, Annotation(defn.Caps_SealedAnnot, rhs1.span)))
+ else bounds
+
val unsafeInfo = if (isDerived) rhsBodyType else abstracted(rhsBodyType)
def opaqueToBounds(info: Type): Type =
@@ -1078,7 +1086,7 @@ class Namer { typer: Typer =>
protected implicit val completerCtx: Context = localContext(cls)
- private var localCtx: Context = _
+ private var localCtx: Context = uninitialized
/** info to be used temporarily while completing the class, to avoid cyclic references. */
private var tempInfo: TempClassInfo | Null = null
@@ -1252,7 +1260,7 @@ class Namer { typer: Typer =>
if forwarder.isType then
buf += tpd.TypeDef(forwarder.asType).withSpan(span)
else
- import tpd._
+ import tpd.*
def extensionParamsCount(pt: Type): Int = pt match
case pt: MethodOrPoly => 1 + extensionParamsCount(pt.resType)
case _ => 0
@@ -1297,7 +1305,7 @@ class Namer { typer: Typer =>
.foreach(addForwarder(name, _, span)) // ignore if any are not added
def addWildcardForwarders(seen: List[TermName], span: Span): Unit =
- val nonContextual = mutable.HashSet(seen: _*)
+ val nonContextual = mutable.HashSet(seen*)
val fromCaseClass = pathType.widen.classSymbols.exists(_.is(Case))
def isCaseClassSynthesized(mbr: Symbol) =
fromCaseClass && defn.caseClassSynthesized.contains(mbr)
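
The one semantic change in Namer.scala is the `rhsBodyType` hunk: when the type member carries the `Sealed` flag (presumably capture checking's experimental sealed bounds, going by the `Caps_SealedAnnot` name), the flag is consumed and re-encoded as an annotation on the upper bound, so the information is carried by the type itself. A commented restatement of the hunk, names as above:

    val bounds = addVariances(rhs1.tpe).toBounds
    if sym.is(Sealed) then
      sym.resetFlag(Sealed)                 // drop the flag from the symbol ...
      bounds.derivedTypeBounds(bounds.lo,   // ... keep `lo`, and annotate `hi` instead
        AnnotatedType(bounds.hi, Annotation(defn.Caps_SealedAnnot, rhs1.span)))
    else bounds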
diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala
index 387e58294dc6..cc3fac3a6ffd 100644
--- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala
@@ -2,15 +2,15 @@ package dotty.tools
package dotc
package typer
-import core._
-import Types._, Contexts._, Symbols._, Decorators._, Constants._
+import core.*
+import Types.*, Contexts.*, Symbols.*, Decorators.*, Constants.*
import annotation.tailrec
import StdNames.nme
import util.Property
import Names.Name
import util.Spans.Span
-import Flags._
-import NullOpsDecorator._
+import Flags.*
+import NullOpsDecorator.*
import collection.mutable
import config.Printers.nullables
import ast.{tpd, untpd}
@@ -18,7 +18,7 @@ import ast.Trees.mods
/** Operations for implementing a flow analysis for nullability */
object Nullables:
- import ast.tpd._
+ import ast.tpd.*
def importUnsafeNulls(using Context): Import = Import(
ref(defn.LanguageModule),
@@ -422,7 +422,7 @@ object Nullables:
* because of shadowing.
*/
def assignmentSpans(using Context): Map[Int, List[Span]] =
- import ast.untpd._
+ import ast.untpd.*
object populate extends UntypedTreeTraverser:
diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
index 6a3f0d0ea73b..6cbddeb964cf 100644
--- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -2,15 +2,15 @@ package dotty.tools
package dotc
package typer
-import core._
-import ast._
-import Contexts._, Types._, Denotations._, Names._, StdNames._, NameOps._, Symbols._
+import core.*
+import ast.*
+import Contexts.*, Types.*, Denotations.*, Names.*, StdNames.*, NameOps.*, Symbols.*
import NameKinds.DepParamName
-import Trees._
-import Constants._
+import Trees.*
+import Constants.*
import util.{Stats, SimpleIdentityMap, SimpleIdentitySet}
-import Decorators._
-import Uniques._
+import Decorators.*
+import Uniques.*
import inlines.Inlines
import config.Printers.typr
import Inferencing.*
@@ -25,7 +25,7 @@ import dotty.tools.dotc.util.Spans.{NoSpan, Span}
object ProtoTypes {
- import tpd._
+ import tpd.*
/** A trait defining an `isCompatible` method. */
trait Compatibility {
diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala
index bda2c25c26b8..75e1aed9da21 100644
--- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala
+++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala
@@ -1,28 +1,28 @@
package dotty.tools.dotc
package typer
-import dotty.tools.dotc.ast._
-import dotty.tools.dotc.config.Feature._
-import dotty.tools.dotc.config.SourceVersion._
-import dotty.tools.dotc.core._
-import dotty.tools.dotc.core.Annotations._
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.ast.*
+import dotty.tools.dotc.config.Feature.*
+import dotty.tools.dotc.config.SourceVersion.*
+import dotty.tools.dotc.core.*
+import dotty.tools.dotc.core.Annotations.*
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.NameKinds.PatMatGivenVarName
-import dotty.tools.dotc.core.Names._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
import dotty.tools.dotc.inlines.PrepareInlineable
import dotty.tools.dotc.quoted.QuotePatterns
import dotty.tools.dotc.staging.StagingLevel.*
-import dotty.tools.dotc.transform.SymUtils._
+import dotty.tools.dotc.transform.SymUtils.*
import dotty.tools.dotc.typer.ErrorReporting.errorTree
-import dotty.tools.dotc.typer.Implicits._
-import dotty.tools.dotc.typer.Inferencing._
+import dotty.tools.dotc.typer.Implicits.*
+import dotty.tools.dotc.typer.Inferencing.*
import dotty.tools.dotc.util.Property
-import dotty.tools.dotc.util.Spans._
+import dotty.tools.dotc.util.Spans.*
import dotty.tools.dotc.util.Stats.record
import dotty.tools.dotc.reporting.IllegalVariableInPatternAlternative
import scala.collection.mutable
@@ -266,7 +266,7 @@ trait QuotesAndSplices {
}
object QuotesAndSplices {
- import tpd._
+ import tpd.*
/** Key for mapping from quoted pattern type variable names into their symbol */
private val TypeVariableKey = new Property.Key[collection.mutable.Map[TypeName, Symbol]]
diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
index 20dfe07c3be5..e152b5e6b9c7 100644
--- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
@@ -1,17 +1,17 @@
package dotty.tools.dotc
package typer
-import core._
-import Contexts._
-import Types._
-import Symbols._
-import StdNames._
-import Decorators._
-import typer.ProtoTypes._
+import core.*
+import Contexts.*
+import Types.*
+import Symbols.*
+import StdNames.*
+import Decorators.*
+import typer.ProtoTypes.*
import ast.{tpd, untpd}
import scala.util.control.NonFatal
import util.Spans.Span
-import Nullables._
+import Nullables.*
import staging.StagingLevel.*
/** A version of Typer that keeps all symbols defined and referenced in a
@@ -23,7 +23,7 @@ import staging.StagingLevel.*
* Otherwise, everything is as in Typer.
*/
class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking {
- import tpd._
+ import tpd.*
private def assertTyped(tree: untpd.Tree)(using Context): Unit =
assert(tree.hasType, i"$tree ${tree.getClass} ${tree.uniqueId}")
diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
index eef88e76971e..12694bf4c6cc 100644
--- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -2,27 +2,27 @@ package dotty.tools
package dotc
package typer
-import transform._
-import core._
-import Symbols._, Types._, Contexts._, Flags._, Names._, NameOps._, NameKinds._
-import StdNames._, Denotations._, SymUtils._, Phases._, SymDenotations._
+import transform.*
+import core.*
+import Symbols.*, Types.*, Contexts.*, Flags.*, Names.*, NameOps.*, NameKinds.*
+import StdNames.*, Denotations.*, SymUtils.*, Phases.*, SymDenotations.*
import NameKinds.DefaultGetterName
-import util.Spans._
+import util.Spans.*
import scala.collection.mutable
-import ast._
-import MegaPhase._
+import ast.*
+import MegaPhase.*
import config.Printers.{checks, noPrinter, capt}
-import Decorators._
+import Decorators.*
import OverridingPairs.isOverridingPair
-import typer.ErrorReporting._
+import typer.ErrorReporting.*
import config.Feature.{warnOnMigration, migrateTo3, sourceVersion}
import config.SourceVersion.{`3.0`, `future`}
import config.Printers.refcheck
-import reporting._
+import reporting.*
import Constants.Constant
object RefChecks {
- import tpd._
+ import tpd.*
val name: String = "refchecks"
val description: String = "checks related to abstract members and overriding"
@@ -267,6 +267,9 @@ object RefChecks {
if !other.is(Deferred) then
checkOverride(subtypeChecker, dcl, other)
end checkAll
+
+ // Disabled for capture checking since traits can get different parameter refinements
+ def checkInheritedTraitParameters: Boolean = true
end OverridingPairsChecker
/** 1. Check all members of class `clazz` for overriding conditions.
@@ -851,7 +854,7 @@ object RefChecks {
checkCaseClassInheritanceInvariant()
}
- if (!clazz.is(Trait)) {
+ if (!clazz.is(Trait) && checker.checkInheritedTraitParameters) {
// check that parameterized base classes and traits are typed in the same way as from the superclass
// I.e. say we have
//
@@ -1128,7 +1131,7 @@ object RefChecks {
report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree)
}
-import RefChecks._
+import RefChecks.*
/** Post-attribution checking and transformation, which fulfills the following roles
*
@@ -1162,7 +1165,7 @@ import RefChecks._
*/
class RefChecks extends MiniPhase { thisPhase =>
- import tpd._
+ import tpd.*
override def phaseName: String = RefChecks.name
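
The two RefChecks.scala hunks add a hook rather than change behaviour: `checkInheritedTraitParameters` defaults to `true` and now guards the parameterized-trait inheritance check, so a checker subclass can opt out, per the comment, for capture checking, where traits can acquire different parameter refinements. A minimal sketch of such an override; the subclass name and constructor signature are assumptions:

    class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type)(using Context)
        extends OverridingPairsChecker(clazz, self):
      // trait parameters may be refined differently per subclass under capture checking
      override def checkInheritedTraitParameters: Boolean = false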
diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
index 363d6e5ba411..6e1302c88398 100644
--- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
@@ -2,25 +2,25 @@ package dotty.tools
package dotc
package typer
-import core._
+import core.*
import util.Spans.Span
-import Contexts._
-import Types._, Flags._, Symbols._, Names._, StdNames._, Constants._
+import Contexts.*
+import Types.*, Flags.*, Symbols.*, Names.*, StdNames.*, Constants.*
import TypeErasure.{erasure, hasStableErasure}
-import Decorators._
-import ProtoTypes._
+import Decorators.*
+import ProtoTypes.*
import Inferencing.{fullyDefinedType, isFullyDefined}
import ast.untpd
-import transform.SymUtils._
-import transform.TypeUtils._
-import transform.SyntheticMembers._
+import transform.SymUtils.*
+import transform.TypeUtils.*
+import transform.SyntheticMembers.*
import util.Property
import ast.Trees.genericEmptyTree
import annotation.{tailrec, constructorOnly}
-import ast.tpd._
-import Synthesizer._
+import ast.tpd.*
+import Synthesizer.*
import sbt.ExtractDependencies.*
-import xsbti.api.DependencyContext._
+import xsbti.api.DependencyContext.*
/** Synthesize terms for special classes */
class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
index 8ded39030a1e..d2b21ea9e4a8 100644
--- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -2,14 +2,14 @@ package dotty.tools
package dotc
package typer
-import core._
-import ast._
-import Contexts._, ContextOps._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._
-import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._
+import core.*
+import ast.*
+import Contexts.*, ContextOps.*, Constants.*, Types.*, Symbols.*, Names.*, Flags.*, Decorators.*
+import ErrorReporting.*, Annotations.*, Denotations.*, SymDenotations.*, StdNames.*
import util.SrcPos
-import NameOps._
+import NameOps.*
import collection.mutable
-import reporting._
+import reporting.*
import Checking.{checkNoPrivateLeaks, checkNoWildcard}
import cc.CaptureSet
diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala
index ed688d182913..ac6eec11ac23 100644
--- a/compiler/src/dotty/tools/dotc/typer/Typer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -3,53 +3,53 @@ package dotc
package typer
import backend.sjs.JSDefinitions
-import core._
-import ast._
-import Trees._
-import Constants._
-import StdNames._
-import Scopes._
-import Denotations._
-import ProtoTypes._
-import Contexts._
-import Symbols._
-import Types._
-import SymDenotations._
-import Annotations._
-import Names._
-import NameOps._
-import NameKinds._
-import NamerOps._
-import ContextOps._
-import Flags._
-import Decorators._
-import ErrorReporting._
-import Checking._
-import Inferencing._
+import core.*
+import ast.*
+import Trees.*
+import Constants.*
+import StdNames.*
+import Scopes.*
+import Denotations.*
+import ProtoTypes.*
+import Contexts.*
+import Symbols.*
+import Types.*
+import SymDenotations.*
+import Annotations.*
+import Names.*
+import NameOps.*
+import NameKinds.*
+import NamerOps.*
+import ContextOps.*
+import Flags.*
+import Decorators.*
+import ErrorReporting.*
+import Checking.*
+import Inferencing.*
import Dynamic.isDynamicExpansion
import EtaExpansion.etaExpand
import TypeComparer.CompareResult
import inlines.{Inlines, PrepareInlineable}
-import util.Spans._
-import util.common._
+import util.Spans.*
+import util.common.*
import util.{Property, SimpleIdentityMap, SrcPos}
import Applications.{tupleComponentTypes, wrapDefs, defaultArgument}
import collection.mutable
import annotation.tailrec
-import Implicits._
+import Implicits.*
import util.Stats.record
import config.Printers.{gadts, typr}
import config.Feature
import config.Feature.{sourceVersion, migrateTo3}
-import config.SourceVersion._
+import config.SourceVersion.*
import rewrites.Rewrites.patch
import staging.StagingLevel
-import transform.SymUtils._
-import transform.TypeUtils._
-import reporting._
-import Nullables._
-import NullOpsDecorator._
+import transform.SymUtils.*
+import transform.TypeUtils.*
+import reporting.*
+import Nullables.*
+import NullOpsDecorator.*
import cc.CheckCaptures
import config.Config
@@ -130,7 +130,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
with QuotesAndSplices
with Deriving {
- import Typer._
+ import Typer.*
import tpd.{cpy => _, _}
import untpd.cpy
@@ -217,7 +217,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
* or else `NoContext` if nothing was found yet.
*/
def findRefRecur(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type = {
- import BindingPrec._
+ import BindingPrec.*
/** Check that any previously found result from an inner context
* does properly shadow the new one from an outer context.
@@ -821,8 +821,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
}
def typedNumber(tree: untpd.Number, pt: Type)(using Context): Tree = {
- import scala.util.FromDigits._
- import untpd.NumberKind._
+ import scala.util.FromDigits.*
+ import untpd.NumberKind.*
record("typedNumber")
val digits = tree.digits
val target = pt.dealias
@@ -904,7 +904,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
def typedNew(tree: untpd.New, pt: Type)(using Context): Tree =
tree.tpt match {
case templ: untpd.Template =>
- import untpd._
+ import untpd.*
var templ1 = templ
def isEligible(tp: Type) =
tp.exists
@@ -948,7 +948,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
case id: untpd.Ident if (ctx.mode is Mode.Pattern) && untpd.isVarPattern(id) =>
if (id.name == nme.WILDCARD || id.name == nme.WILDCARD_STAR) ifPat
else {
- import untpd._
+ import untpd.*
typed(Bind(id.name, Typed(Ident(wildName), tree.tpt)).withSpan(tree.span), pt)
}
case _ => ifExpr
@@ -3625,8 +3625,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
val remembered = // report AmbiguousReferences as priority, otherwise last error
(errs.filter(_.msg.isInstanceOf[AmbiguousReference]) ++ errs).take(1)
for err <- remembered do
+ val tree = if app.isEmpty then qual else app
rememberSearchFailure(qual,
- SearchFailure(app.withType(FailedExtension(app, selectionProto, err.msg))))
+ SearchFailure(tree.withType(FailedExtension(tree, selectionProto, err.msg))))
catch case ex: TypeError => nestedFailure(ex)
// try an implicit conversion or given extension
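
The final Typer.scala hunk hardens the extension-failure path: when no application tree was produced (`app.isEmpty`), recording `FailedExtension(app, ...)` wrapped an empty tree, so the remembered failure carried no usable tree; the fix falls back to `qual` so the recorded `SearchFailure` wraps a real, positioned tree. Restated with a comment:

    val tree = if app.isEmpty then qual else app  // fall back to the qualifier when no application was typed
    rememberSearchFailure(qual,
      SearchFailure(tree.withType(FailedExtension(tree, selectionProto, err.msg))))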
diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala
index 857ed1bad4d9..b79235f4f819 100644
--- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala
+++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala
@@ -2,16 +2,16 @@ package dotty.tools
package dotc
package typer
-import core._
+import core.*
import Run.SubPhase
-import Phases._
-import Contexts._
-import Symbols._
+import Phases.*
+import Contexts.*
+import Symbols.*
import ImportInfo.withRootImports
import parsing.{Parser => ParserPhase}
import config.Printers.typr
import inlines.PrepareInlineable
-import util.Stats._
+import util.Stats.*
/**
*
diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
index 21fa9eed0df4..3699ca80d011 100644
--- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
+++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
@@ -2,17 +2,19 @@ package dotty.tools.dotc
package typer
import dotty.tools.dotc.ast.{ Trees, tpd }
-import core._
-import Types._, Contexts._, Flags._, Symbols._, Trees._
-import Decorators._
-import Variances._
-import NameKinds._
+import core.*
+import Types.*, Contexts.*, Flags.*, Symbols.*, Trees.*
+import Decorators.*
+import Variances.*
+import NameKinds.*
import util.SrcPos
import config.Printers.variances
import config.Feature.migrateTo3
import reporting.trace
import printing.Formatting.hl
+import scala.compiletime.uninitialized
+
/** Provides `check` method to check that all top-level definitions
* in tree are variance correct. Does not recurse inside methods.
* The method should be invoked once for each Template.
@@ -63,11 +65,11 @@ object VarianceChecker {
}
class VarianceChecker(using Context) {
- import VarianceChecker._
- import tpd._
+ import VarianceChecker.*
+ import tpd.*
private object Validator extends TypeAccumulator[Option[VarianceError]] {
- private var base: Symbol = _
+ private var base: Symbol = uninitialized
/** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`.
* The search proceeds from `base` to the owner of `tvar`.
diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
index b4af59c09310..7224e28fe477 100644
--- a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
+++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
@@ -17,7 +17,7 @@ import scala.collection.mutable
* handled by scaladoc.
*/
object CommentParsing {
- import Chars._
+ import Chars.*
/** Returns index of string `str` following `start` skipping the longest
* sequence of whitespace characters (but no newlines)
diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala
index 6d013717ec52..8c0506573109 100644
--- a/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala
+++ b/compiler/src/dotty/tools/dotc/util/GenericHashMap.scala
@@ -1,6 +1,8 @@
package dotty.tools
package dotc.util
+import scala.compiletime.uninitialized
+
object GenericHashMap:
/** The number of elements up to which dense packing is used.
@@ -27,9 +29,9 @@ abstract class GenericHashMap[Key, Value]
(initialCapacity: Int, capacityMultiple: Int) extends MutableMap[Key, Value]:
import GenericHashMap.DenseLimit
- protected var used: Int = _
- protected var limit: Int = _
- protected var table: Array[AnyRef | Null] = _
+ protected var used: Int = uninitialized
+ protected var limit: Int = uninitialized
+ protected var table: Array[AnyRef | Null] = uninitialized
clear()
private def allocate(capacity: Int) =
diff --git a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala
index 7abe40a8e13d..3c30e8e73300 100644
--- a/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala
+++ b/compiler/src/dotty/tools/dotc/util/GenericHashSet.scala
@@ -2,6 +2,8 @@ package dotty.tools.dotc.util
import dotty.tools.uncheckedNN
+import scala.compiletime.uninitialized
+
object GenericHashSet:
/** The number of elements up to which dense packing is used.
@@ -24,9 +26,9 @@ object GenericHashSet:
abstract class GenericHashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] {
import GenericHashSet.DenseLimit
- protected var used: Int = _
- protected var limit: Int = _
- protected var table: Array[AnyRef | Null] = _
+ protected var used: Int = uninitialized
+ protected var limit: Int = uninitialized
+ protected var table: Array[AnyRef | Null] = uninitialized
clear()
diff --git a/compiler/src/dotty/tools/dotc/util/IntMap.scala b/compiler/src/dotty/tools/dotc/util/IntMap.scala
index 008ea866f70e..1d04567e99c7 100644
--- a/compiler/src/dotty/tools/dotc/util/IntMap.scala
+++ b/compiler/src/dotty/tools/dotc/util/IntMap.scala
@@ -1,5 +1,7 @@
package dotty.tools.dotc.util
+import scala.compiletime.uninitialized
+
/** A dense map from some `Key` type to `Int`. Dense means: All keys and values
* are stored in arrays from 0 up to the size of the map. Keys and values
* can be obtained by index using `key(index)` and `value(index)`. Values
@@ -19,7 +21,7 @@ package dotty.tools.dotc.util
*/
final class IntMap[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2)
extends PerfectHashing[Key](initialCapacity, capacityMultiple):
- private var values: Array[Int] = _
+ private var values: Array[Int] = uninitialized
def default: Int = -1
diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
index 99ee8a80227b..e124159575e7 100644
--- a/compiler/src/dotty/tools/dotc/util/LRUCache.scala
+++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala
@@ -18,7 +18,7 @@ import annotation.tailrec
* at the `last` position.
*/
class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] {
- import LRUCache._
+ import LRUCache.*
val keys: Array[Key] = new Array[Key](Retained)
val values: Array[Value] = new Array(Retained)
var next: SixteenNibbles = new SixteenNibbles(initialRing.bits)
diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
index 61cf238fbc7f..f641ea90dcdd 100644
--- a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
+++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala
@@ -4,7 +4,7 @@ package util
import scala.language.unsafeNulls
-import core.Names._
+import core.Names.*
import scala.annotation.internal.sharable
diff --git a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala
index ac724f7e336f..7509ef66e533 100644
--- a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala
+++ b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala
@@ -3,9 +3,9 @@ package dotty.tools.dotc.util
import scala.language.unsafeNulls
import dotty.tools.dotc.core.Comments.{Comment, CommentsContext}
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.Names.TermName
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.printing.SyntaxHighlighting
import scala.Console.{BOLD, RESET}
diff --git a/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala b/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala
index fca790837959..1351afb68f3d 100644
--- a/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala
+++ b/compiler/src/dotty/tools/dotc/util/PerfectHashing.scala
@@ -1,5 +1,7 @@
package dotty.tools.dotc.util
+import scala.compiletime.uninitialized
+
object PerfectHashing:
/** The number of elements up to which dense packing is used.
@@ -22,9 +24,9 @@ object PerfectHashing:
class PerfectHashing[Key](initialCapacity: Int = 8, capacityMultiple: Int = 2):
import PerfectHashing.DenseLimit
- private var used: Int = _
- private var table: Array[Int] = _
- private var keys: Array[AnyRef] = _
+ private var used: Int = uninitialized
+ private var table: Array[Int] = uninitialized
+ private var keys: Array[AnyRef] = uninitialized
clear()
diff --git a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala
index 4dd897dd082a..ec88b5880745 100644
--- a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala
+++ b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc.util
import scala.collection.mutable.ArrayBuffer
-import scala.util.chaining._
+import scala.util.chaining.*
/** A wrapper for a list of cached instances of a type `T`.
* The wrapper is recursion-reentrant: several instances are kept, so
diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala
index f85a57a8f812..8d5d0c27ab0e 100644
--- a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala
+++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala
@@ -8,10 +8,10 @@ import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
import core.unpickleScala2.PickleBuffer
-import core.Names._
+import core.Names.*
object ShowPickled {
- import core.unpickleScala2.PickleFormat._
+ import core.unpickleScala2.PickleFormat.*
case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) {
def isName: Boolean = tag == TERMname || tag == TYPEname
diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala
index 5bb79642278d..c3779d3473cf 100644
--- a/compiler/src/dotty/tools/dotc/util/Signatures.scala
+++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala
@@ -1,21 +1,21 @@
package dotty.tools.dotc
package util
-import ast.Trees._
+import ast.Trees.*
import ast.tpd
import core.Constants.Constant
-import core.Contexts._
+import core.Contexts.*
import core.Denotations.{SingleDenotation, Denotation}
import core.Flags
import core.NameOps.isUnapplyName
-import core.Names._
+import core.Names.*
import core.NameKinds
-import core.Types._
+import core.Types.*
import core.Symbols.NoSymbol
import interactive.Interactive
import transform.SymUtils.isLocalToBlock
import util.Spans.Span
-import reporting._
+import reporting.*
object Signatures {
diff --git a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
index 42286aef5d31..b51f6bdcac61 100644
--- a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
+++ b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
@@ -5,7 +5,7 @@ package dotty.tools.dotc.util
*
*/
class SixteenNibbles(val bits: Long) extends AnyVal {
- import SixteenNibbles._
+ import SixteenNibbles.*
def apply(idx: Int): Int =
(bits >>> (idx * Width)).toInt & Mask
diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
index 3462036d7ba6..9da4f58f2deb 100644
--- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala
+++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
@@ -4,15 +4,16 @@ package util
import scala.language.unsafeNulls
-import dotty.tools.io._
-import Spans._
-import core.Contexts._
+import dotty.tools.io.*
+import Spans.*
+import core.Contexts.*
import scala.io.Codec
-import Chars._
+import Chars.*
import scala.annotation.internal.sharable
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
+import scala.compiletime.uninitialized
import scala.util.chaining.given
import java.io.File.separator
@@ -61,7 +62,7 @@ object ScriptSourceFile {
}
class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends interfaces.SourceFile {
- import SourceFile._
+ import SourceFile.*
private var myContent: Array[Char] | Null = null
@@ -137,7 +138,7 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends
buf.toArray
}
- private var lineIndicesCache: Array[Int] = _
+ private var lineIndicesCache: Array[Int] = uninitialized
private def lineIndices: Array[Int] =
if lineIndicesCache eq null then
lineIndicesCache = calculateLineIndicesFromContents()
@@ -259,7 +260,7 @@ object SourceFile {
// and use both slashes as separators, or on other OS and use forward slash
// as separator, backslash as file name character.
- import scala.jdk.CollectionConverters._
+ import scala.jdk.CollectionConverters.*
val path = refPath.relativize(sourcePath)
path.iterator.asScala.mkString("/")
else
diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
index 29f9a34d2292..904704b2349c 100644
--- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
+++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
@@ -5,7 +5,7 @@ package util
import scala.language.unsafeNulls
import printing.{Showable, Printer}
-import printing.Texts._
+import printing.Texts.*
import core.Contexts.Context
import Spans.{Span, NoSpan}
import scala.annotation.internal.sharable
diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala
index 5e136856b718..750a799a9f0a 100644
--- a/compiler/src/dotty/tools/dotc/util/Stats.scala
+++ b/compiler/src/dotty/tools/dotc/util/Stats.scala
@@ -4,7 +4,7 @@ package util
import scala.annotation.internal.sharable
-import core.Contexts._
+import core.Contexts.*
import collection.mutable
@sharable object Stats {
diff --git a/compiler/src/dotty/tools/dotc/util/Store.scala b/compiler/src/dotty/tools/dotc/util/Store.scala
index d8c9230b9272..8605b9021980 100644
--- a/compiler/src/dotty/tools/dotc/util/Store.scala
+++ b/compiler/src/dotty/tools/dotc/util/Store.scala
@@ -8,7 +8,7 @@ object Store {
}
class Store(private val elems: Array[AnyRef | Null]) extends AnyVal {
- import Store._
+ import Store.*
def newLocation[T](): (Location[T], Store) = {
val elems1 = new Array[AnyRef | Null](elems.length + 1)
diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala
index 975826a87a37..d93505f6f3c2 100644
--- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala
+++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala
@@ -6,7 +6,7 @@ import java.lang.ref.{ReferenceQueue, WeakReference}
import scala.annotation.{ constructorOnly, tailrec }
-import dotty.tools._
+import dotty.tools.*
/**
* A HashSet where the elements are stored weakly. Elements in this set are eligible for GC if no other
@@ -20,7 +20,7 @@ import dotty.tools._
*/
abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Double = 0.5) extends MutableSet[A] {
- import WeakHashSet._
+ import WeakHashSet.*
type This = WeakHashSet[A]
diff --git a/compiler/src/dotty/tools/dotc/util/kwords.sc b/compiler/src/dotty/tools/dotc/util/kwords.sc
index 961be3b0aa23..377be9dbcb65 100644
--- a/compiler/src/dotty/tools/dotc/util/kwords.sc
+++ b/compiler/src/dotty/tools/dotc/util/kwords.sc
@@ -1,8 +1,8 @@
package dotty.tools.dotc.util
-import dotty.tools.dotc.parsing._
-import Scanners._
-import Tokens._
+import dotty.tools.dotc.parsing.*
+import Scanners.*
+import Tokens.*
object kwords {
println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet
diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala
index b45de57f9850..8df4015a53c2 100644
--- a/compiler/src/dotty/tools/io/ClassPath.scala
+++ b/compiler/src/dotty/tools/io/ClassPath.scala
@@ -21,7 +21,7 @@ import dotc.classpath.{ PackageEntry, ClassPathEntries, PackageName }
* A representation of the compiler's class- or sourcepath.
*/
trait ClassPath {
- import dotty.tools.dotc.classpath._
+ import dotty.tools.dotc.classpath.*
def asURLs: Seq[URL]
final def hasPackage(pkg: String): Boolean = hasPackage(PackageName(pkg))
@@ -94,7 +94,7 @@ trait ClassPath {
/** The whole classpath in the form of one String.
*/
- def asClassPathString: String = ClassPath.join(asClassPathStrings: _*)
+ def asClassPathString: String = ClassPath.join(asClassPathStrings*)
// for compatibility purposes
@deprecated("use asClassPathString instead of this one", "2.11.5")
def asClasspathString: String = asClassPathString
@@ -152,7 +152,7 @@ object ClassPath {
def join(paths: String*): String = paths.filterNot(_ == "").mkString(pathSeparator)
/** Split the classpath, apply a transformation function, and reassemble it. */
- def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
+ def map(cp: String, f: String => String): String = join(split(cp).map(f)*)
/** Expand path and possibly expanding stars */
def expandPath(path: String, expandStar: Boolean = true): List[String] =
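
ClassPath.scala also picks up the third mechanical migration in this patch (compare `mutable.HashSet(seen*)` in Namer.scala above and `WManifest.apply(mainAttrs*)` in Jar.scala below): the Scala 2 vararg-splice ascription `xs: _*` gives way to the postfix `xs*`, which in the `map` case also means trading the infix `split(cp) map f` for `split(cp).map(f)` so the splice attaches to a call argument. A minimal sketch:

    def join(paths: String*): String =
      paths.filterNot(_.isEmpty).mkString(java.io.File.pathSeparator)

    val cp = List("a.jar", "b.jar")
    join(cp: _*)  // Scala 2 spelling, deprecated
    join(cp*)     // Scala 3 spelling, as applied here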
diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala
index 27f2c077dd6a..efce60d3f86d 100644
--- a/compiler/src/dotty/tools/io/File.scala
+++ b/compiler/src/dotty/tools/io/File.scala
@@ -12,7 +12,7 @@ import scala.language.unsafeNulls
import java.io.{File => JavaIoFile, _}
import java.nio.file.{Files, Paths}
-import java.nio.file.StandardOpenOption._
+import java.nio.file.StandardOpenOption.*
import scala.io.Codec
/**
diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala
index 9979a9ca9379..3e65d2f7635d 100644
--- a/compiler/src/dotty/tools/io/Jar.scala
+++ b/compiler/src/dotty/tools/io/Jar.scala
@@ -10,8 +10,8 @@ package io
import scala.language.unsafeNulls
import java.io.{ InputStream, OutputStream, DataOutputStream }
-import java.util.jar._
-import scala.jdk.CollectionConverters._
+import java.util.jar.*
+import scala.jdk.CollectionConverters.*
import scala.collection.mutable
import Attributes.Name
import scala.annotation.tailrec
@@ -42,7 +42,7 @@ class Jar(file: File) {
protected def errorFn(msg: String): Unit = Console println msg
- import Jar._
+ import Jar.*
lazy val jarFile: JarFile = new JarFile(file.jpath.toFile)
lazy val manifest: Option[Manifest] = withJarInput(s => Option(s.getManifest))
@@ -62,7 +62,7 @@ class Jar(file: File) {
finally in.close()
}
def jarWriter(mainAttrs: (Attributes.Name, String)*): JarWriter = {
- new JarWriter(file, Jar.WManifest.apply(mainAttrs: _*).underlying)
+ new JarWriter(file, Jar.WManifest.apply(mainAttrs*).underlying)
}
def toList: List[JarEntry] = withJarInput { in =>
@@ -142,7 +142,7 @@ object Jar {
def underlying: JManifest = manifest
def attrs: mutable.Map[Name, String] = manifest.getMainAttributes().asInstanceOf[AttributeMap].asScala withDefaultValue null
def initialMainAttrs: Map[Attributes.Name, String] = {
- import scala.util.Properties._
+ import scala.util.Properties.*
Map(
Name.MANIFEST_VERSION -> "1.0",
ScalaCompilerVersion -> versionNumberString
diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala
index 8f3b5d8010e4..49b743e83074 100644
--- a/compiler/src/dotty/tools/io/JarArchive.scala
+++ b/compiler/src/dotty/tools/io/JarArchive.scala
@@ -4,7 +4,7 @@ import scala.language.unsafeNulls
import java.nio.file.{FileSystemAlreadyExistsException, FileSystems}
-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
/**
* This class implements an [[AbstractFile]] backed by a jar
diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala
index dddb870afc65..c8420c5e381d 100644
--- a/compiler/src/dotty/tools/io/Path.scala
+++ b/compiler/src/dotty/tools/io/Path.scala
@@ -8,11 +8,11 @@ package dotty.tools.io
import scala.language.unsafeNulls
import java.io.RandomAccessFile
-import java.nio.file._
+import java.nio.file.*
import java.net.{URI, URL}
import java.nio.file.attribute.{BasicFileAttributes, FileTime}
import java.io.IOException
-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
import scala.util.Random.alphanumeric
/** An abstraction for filesystem paths. The differences between
@@ -63,7 +63,7 @@ object Path {
private[io] def randomPrefix: String = alphanumeric take 6 mkString ""
private[io] def fail(msg: String): Nothing = throw FileOperationException(msg)
}
-import Path._
+import Path.*
/** The Path constructor is private so we can enforce some
* semantics regarding how a Path might relate to the world.
diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala
index e28b7de7983d..9e6d5fe4796b 100644
--- a/compiler/src/dotty/tools/io/ZipArchive.scala
+++ b/compiler/src/dotty/tools/io/ZipArchive.scala
@@ -13,7 +13,7 @@ import java.nio.file.Files
import java.util.zip.{ ZipEntry, ZipFile }
import java.util.jar.Manifest
import scala.collection.mutable
-import scala.jdk.CollectionConverters._
+import scala.jdk.CollectionConverters.*
/** An abstraction for zip files and streams. Everything is written the way
* it is for performance: we come through here a lot on every run. Be careful
@@ -52,7 +52,7 @@ object ZipArchive {
else path.substring(idx + 1)
}
}
-import ZipArchive._
+import ZipArchive.*
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
abstract class ZipArchive(override val jpath: JPath, release: Option[String]) extends AbstractFile with Equals {
self =>
diff --git a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala
index d539c1986f8d..860c4a9372f9 100644
--- a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala
+++ b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala
@@ -1,20 +1,22 @@
package dotty.tools.repl
import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.core.Phases.Phase
+import scala.compiletime.uninitialized
+
/** A phase that collects user defined top level imports.
*
* These imports must be collected as typed trees and therefore
* after Typer.
*/
class CollectTopLevelImports extends Phase {
- import tpd._
+ import tpd.*
def phaseName: String = "collectTopLevelImports"
- private var myImports: List[Import] = _
+ private var myImports: List[Import] = uninitialized
def imports: List[Import] = myImports
def run(using Context): Unit = {
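Note: this hunk also shows the second mechanical rewrite in the patch. The Scala 2 idiom `var x: T = _` (default-initialize now, assign later) is deprecated; Scala 3 spells the intent explicitly with `scala.compiletime.uninitialized`. A minimal sketch with an illustrative class:

```scala
import scala.compiletime.uninitialized

class Cache:
  // Scala 2: private var data: Array[Byte] = _
  private var data: Array[Byte] = uninitialized // assigned before first use

  def load(bytes: Array[Byte]): Unit = data = bytes
```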
diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala
index 8e048d786ae1..294f0a331ec2 100644
--- a/compiler/src/dotty/tools/repl/JLineTerminal.scala
+++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala
@@ -2,15 +2,15 @@ package dotty.tools.repl
import scala.language.unsafeNulls
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.parsing.Scanners.Scanner
-import dotty.tools.dotc.parsing.Tokens._
+import dotty.tools.dotc.parsing.Tokens.*
import dotty.tools.dotc.printing.SyntaxHighlighting
import dotty.tools.dotc.reporting.Reporter
import dotty.tools.dotc.util.SourceFile
import org.jline.reader
import org.jline.reader.Parser.ParseContext
-import org.jline.reader._
+import org.jline.reader.*
import org.jline.reader.impl.LineReaderImpl
import org.jline.reader.impl.history.DefaultHistory
import org.jline.terminal.TerminalBuilder
@@ -50,8 +50,8 @@ class JLineTerminal extends java.io.Closeable {
def readLine(
completer: Completer // provide auto-completions
)(using Context): String = {
- import LineReader.Option._
- import LineReader._
+ import LineReader.Option.*
+ import LineReader.*
val userHome = System.getProperty("user.home")
val lineReader = LineReaderBuilder
.builder()
diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala
index a67b247066f7..b9139343bca1 100644
--- a/compiler/src/dotty/tools/repl/ParseResult.scala
+++ b/compiler/src/dotty/tools/repl/ParseResult.scala
@@ -3,7 +3,7 @@ package repl
import dotc.CompilationUnit
import dotc.ast.untpd
-import dotc.core.Contexts._
+import dotc.core.Contexts.*
import dotc.core.StdNames.str
import dotc.parsing.Parsers.Parser
import dotc.parsing.Tokens
diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala
index c647ef302bb9..487b6ce3924f 100644
--- a/compiler/src/dotty/tools/repl/Rendering.scala
+++ b/compiler/src/dotty/tools/repl/Rendering.scala
@@ -10,6 +10,7 @@ import reporting.Diagnostic
import transform.ValueClasses
import util.StackTraceOps.*
+import scala.compiletime.uninitialized
import scala.util.control.NonFatal
/** This rendering object uses `ClassLoader`s to accomplish crossing the 4th
@@ -22,12 +23,12 @@ import scala.util.control.NonFatal
*/
private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None):
- import Rendering._
+ import Rendering.*
- var myClassLoader: AbstractFileClassLoader = _
+ var myClassLoader: AbstractFileClassLoader = uninitialized
/** (value, maxElements, maxCharacters) => String */
- var myReplStringOf: (Object, Int, Int) => String = _
+ var myReplStringOf: (Object, Int, Int) => String = uninitialized
/** Class loader used to load compiled code */
private[repl] def classLoader()(using Context) =
diff --git a/compiler/src/dotty/tools/repl/ReplCommand.scala b/compiler/src/dotty/tools/repl/ReplCommand.scala
index 3e46106acc2c..0b40a7cec0b3 100644
--- a/compiler/src/dotty/tools/repl/ReplCommand.scala
+++ b/compiler/src/dotty/tools/repl/ReplCommand.scala
@@ -1,6 +1,6 @@
package dotty.tools.repl
-import dotty.tools.dotc.config.Properties._
+import dotty.tools.dotc.config.Properties.*
import dotty.tools.dotc.config.CompilerCommand
object ReplCommand extends CompilerCommand:
diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala
index d3a5561b6080..af3fb32c3e86 100644
--- a/compiler/src/dotty/tools/repl/ReplCompiler.scala
+++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala
@@ -1,23 +1,23 @@
package dotty.tools.repl
-import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.Trees.*
import dotty.tools.dotc.ast.{tpd, untpd}
import dotty.tools.dotc.ast.tpd.TreeOps
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.Names._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Names.*
import dotty.tools.dotc.core.Phases.Phase
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.StdNames.*
+import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.reporting.Diagnostic
import dotty.tools.dotc.transform.PostTyper
import dotty.tools.dotc.typer.ImportInfo.{withRootImports, RootRef}
import dotty.tools.dotc.typer.TyperPhase
-import dotty.tools.dotc.util.Spans._
+import dotty.tools.dotc.util.Spans.*
import dotty.tools.dotc.util.{ParsedComment, Property, SourceFile}
import dotty.tools.dotc.{CompilationUnit, Compiler, Run}
-import dotty.tools.repl.results._
+import dotty.tools.repl.results.*
import scala.collection.mutable
import scala.util.chaining.given
@@ -156,7 +156,7 @@ class ReplCompiler extends Compiler:
def wrapped(expr: String, sourceFile: SourceFile, state: State)(using Context): Result[untpd.PackageDef] = {
def wrap(trees: List[untpd.Tree]): untpd.PackageDef = {
- import untpd._
+ import untpd.*
val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, unitLiteral).withSpan(Span(0, expr.length)))
val tmpl = Template(emptyConstructor, Nil, Nil, EmptyValDef, List(valdef))
@@ -186,7 +186,7 @@ class ReplCompiler extends Compiler:
sourceFile.atSpan(Span(0, sourceFile.content.length)))).errors
def unwrappedTypeTree(tree: tpd.Tree, sourceFile0: SourceFile)(using Context): Result[tpd.ValDef] = {
- import tpd._
+ import tpd.*
tree match {
case PackageDef(_, List(TypeDef(_, tmpl: Template))) =>
tmpl.body
@@ -198,7 +198,7 @@ class ReplCompiler extends Compiler:
}
def unwrappedUntypedTree(tree: untpd.Tree, sourceFile0: SourceFile)(using Context): Result[untpd.ValDef] =
- import untpd._
+ import untpd.*
tree match {
case PackageDef(_, List(TypeDef(_, tmpl: Template))) =>
tmpl.body
diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala
index 2471f6bece42..5226ef0b4546 100644
--- a/compiler/src/dotty/tools/repl/ReplDriver.scala
+++ b/compiler/src/dotty/tools/repl/ReplDriver.scala
@@ -5,21 +5,21 @@ import scala.language.unsafeNulls
import java.io.{File => JFile, PrintStream}
import java.nio.charset.StandardCharsets
-import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.Trees.*
import dotty.tools.dotc.ast.{tpd, untpd}
import dotty.tools.dotc.config.CommandLineParser.tokenize
import dotty.tools.dotc.config.Properties.{javaVersion, javaVmName, simpleVersionString}
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
import dotty.tools.dotc.core.Phases.{unfusedPhases, typerPhase}
import dotty.tools.dotc.core.Denotations.Denotation
-import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Flags.*
import dotty.tools.dotc.core.Mode
import dotty.tools.dotc.core.NameKinds.SimpleNameKind
import dotty.tools.dotc.core.NameKinds.DefaultGetterName
-import dotty.tools.dotc.core.NameOps._
+import dotty.tools.dotc.core.NameOps.*
import dotty.tools.dotc.core.Names.Name
-import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.StdNames.*
import dotty.tools.dotc.core.Symbols.{Symbol, defn}
import dotty.tools.dotc.interfaces
import dotty.tools.dotc.interactive.Completion
@@ -30,13 +30,14 @@ import dotty.tools.dotc.util.Spans.Span
import dotty.tools.dotc.util.{SourceFile, SourcePosition}
import dotty.tools.dotc.{CompilationUnit, Driver}
import dotty.tools.dotc.config.CompilerCommand
-import dotty.tools.io._
+import dotty.tools.io.*
import dotty.tools.runner.ScalaClassLoader.*
-import org.jline.reader._
+import org.jline.reader.*
import scala.annotation.tailrec
import scala.collection.mutable
-import scala.jdk.CollectionConverters._
+import scala.compiletime.uninitialized
+import scala.jdk.CollectionConverters.*
import scala.util.control.NonFatal
import scala.util.Using
@@ -116,10 +117,10 @@ class ReplDriver(settings: Array[String],
rendering = new Rendering(classLoader)
}
- private var rootCtx: Context = _
- private var shouldStart: Boolean = _
- private var compiler: ReplCompiler = _
- protected var rendering: Rendering = _
+ private var rootCtx: Context = uninitialized
+ private var shouldStart: Boolean = uninitialized
+ private var compiler: ReplCompiler = uninitialized
+ protected var rendering: Rendering = uninitialized
// initialize the REPL session as part of the constructor so that once `run`
// is called, we're in business
diff --git a/compiler/src/dotty/tools/runner/ObjectRunner.scala b/compiler/src/dotty/tools/runner/ObjectRunner.scala
index cb8f9d791dfa..62dbcc32f30d 100644
--- a/compiler/src/dotty/tools/runner/ObjectRunner.scala
+++ b/compiler/src/dotty/tools/runner/ObjectRunner.scala
@@ -19,7 +19,7 @@ trait CommonRunner {
* @throws java.lang.reflect.InvocationTargetException
*/
def run(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = {
- import RichClassLoader._
+ import RichClassLoader.*
ScalaClassLoader.fromURLsParallelCapable(urls).run(objectName, arguments)
}
diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala
index 9ec0199abcbb..2c0976fac1ac 100644
--- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala
+++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala
@@ -33,7 +33,7 @@ final class RichClassLoader(private val self: ClassLoader) extends AnyVal {
val method = clsToRun.getMethod("main", classOf[Array[String]])
if !Modifier.isStatic(method.getModifiers) then
throw new NoSuchMethodException(s"$objectName.main is not static")
- try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*))
+ try asContext(method.invoke(null, Array(arguments.toArray: AnyRef)*))
catch unwrapHandler({ case ex => throw ex })
}
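Note: the `method.invoke` change is an instance of the third recurring rewrite: splicing a sequence into a varargs parameter is written `xs*` in Scala 3 rather than `xs: _*`. A small sketch (`sum` is illustrative):

```scala
def sum(xs: Int*): Int = xs.sum

val numbers = Seq(1, 2, 3)
// Scala 2: sum(numbers: _*)
val total = sum(numbers*) // Scala 3 splice, used throughout this patch
```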
diff --git a/compiler/src/dotty/tools/scripting/Main.scala b/compiler/src/dotty/tools/scripting/Main.scala
index 8db12f400c64..0b954cb79b26 100755
--- a/compiler/src/dotty/tools/scripting/Main.scala
+++ b/compiler/src/dotty/tools/scripting/Main.scala
@@ -73,7 +73,7 @@ object Main:
)
import dotty.tools.io.{Jar, Directory}
val jar = new Jar(jarPath)
- val writer = jar.jarWriter(manifestAttributes:_*)
+ val writer = jar.jarWriter(manifestAttributes*)
try
writer.writeAllFrom(Directory(outDir))
finally
diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala
index 0f64d6e23b8e..04472647b9fc 100644
--- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala
+++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala
@@ -7,20 +7,20 @@ import dotty.tools.dotc
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.ast.untpd
import dotty.tools.dotc.core.Annotations
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Decorators.*
import dotty.tools.dotc.core.NameKinds
-import dotty.tools.dotc.core.NameOps._
-import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.NameOps.*
+import dotty.tools.dotc.core.StdNames.*
import dotty.tools.dotc.core.Types
import dotty.tools.dotc.NoCompilationUnit
import dotty.tools.dotc.quoted.MacroExpansion
import dotty.tools.dotc.quoted.PickledQuotes
import dotty.tools.dotc.quoted.QuotePatterns
-import dotty.tools.dotc.quoted.reflect._
+import dotty.tools.dotc.quoted.reflect.*
import scala.quoted.runtime.{QuoteUnpickler, QuoteMatching}
-import scala.quoted.runtime.impl.printers._
+import scala.quoted.runtime.impl.printers.*
import scala.reflect.TypeTest
diff --git a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala
index d65328bb5405..705efc5ffab1 100644
--- a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala
+++ b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala
@@ -1,7 +1,7 @@
package scala.quoted.runtime.impl
import dotty.tools.dotc.ast.tpd.Tree
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
class ScopeException(msg: String) extends Exception(msg)
diff --git a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala
index 797b38be2743..397ad49a309b 100644
--- a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala
+++ b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala
@@ -1,7 +1,7 @@
package scala.quoted
package runtime.impl
-import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Contexts.*
import dotty.tools.dotc.util.Property
import dotty.tools.dotc.util.SourcePosition
diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala
index c229338ad228..eac85244d97b 100644
--- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala
+++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala
@@ -1,7 +1,7 @@
package scala.quoted
package runtime.impl.printers
-import scala.quoted._
+import scala.quoted.*
object Extractors {
@@ -18,7 +18,7 @@ object Extractors {
new ExtractorsPrinter[quotes.type]().visitSymbol(symbol).result()
def showFlags(using Quotes)(flags: quotes.reflect.Flags): String = {
- import quotes.reflect._
+ import quotes.reflect.*
val flagList = List.newBuilder[String]
if (flags.is(Flags.Abstract)) flagList += "Flags.Abstract"
if (flags.is(Flags.Artifact)) flagList += "Flags.Artifact"
@@ -64,7 +64,7 @@ object Extractors {
}
private class ExtractorsPrinter[Q <: Quotes & Singleton](using val quotes: Q) { self =>
- import quotes.reflect._
+ import quotes.reflect.*
private val sb: StringBuilder = new StringBuilder
diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala
index cd36e31716a7..4dfb61a59722 100644
--- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala
+++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala
@@ -19,7 +19,7 @@ object SourceCode {
symbol.fullName
def showFlags(using Quotes)(flags: quotes.reflect.Flags)(syntaxHighlight: SyntaxHighlight): String = {
- import quotes.reflect._
+ import quotes.reflect.*
val flagList = List.newBuilder[String]
if (flags.is(Flags.Abstract)) flagList += "abstract"
if (flags.is(Flags.Artifact)) flagList += "artifact"
@@ -64,8 +64,8 @@ object SourceCode {
}
private class SourceCodePrinter[Q <: Quotes & Singleton](syntaxHighlight: SyntaxHighlight, fullNames: Boolean)(using val quotes: Q) {
- import syntaxHighlight._
- import quotes.reflect._
+ import syntaxHighlight.*
+ import quotes.reflect.*
private[this] val sb: StringBuilder = new StringBuilder
diff --git a/compiler/test-resources/repl/i3966 b/compiler/test-resources/repl/i3966
index 204a7685f854..44f42e75aecb 100644
--- a/compiler/test-resources/repl/i3966
+++ b/compiler/test-resources/repl/i3966
@@ -1,2 +1,2 @@
-scala> val List(x: _*) = List(1, 2): @unchecked
+scala> val List(x*) = List(1, 2): @unchecked
val x: Seq[Int] = List(1, 2)
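Note: this REPL test covers the pattern-side counterpart of the splice rewrite: binding the remaining elements of a sequence pattern is now written `x*` instead of `x: _*`. A minimal sketch:

```scala
// Scala 2: val List(head, rest: _*) = ...
val List(head, rest*) = List(1, 2, 3): @unchecked
// head == 1, rest == List(2, 3)
```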
diff --git a/compiler/test/dotty/tools/CheckTypesTests.scala b/compiler/test/dotty/tools/CheckTypesTests.scala
index df4bc1636d82..290d8523df49 100644
--- a/compiler/test/dotty/tools/CheckTypesTests.scala
+++ b/compiler/test/dotty/tools/CheckTypesTests.scala
@@ -24,7 +24,7 @@ class CheckTypeTest extends DottyTest {
"List[B]"
)
- checkTypes(source, types: _*) {
+ checkTypes(source, types*) {
case (List(a, b, lu, li, lr, ls, la, lb), context) =>
given Context = context
diff --git a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala
index e7cd20ba98b2..c55398c13cd1 100644
--- a/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala
+++ b/compiler/test/dotty/tools/backend/jvm/ArrayApplyOptTest.scala
@@ -109,7 +109,7 @@ class ArrayApplyOptTest extends DottyBytecodeTest {
@Test def testArrayInlined = test(
"""{
- | inline def array(inline xs: Int*): Array[Int] = Array(xs: _*)
+ | inline def array(inline xs: Int*): Array[Int] = Array(xs*)
| array(1, 2)
|}""".stripMargin,
newArray2Opcodes(T_INT, List(Op(DUP), Op(ICONST_0), Op(ICONST_1), Op(IASTORE), Op(DUP), Op(ICONST_1), Op(ICONST_2), Op(IASTORE), TypeOp(CHECKCAST, "[I")))
@@ -117,7 +117,7 @@ class ArrayApplyOptTest extends DottyBytecodeTest {
@Test def testArrayInlined2 = test(
"""{
- | inline def array(inline x: Int, inline xs: Int*): Array[Int] = Array(x, xs: _*)
+ | inline def array(inline x: Int, inline xs: Int*): Array[Int] = Array(x, xs*)
| array(1, 2)
|}""".stripMargin,
newArray2Opcodes(T_INT, List(Op(DUP), Op(ICONST_0), Op(ICONST_1), Op(IASTORE), Op(DUP), Op(ICONST_1), Op(ICONST_2), Op(IASTORE)))
@@ -125,7 +125,7 @@ class ArrayApplyOptTest extends DottyBytecodeTest {
@Test def testArrayInlined3 = test(
"""{
- | inline def array[T](inline xs: T*)(using inline ct: scala.reflect.ClassTag[T]): Array[T] = Array(xs: _*)
+ | inline def array[T](inline xs: T*)(using inline ct: scala.reflect.ClassTag[T]): Array[T] = Array(xs*)
| array(1, 2)
|}""".stripMargin,
newArray2Opcodes(T_INT, List(Op(DUP), Op(ICONST_0), Op(ICONST_1), Op(IASTORE), Op(DUP), Op(ICONST_1), Op(ICONST_2), Op(IASTORE), TypeOp(CHECKCAST, "[I")))
diff --git a/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala
index a7900994d402..c751937bd9f9 100644
--- a/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala
+++ b/compiler/test/dotty/tools/backend/jvm/AsmConverters.scala
@@ -245,7 +245,7 @@ object ASMConverters {
case Jump(op, label) => method.visitJumpInsn(op, asmLabel(label))
case Ldc(op, cst) => method.visitLdcInsn(cst)
case LookupSwitch(op, dflt, keys, labels) => method.visitLookupSwitchInsn(asmLabel(dflt), keys.toArray, (labels map asmLabel).toArray)
- case TableSwitch(op, min, max, dflt, labels) => method.visitTableSwitchInsn(min, max, asmLabel(dflt), (labels map asmLabel).toArray: _*)
+ case TableSwitch(op, min, max, dflt, labels) => method.visitTableSwitchInsn(min, max, asmLabel(dflt), (labels map asmLabel).toArray*)
case Invoke(op, owner, name, desc, itf) => method.visitMethodInsn(op, owner, name, desc, itf)
case InvokeDynamic(op, name, desc, bsm, bsmArgs) => method.visitInvokeDynamicInsn(name, desc, unconvertMethodHandle(bsm), unconvertBsmArgs(bsmArgs))
case NewArray(op, desc, dims) => method.visitMultiANewArrayInsn(desc, dims)
diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
index 29119c8d081f..84c7b916fa74 100644
--- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
+++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
@@ -308,7 +308,7 @@ class DottyBytecodeTests extends DottyBytecodeTest {
|import java.nio.file._
|class Test {
| def test(xs: Array[String]) = {
- | val p4 = Paths.get("Hello", xs: _*)
+ | val p4 = Paths.get("Hello", xs*)
| }
|}
""".stripMargin
@@ -1701,6 +1701,38 @@ class DottyBytecodeTests extends DottyBytecodeTest {
}
}
+
+ @Test def i18816 = {
+ // The primary goal of this test is to check that `LineNumber` entries have correct numbers
+ val source =
+ """trait Context
+ |
+ |class A(x: Context) extends AnyVal:
+ | given [T]: Context = x
+ |
+ | def m1 =
+ | println(m3)
+ | def m2 =
+ | m3 // line 9
+ | println(m2)
+ |
+ | def m3(using Context): String = ""
+ """.stripMargin
+
+ checkBCode(source) { dir =>
+ val clsIn = dir.lookupName("A$.class", directory = false).input
+ val clsNode = loadClassNode(clsIn, skipDebugInfo = false)
+ val method = getMethod(clsNode, "m2$1")
+ val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber])
+
+ // There used to be references to line 7 here
+ val expected = List(
+ LineNumber(9, Label(0)),
+ )
+
+ assertSameCode(instructions, expected)
+ }
+ }
}
object invocationReceiversTestCode {
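Note: the new `i18816` test guards the `LineNumberTable` emitted for a local method nested inside a value-class method with a `given` in scope; before the fix, the bytecode for `m2` carried references to line 7 instead of line 9. For inspecting such regressions outside the test harness, a minimal sketch using ASM's tree API (which the test utilities also build on):

```scala
import org.objectweb.asm.ClassReader
import org.objectweb.asm.tree.{ClassNode, LineNumberNode}
import scala.jdk.CollectionConverters.*

// Print the source lines recorded for each method of a class file.
def dumpLineNumbers(classBytes: Array[Byte]): Unit =
  val node = new ClassNode()
  new ClassReader(classBytes).accept(node, 0)
  for m <- node.methods.asScala do
    val lines = m.instructions.iterator.asScala.collect {
      case ln: LineNumberNode => ln.line
    }
    println(s"${m.name}: ${lines.mkString(", ")}")
```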
diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala
index 9529f94a3890..88b9fe0c649d 100644
--- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala
@@ -135,7 +135,7 @@ class BootstrappedOnlyCompilationTests {
if scala.util.Properties.isWin then basicTests
else compileDir("tests/old-tasty-interpreter-prototype", withTastyInspectorOptions) :: basicTests
- aggregateTests(tests: _*).checkRuns()
+ aggregateTests(tests*).checkRuns()
}
@Test def runBootstrappedOnly: Unit = {
diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala
index 2acb71ce62a8..fa89c82fc7e7 100644
--- a/compiler/test/dotty/tools/dotc/CompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala
@@ -44,7 +44,7 @@ class CompilationTests {
// Run tests for legacy lazy vals
compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)),
compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")),
- compileDir("tests/pos-special/stdlib", defaultOptions),
+ compileDir("tests/pos-special/stdlib", allowDeepSubtypes),
)
if scala.util.Properties.isJavaAtLeast("16") then
@@ -60,6 +60,7 @@ class CompilationTests {
compileFile("tests/rewrites/rewrites.scala", defaultOptions.and("-source", "3.0-migration").and("-rewrite", "-indent")),
compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")),
compileFile("tests/rewrites/rewrites3x-fatal-warnings.scala", defaultOptions.and("-rewrite", "-source", "future-migration", "-Xfatal-warnings")),
+ compileFile("tests/rewrites/with-type-operator.scala", defaultOptions.and("-rewrite", "-source", "future-migration")),
compileFile("tests/rewrites/filtering-fors.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")),
compileFile("tests/rewrites/refutable-pattern-bindings.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")),
compileFile("tests/rewrites/i8982.scala", defaultOptions.and("-indent", "-rewrite")),
diff --git a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala
index b515ebb05f96..81dd7d7f4710 100644
--- a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala
+++ b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala
@@ -45,7 +45,7 @@ class IdempotencyTests {
compileList(testDir.getName, sources.reverse, opt)(TestGroup("idempotency/orderIdempotency2"))
)
}
- aggregateTests(tests: _*)
+ aggregateTests(tests*)
}
def check(name: String) = {
diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala
index 0af4434fdcde..45b807f73079 100644
--- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala
+++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala
@@ -92,7 +92,7 @@ class CommentPicklingTest {
val out = tmp./("out")
out.createDirectory()
- val options = compileOptions.and("-d", out.toAbsolute.toString).and(sourceFiles: _*)
+ val options = compileOptions.and("-d", out.toAbsolute.toString).and(sourceFiles*)
val reporter = TestReporter.reporter(System.out, logLevel = ERROR)
Main.process(options.all, reporter)
assertFalse("Compilation failed.", reporter.hasErrors)
diff --git a/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala
new file mode 100644
index 000000000000..53c1f40638a4
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala
@@ -0,0 +1,311 @@
+package dotty.tools.dotc.core.tasty
+
+import org.junit.Assert._
+import org.junit.{Test, Ignore}
+
+import dotty.tools.tasty.TastyFormat._
+import dotty.tools.tasty.TastyBuffer._
+import dotty.tools.tasty.TastyBuffer
+import dotty.tools.tasty.TastyReader
+import dotty.tools.tasty.UnpickleException
+import dotty.tools.tasty.TastyHeaderUnpickler
+import dotty.tools.tasty.TastyHeaderUnpickler.TastyVersion
+import dotty.tools.tasty.UnpicklerConfig
+
+class TastyHeaderUnpicklerTest {
+
+ import TastyHeaderUnpicklerTest._
+
+ @Test
+ def okThisCompilerReadsItself: Unit = {
+ val file = TastyVersion(MajorVersion, MinorVersion, ExperimentalVersion)
+ val read = TastyVersion(MajorVersion, MinorVersion, ExperimentalVersion)
+ runTest(file, read, "Scala (current)")
+ }
+
+ @Test
+ def okExperimentalCompilerReadsItself: Unit = {
+ val file = TastyVersion(MajorVersion, MinorVersion, 1)
+ val read = TastyVersion(MajorVersion, MinorVersion, 1)
+ runTest(file, read, "Scala (current)")
+ }
+
+ @Test
+ def okStableCompilerReadsItself: Unit = {
+ val file = TastyVersion(MajorVersion, MinorVersion, 0)
+ val read = TastyVersion(MajorVersion, MinorVersion, 0)
+ runTest(file, read, "Scala (current)")
+ }
+
+ @Test
+ def okReadOldStableMinorFromStable: Unit = {
+ val file = TastyVersion(28, 2, 0)
+ val read = TastyVersion(28, 3, 0)
+ runTest(file, read, "Scala 3.2.2")
+ }
+
+ @Test
+ def okReadOldStableMinorFromExperimental: Unit = {
+ val file = TastyVersion(28, 2, 0)
+ val read = TastyVersion(28, 3, 1)
+ runTest(file, read, "Scala 3.2.2")
+ }
+
+ @Test
+ def failReadExperimentalFromStableSameMinor: Unit = {
+ val file = TastyVersion(28, 4, 1)
+ val read = TastyVersion(28, 4, 0)
+ expectUnpickleError(runTest(file, read, "Scala 3.4.0-RC1-bin-SNAPSHOT")) {
+ """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.4.0-RC1-bin-SNAPSHOT,
+ | expected stable TASTy from 28.0 to 28.4.
+ | The source of this file should be recompiled by a Scala 3.4.0 compiler or newer.
+ | Usually this means that the library dependency containing this file should be updated.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadExperimentalFromOldMinor: Unit = {
+ val file = TastyVersion(28, 3, 1)
+ val read = TastyVersion(28, 4, 0)
+ expectUnpickleError(runTest(file, read, "Scala 3.2.1-RC1-bin-SNAPSHOT")) {
+ """Backward incompatible TASTy file has version 28.3-experimental-1, produced by Scala 3.2.1-RC1-bin-SNAPSHOT,
+ | expected stable TASTy from 28.0 to 28.4.
+ | The source of this file should be recompiled by a Scala 3.3.0 compiler or newer.
+ | Usually this means that the library dependency containing this file should be updated.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadOldMajor: Unit = {
+ val file = TastyVersion(27, 3, 0)
+ val read = TastyVersion(28, 3, 0)
+ expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1")) {
+ """Backward incompatible TASTy file has version 27.3,
+ | expected stable TASTy from 28.0 to 28.3.
+ | The source of this file should be recompiled by a Scala 3.0.0 compiler or newer.
+ | Usually this means that the library dependency containing this file should be updated.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadOldMajor_generic: Unit = {
+ // We check the generic version here because it will produce a different message.
+ val file = TastyVersion(27, 3, 0)
+ val read = TastyVersion(28, 3, 0)
+ expectUnpickleError(runTest(file, read, "Scala 3.0.0-M1", generic = true)) {
+ """Backward incompatible TASTy file has version 27.3,
+ | expected stable TASTy from 28.0 to 28.3.
+ | The source of this file should be recompiled by a later version.
+ | Usually this means that the classpath entry of this file should be updated.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadOldExperimentalFromSameMinorWhileExperimental: Unit = {
+ val file = TastyVersion(28, 4, 1)
+ val read = TastyVersion(28, 4, 2)
+ expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY")) {
+ """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.3.3-RC1-NIGHTLY,
+ | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2.
+ | The source of this file should be recompiled by the same nightly or snapshot Scala 3.3 compiler.
+ | Usually this means that the library dependency containing this file should be updated.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadOldExperimentalFromSameMinorWhileExperimental_generic: Unit = {
+ // We check the generic version here because it will produce a different message.
+ val file = TastyVersion(28, 4, 1)
+ val read = TastyVersion(28, 4, 2)
+ expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC1-NIGHTLY", generic = true)) {
+ """Backward incompatible TASTy file has version 28.4-experimental-1, produced by Scala 3.3.3-RC1-NIGHTLY,
+ | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-2.
+ | The source of this file should be recompiled by a later version.
+ | Usually this means that the classpath entry of this file should be updated.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerStableMinorFromStable: Unit = {
+ val file = TastyVersion(28, 3, 0)
+ val read = TastyVersion(28, 2, 0)
+ expectUnpickleError(runTest(file, read, "Scala 3.3.1")) {
+ """Forward incompatible TASTy file has version 28.3, produced by Scala 3.3.1,
+ | expected stable TASTy from 28.0 to 28.2.
+ | To read this TASTy file, use a Scala 3.3.0 compiler or newer.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerStableMinorFromStable_generic: Unit = {
+ // We check the generic version here because it will produce a different message.
+ val file = TastyVersion(28, 3, 0)
+ val read = TastyVersion(28, 2, 0)
+ expectUnpickleError(runTest(file, read, "Scala 3.3.1", generic = true)) {
+ """Forward incompatible TASTy file has version 28.3, produced by Scala 3.3.1,
+ | expected stable TASTy from 28.0 to 28.2.
+ | To read this TASTy file, use a newer version of this tool compatible with TASTy 28.3.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerExperimentalMinorFromStable: Unit = {
+ val file = TastyVersion(28, 3, 1)
+ val read = TastyVersion(28, 2, 0)
+ expectUnpickleError(runTest(file, read, "Scala 3.2.2-RC1-NIGHTLY")) {
+ """Forward incompatible TASTy file has version 28.3-experimental-1, produced by Scala 3.2.2-RC1-NIGHTLY,
+ | expected stable TASTy from 28.0 to 28.2.
+ | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.2 compiler.
+ | Note that you are using a stable compiler, which can not read experimental TASTy.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerStableMajor: Unit = {
+ val file = TastyVersion(29, 0, 0)
+ val read = TastyVersion(28, 3, 0)
+ expectUnpickleError(runTest(file, read, "Scala 4.0.0")) {
+ """Forward incompatible TASTy file has version 29.0, produced by Scala 4.0.0,
+ | expected stable TASTy from 28.0 to 28.3.
+ | To read this TASTy file, use a more recent Scala compiler.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerExperimentalMajor: Unit = {
+ val file = TastyVersion(29, 0, 1)
+ val read = TastyVersion(28, 3, 0)
+ expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1")) {
+ """Forward incompatible TASTy file has version 29.0-experimental-1, produced by Scala 4.0.0-M1,
+ | expected stable TASTy from 28.0 to 28.3.
+ | To read this experimental TASTy file, use the same Scala compiler.
+ | Note that you are using a stable compiler, which can not read experimental TASTy.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerExperimentalMajor_generic: Unit = {
+ // We check the generic version here because it will produce a different message.
+ val file = TastyVersion(29, 0, 1)
+ val read = TastyVersion(28, 3, 0)
+ expectUnpickleError(runTest(file, read, "Scala 4.0.0-M1", generic = true)) {
+ """Forward incompatible TASTy file has version 29.0-experimental-1, produced by Scala 4.0.0-M1,
+ | expected stable TASTy from 28.0 to 28.3.
+ | To read this experimental TASTy file, use the version of this tool compatible with TASTy 29.0-experimental-1.
+ | Note that this tool does not support reading experimental TASTy.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadStableFromExperimentalSameMinor: Unit = {
+ val file = TastyVersion(28, 4, 0)
+ val read = TastyVersion(28, 4, 1) // 3.4.0-RC1-NIGHTLY
+ expectUnpickleError(runTest(file, read, "Scala 3.4.2")) {
+ """Forward incompatible TASTy file has version 28.4, produced by Scala 3.4.2,
+ | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1.
+ | To read this TASTy file, use a Scala 3.4.0 compiler or newer.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerExperimentalFromExperimentalSameMinor: Unit = {
+ val file = TastyVersion(28, 4, 2)
+ val read = TastyVersion(28, 4, 1)
+ expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY")) {
+ """Forward incompatible TASTy file has version 28.4-experimental-2, produced by Scala 3.3.3-RC2-NIGHTLY,
+ | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1.
+ | To read this experimental TASTy file, use the same nightly or snapshot Scala 3.3 compiler.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+ @Test
+ def failReadNewerExperimentalFromExperimentalSameMinor_generic: Unit = {
+ // We check the generic version here because it will produce a different message.
+ val file = TastyVersion(28, 4, 2)
+ val read = TastyVersion(28, 4, 1)
+ expectUnpickleError(runTest(file, read, "Scala 3.3.3-RC2-NIGHTLY", generic = true)) {
+ """Forward incompatible TASTy file has version 28.4-experimental-2, produced by Scala 3.3.3-RC2-NIGHTLY,
+ | expected stable TASTy from 28.0 to 28.3, or exactly 28.4-experimental-1.
+ | To read this experimental TASTy file, use the version of this tool compatible with TASTy 28.4-experimental-2.
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
+ }
+ }
+
+}
+
+object TastyHeaderUnpicklerTest {
+
+ def fillHeader(maj: Int, min: Int, exp: Int, compiler: String): TastyBuffer = {
+ val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8).nn
+ val buf = new TastyBuffer(header.length + 32 + compilerBytes.length)
+ for (ch <- header) buf.writeByte(ch.toByte)
+ buf.writeNat(maj)
+ buf.writeNat(min)
+ buf.writeNat(exp)
+ buf.writeNat(compilerBytes.length)
+ buf.writeBytes(compilerBytes, compilerBytes.length)
+ buf.writeUncompressedLong(237478L)
+ buf.writeUncompressedLong(324789L)
+ buf
+ }
+
+ case class CustomScalaConfig(compilerVersion: TastyVersion) extends TastyUnpickler.Scala3CompilerConfig {
+ override def majorVersion: Int = compilerVersion.major
+ override def minorVersion: Int = compilerVersion.minor
+ override def experimentalVersion: Int = compilerVersion.experimental
+ }
+
+ case class CustomGenericConfig(compilerVersion: TastyVersion) extends UnpicklerConfig.Generic {
+ override def majorVersion: Int = compilerVersion.major
+ override def minorVersion: Int = compilerVersion.minor
+ override def experimentalVersion: Int = compilerVersion.experimental
+ }
+
+ def runTest(file: TastyVersion, read: TastyVersion, compiler: String, generic: Boolean = false): Unit = {
+ val headerBuffer = fillHeader(file.major, file.minor, file.experimental, compiler)
+ val bs = headerBuffer.bytes.clone
+ val config = if (generic) CustomGenericConfig(read) else CustomScalaConfig(read)
+ val hr = new TastyHeaderUnpickler(config, new TastyReader(bs))
+ hr.readFullHeader()
+ }
+
+ def expectUnpickleError(op: => Unit)(message: String) = {
+ try {
+ op
+ fail()
+ }
+ catch {
+ case err: UnpickleException => assert(err.getMessage.nn.contains(message))
+ }
+ }
+
+}
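Note: the expected error messages in this new test spell out the TASTy compatibility policy: a reader accepts files with the same major version; a stable reader accepts stable files up to its own minor; an experimental reader accepts stable files from strictly older minors, or exactly its own experimental version. Condensed into a predicate (illustrative names, not the unpickler's API):

```scala
// Hedged sketch of the compatibility rule encoded by the messages above.
case class TastyVer(major: Int, minor: Int, experimental: Int):
  def isStable: Boolean = experimental == 0

def canRead(file: TastyVer, reader: TastyVer): Boolean =
  file.major == reader.major && {
    if reader.isStable then file.isStable && file.minor <= reader.minor
    else (file.isStable && file.minor < reader.minor) || file == reader
  }
```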
diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
index 03b61c393d35..a96a2765d56a 100644
--- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
+++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
@@ -17,6 +17,7 @@ import dotty.Properties
import interfaces.Diagnostic.{ERROR, WARNING}
import scala.io.Codec
+import scala.compiletime.uninitialized
class TestReporter protected (outWriter: PrintWriter, logLevel: Int)
extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with MessageRendering {
@@ -81,9 +82,9 @@ object TestReporter {
private val failedTestsFileName: String = "last-failed.log"
private val failedTestsFile: JFile = new JFile(s"$testLogsDirName/$failedTestsFileName")
- private var outFile: JFile = _
- private var logWriter: PrintWriter = _
- private var failedTestsWriter: PrintWriter = _
+ private var outFile: JFile = uninitialized
+ private var logWriter: PrintWriter = uninitialized
+ private var failedTestsWriter: PrintWriter = uninitialized
private def initLog() = if (logWriter eq null) {
val date = new Date
diff --git a/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala b/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala
index de8c2c11f9c2..8839a6cd03b1 100644
--- a/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala
+++ b/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala
@@ -130,7 +130,7 @@ class SemanticdbTests:
val target = Files.createTempDirectory("semanticdb")
val javaArgs = Array("-d", target.toString) ++ javaFiles().map(_.toString)
val javac = ToolProvider.getSystemJavaCompiler
- val exitJava = javac.run(null, null, null, javaArgs:_*)
+ val exitJava = javac.run(null, null, null, javaArgs*)
assert(exitJava == 0, "java compiler has errors")
val args = Array(
"-Xsemanticdb",
diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala
index 0f3ada041538..4fd1211698f6 100755
--- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala
+++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala
@@ -43,7 +43,7 @@ class ClasspathTests:
// cwd:
// classpath:
- val scriptOutput: Seq[String] = exec(cmd:_*)
+ val scriptOutput: Seq[String] = exec(cmd*)
val scriptCwd: String = findTaggedLine("cwd", scriptOutput) // the value tagged "cwd: "
printf("script ran in directory [%s]\n", scriptCwd)
val scriptCp = findTaggedLine("classpath", scriptOutput) // the value tagged "classpath: "
@@ -94,7 +94,7 @@ class ClasspathTests:
cmd.foreach { printf("[%s]\n", _) }
// test script reports the classpath it sees
- val scriptOutput = exec(cmd:_*)
+ val scriptOutput = exec(cmd*)
val scriptCp = findTaggedLine("unglobbed classpath", scriptOutput)
printf("%s\n", scriptCp)
val classpathJars = scriptCp.split(psep).map { _.getName }.sorted.distinct
diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
index 8d07002ac620..4804ffd349cb 100644
--- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
+++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
@@ -85,18 +85,18 @@ trait ParallelTesting extends RunnerOrchestration { self =>
val newFlags = newFlags0.toArray
if (!flags.options.containsSlice(newFlags)) self match {
case self: JointCompilationSource =>
- self.copy(flags = flags.and(newFlags:_*))
+ self.copy(flags = flags.and(newFlags*))
case self: SeparateCompilationSource =>
- self.copy(flags = flags.and(newFlags:_*))
+ self.copy(flags = flags.and(newFlags*))
}
else self
}
def withoutFlags(flags1: String*): TestSource = self match {
case self: JointCompilationSource =>
- self.copy(flags = flags.without(flags1: _*))
+ self.copy(flags = flags.without(flags1*))
case self: SeparateCompilationSource =>
- self.copy(flags = flags.without(flags1: _*))
+ self.copy(flags = flags.without(flags1*))
}
lazy val allToolArgs: ToolArgs =
@@ -490,7 +490,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
def scalacOptions = toolArgs.getOrElse(ToolName.Scalac, Nil)
val flags = flags0
- .and(scalacOptions: _*)
+ .and(scalacOptions*)
.and("-d", targetDir.getPath)
.withClasspath(targetDir.getPath)
diff --git a/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala
index 8e9a27e766b4..9047bb6737dc 100644
--- a/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala
+++ b/compiler/test/dotty/tools/vulpix/RunnerOrchestration.scala
@@ -14,6 +14,7 @@ import scala.concurrent.duration.Duration
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.collection.mutable
+import scala.compiletime.uninitialized
/** Vulpix spawns JVM subprocesses (`numberOfSlaves`) in order to run tests
* without compromising the main JVM
@@ -70,8 +71,8 @@ trait RunnerOrchestration {
withRunner(_.runMain(classPath))
private class Runner(private var process: Process) {
- private var childStdout: BufferedReader = _
- private var childStdin: PrintStream = _
+ private var childStdout: BufferedReader = uninitialized
+ private var childStdin: PrintStream = uninitialized
/** Checks if `process` is still alive
*
diff --git a/language-server/test/dotty/tools/languageserver/util/Code.scala b/language-server/test/dotty/tools/languageserver/util/Code.scala
index 7eb2d377cb54..f88dff70ecaf 100644
--- a/language-server/test/dotty/tools/languageserver/util/Code.scala
+++ b/language-server/test/dotty/tools/languageserver/util/Code.scala
@@ -48,7 +48,7 @@ object Code {
* perform actions such as finding all references, etc.
*/
def code(args: Embedded*): ScalaSourceWithPositions = {
- val (text, positions) = textAndPositions(args: _*)
+ val (text, positions) = textAndPositions(args*)
ScalaSourceWithPositions(text, positions)
}
@@ -58,7 +58,7 @@ object Code {
* @see code
*/
def ws(args: Embedded*): WorksheetWithPositions = {
- val (text, positions) = textAndPositions(args: _*)
+ val (text, positions) = textAndPositions(args*)
WorksheetWithPositions(text, positions)
}
@@ -69,7 +69,7 @@ object Code {
* @see code
*/
def tasty(args: Embedded*): TastyWithPositions = {
- val (text, positions) = textAndPositions(args: _*)
+ val (text, positions) = textAndPositions(args*)
TastyWithPositions(text, positions)
}
diff --git a/library/src/scala/IArray.scala b/library/src/scala/IArray.scala
index e284c2e35c2f..887ca517ef2b 100644
--- a/library/src/scala/IArray.scala
+++ b/library/src/scala/IArray.scala
@@ -411,25 +411,25 @@ object IArray:
def emptyObjectIArray: IArray[Object] = Array.emptyObjectArray
/** An immutable array with given elements. */
- def apply[T](xs: T*)(using ct: ClassTag[T]): IArray[T] = Array(xs: _*)
+ def apply[T](xs: T*)(using ct: ClassTag[T]): IArray[T] = Array(xs*)
/** An immutable array with given elements. */
- def apply(x: Boolean, xs: Boolean*): IArray[Boolean] = Array(x, xs: _*)
+ def apply(x: Boolean, xs: Boolean*): IArray[Boolean] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Byte, xs: Byte*): IArray[Byte] = Array(x, xs: _*)
+ def apply(x: Byte, xs: Byte*): IArray[Byte] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Short, xs: Short*): IArray[Short] = Array(x, xs: _*)
+ def apply(x: Short, xs: Short*): IArray[Short] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Char, xs: Char*): IArray[Char] = Array(x, xs: _*)
+ def apply(x: Char, xs: Char*): IArray[Char] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Int, xs: Int*): IArray[Int] = Array(x, xs: _*)
+ def apply(x: Int, xs: Int*): IArray[Int] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Long, xs: Long*): IArray[Long] = Array(x, xs: _*)
+ def apply(x: Long, xs: Long*): IArray[Long] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Float, xs: Float*): IArray[Float] = Array(x, xs: _*)
+ def apply(x: Float, xs: Float*): IArray[Float] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Double, xs: Double*): IArray[Double] = Array(x, xs: _*)
+ def apply(x: Double, xs: Double*): IArray[Double] = Array(x, xs*)
/** An immutable array with given elements. */
- def apply(x: Unit, xs: Unit*): IArray[Unit] = Array(x, xs: _*)
+ def apply(x: Unit, xs: Unit*): IArray[Unit] = Array(x, xs*)
/** Build an array from the iterable collection.
*
@@ -459,7 +459,7 @@ object IArray:
// `Array.concat` should arguably take in a `Seq[Array[_ <: T]]`,
// but since it currently takes a `Seq[Array[T]]` we have to perform a cast,
// knowing tacitly that `concat` is not going to do the wrong thing.
- Array.concat[T](xss.asInstanceOf[Seq[Array[T]]]: _*)
+ Array.concat[T](xss.asInstanceOf[Seq[Array[T]]]*)
/** Returns an immutable array that contains the results of some element computation a number
* of times. Each element is determined by a separate computation.
diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala
index 2d9aaea2be08..6993b8202082 100644
--- a/library/src/scala/Tuple.scala
+++ b/library/src/scala/Tuple.scala
@@ -1,12 +1,12 @@
package scala
import annotation.{experimental, showAsInfix}
-import compiletime._
-import compiletime.ops.int._
+import compiletime.*
+import compiletime.ops.int.*
/** Tuple of arbitrary arity */
sealed trait Tuple extends Product {
- import Tuple._
+ import Tuple.*
/** Create a copy of this tuple as an Array */
inline def toArray: Array[Object] =
@@ -292,7 +292,7 @@ case object EmptyTuple extends Tuple {
/** Tuple of arbitrary non-zero arity */
sealed trait NonEmptyTuple extends Tuple {
- import Tuple._
+ import Tuple.*
/** Get the i-th element of this tuple.
* Equivalent to productElement but with a precise return type.
diff --git a/library/src/scala/annotation/MacroAnnotation.scala b/library/src/scala/annotation/MacroAnnotation.scala
index 5c39ef45f417..999bc3095a69 100644
--- a/library/src/scala/annotation/MacroAnnotation.scala
+++ b/library/src/scala/annotation/MacroAnnotation.scala
@@ -2,7 +2,7 @@
package scala
package annotation
-import scala.quoted._
+import scala.quoted.*
/** Base trait for macro annotation implementation.
* Macro annotations can transform definitions and add new definitions.
@@ -46,7 +46,7 @@ definition that is owned by the package or package object.
*
* class memoize extends MacroAnnotation:
* def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] =
- * import quotes.reflect._
+ * import quotes.reflect.*
* tree match
* case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) =>
* (param.tpt.tpe.asType, tpt.tpe.asType) match
diff --git a/library/src/scala/annotation/constructorOnly.scala b/library/src/scala/annotation/constructorOnly.scala
index c78c16534183..8fefc3b012dd 100644
--- a/library/src/scala/annotation/constructorOnly.scala
+++ b/library/src/scala/annotation/constructorOnly.scala
@@ -8,7 +8,7 @@
package scala.annotation
-import scala.annotation.meta._
+import scala.annotation.meta.*
/** An annotation that goes on parameters of classes or traits. It asserts
* that the parameter is used only for initialization and is not kept in
diff --git a/library/src/scala/annotation/newMain.scala b/library/src/scala/annotation/newMain.scala
index 6864b5accd6c..552e4225a648 100644
--- a/library/src/scala/annotation/newMain.scala
+++ b/library/src/scala/annotation/newMain.scala
@@ -65,8 +65,8 @@ import scala.annotation.meta.param
*/
@experimental
final class newMain extends MainAnnotation[FromString, Any]:
- import newMain._
- import MainAnnotation._
+ import newMain.*
+ import MainAnnotation.*
private val longArgRegex = "--[a-zA-Z][a-zA-Z0-9]+".r
private val shortArgRegex = "-[a-zA-Z]".r
diff --git a/library/src/scala/annotation/static.scala b/library/src/scala/annotation/static.scala
index b00072b18908..7cee5fc160d5 100644
--- a/library/src/scala/annotation/static.scala
+++ b/library/src/scala/annotation/static.scala
@@ -1,6 +1,6 @@
package scala.annotation
-import scala.annotation.meta._
+import scala.annotation.meta.*
/** https://github.com/scala/scala.github.com/pull/491 */
diff --git a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
index 477ac6d742f7..e69de29bb2d1 100644
--- a/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
+++ b/library/src/scala/annotation/unchecked/uncheckedCapabilityLeaks.scala
@@ -1,12 +0,0 @@
-package scala.annotation
-package unchecked
-
-/** An annotation for mutable variables that are allowed to capture
- * the root capability `cap`. Allowing this is not capture safe since
- * it can cause leakage of capabilities from local scopes by assigning
- * values retaining such capabilties to the annotated variable in
- * an outer scope.
- */
-class uncheckedCaptures extends StaticAnnotation
-
-
diff --git a/library/src/scala/annotation/unchecked/uncheckedCaptures.scala b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala
new file mode 100644
index 000000000000..477ac6d742f7
--- /dev/null
+++ b/library/src/scala/annotation/unchecked/uncheckedCaptures.scala
@@ -0,0 +1,12 @@
+package scala.annotation
+package unchecked
+
+/** An annotation for mutable variables that are allowed to capture
+ * the root capability `cap`. Allowing this is not capture safe since
+ * it can cause leakage of capabilities from local scopes by assigning
+ * values retaining such capabilities to the annotated variable in
+ * an outer scope.
+ */
+class uncheckedCaptures extends StaticAnnotation
+
+
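Note: the two hunks above are effectively a rename: the `uncheckedCaptures` annotation moves, unchanged, from the misnamed `uncheckedCapabilityLeaks.scala` into a file matching the class name. An illustrative use (meaningful only with experimental capture checking enabled):

```scala
import scala.annotation.unchecked.uncheckedCaptures

// Opt this mutable variable out of the capture-safety check that would
// otherwise forbid it from holding values capturing the root capability.
@uncheckedCaptures
var handlers: List[() => Unit] = Nil
```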
diff --git a/library/src/scala/compiletime/ops/any.scala b/library/src/scala/compiletime/ops/any.scala
index f0998058e9f7..e3f030c33634 100644
--- a/library/src/scala/compiletime/ops/any.scala
+++ b/library/src/scala/compiletime/ops/any.scala
@@ -5,7 +5,7 @@ object any:
/** Equality comparison of two singleton types.
* ```scala
* //{
- * import compiletime.ops.any._
+ * import compiletime.ops.any.*
* //}
* val eq1: 1 == 1 = true
* val eq2: 1 == "1" = false
@@ -18,7 +18,7 @@ object any:
/** Inequality comparison of two singleton types.
* ```scala
* //{
- * import compiletime.ops.any._
+ * import compiletime.ops.any.*
* //}
* val eq1: 1 != 1 = false
* val eq2: 1 != "1" = true
@@ -31,7 +31,7 @@ object any:
/** Tests if a type is a constant.
* ```scala
* //{
- * import compiletime.ops.any._
+ * import compiletime.ops.any.*
* //}
* val c1: IsConst[1] = true
* val c2: IsConst["hi"] = true
@@ -42,7 +42,7 @@ object any:
* will be evaluated only at its concrete type application. E.g.:
* ```scala
* //{
- * import compiletime.ops.any._
+ * import compiletime.ops.any.*
* //}
* // def `isConst` returns the type `IsConst[X]`, since `X` is not yet known.
* def isConst[X] : IsConst[X] = ???
@@ -56,7 +56,7 @@ object any:
/** String conversion of a constant singleton type.
* ```scala
* //{
- * import compiletime.ops.any._
+ * import compiletime.ops.any.*
* //}
* val s1: ToString[1] = "1"
* val sTrue: ToString[true] = "true"
diff --git a/library/src/scala/compiletime/ops/boolean.scala b/library/src/scala/compiletime/ops/boolean.scala
index 3645524607dd..f6a8c3d3b37e 100644
--- a/library/src/scala/compiletime/ops/boolean.scala
+++ b/library/src/scala/compiletime/ops/boolean.scala
@@ -6,7 +6,7 @@ object boolean:
/** Negation of a `Boolean` singleton type.
* ```scala
* //{
- * import compiletime.ops.boolean._
+ * import compiletime.ops.boolean.*
* //}
* val notFalse: ![false] = true
* val notTrue: ![true] = false
@@ -18,7 +18,7 @@ object boolean:
/** Exclusive disjunction of two `Boolean` singleton types.
* ```scala
* //{
- * import compiletime.ops.boolean._
+ * import compiletime.ops.boolean.*
* //}
* val a: true ^ true = false
* val b: false ^ true = true
@@ -30,7 +30,7 @@ object boolean:
/** Conjunction of two `Boolean` singleton types.
* ```scala
* //{
- * import compiletime.ops.boolean._
+ * import compiletime.ops.boolean.*
* //}
* val a: true && true = true
* val b: false && true = false
@@ -42,7 +42,7 @@ object boolean:
/** Disjunction of two `Boolean` singleton types.
* ```scala
* //{
- * import compiletime.ops.boolean._
+ * import compiletime.ops.boolean.*
* //}
* val a: true || false = true
* val b: false || false = false
diff --git a/library/src/scala/compiletime/ops/double.scala b/library/src/scala/compiletime/ops/double.scala
index 65a2e1dde407..0e038904221e 100644
--- a/library/src/scala/compiletime/ops/double.scala
+++ b/library/src/scala/compiletime/ops/double.scala
@@ -5,7 +5,7 @@ object double:
/** Addition of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val sum: 2.0 + 2.0 = 4.0
* ```
@@ -16,7 +16,7 @@ object double:
/** Subtraction of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val sub: 4.0 - 2.0 = 2.0
* ```
@@ -27,7 +27,7 @@ object double:
/** Multiplication of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val mul: 4.0 * 2.0 = 8.0
* ```
@@ -38,7 +38,7 @@ object double:
/** Division of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val div: 5.0 / 2.0 = 2.5
* ```
@@ -49,7 +49,7 @@ object double:
/** Remainder of the division of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val mod: 5.0 % 2.0 = 1.0
* ```
@@ -60,7 +60,7 @@ object double:
/** Less-than comparison of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val lt1: 4.0 < 2.0 = false
* val lt2: 2.0 < 4.0 = true
@@ -72,7 +72,7 @@ object double:
/** Greater-than comparison of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val gt1: 4.0 > 2.0 = true
* val gt2: 2.0 > 2.0 = false
@@ -84,7 +84,7 @@ object double:
/** Greater-or-equal comparison of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val ge1: 4.0 >= 2.0 = true
* val ge2: 2.0 >= 3.0 = false
@@ -96,7 +96,7 @@ object double:
/** Less-or-equal comparison of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val lt1: 4.0 <= 2.0 = false
* val lt2: 2.0 <= 2.0 = true
@@ -108,7 +108,7 @@ object double:
/** Absolute value of a `Double` singleton type.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val abs: Abs[-1.0] = 1.0
* ```
@@ -119,7 +119,7 @@ object double:
/** Negation of a `Double` singleton type.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val neg1: Negate[-1.0] = 1.0
* val neg2: Negate[1.0] = -1.0
@@ -131,7 +131,7 @@ object double:
/** Minimum of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val min: Min[-1.0, 1.0] = -1.0
* ```
@@ -142,7 +142,7 @@ object double:
/** Maximum of two `Double` singleton types.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val max: Max[-1.0, 1.0] = 1.0
* ```
@@ -153,7 +153,7 @@ object double:
/** Int conversion of a `Double` singleton type.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val x: ToInt[1.0] = 1
* ```
@@ -164,7 +164,7 @@ object double:
/** Long conversion of a `Double` singleton type.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val x: ToLong[1.0] = 1L
* ```
@@ -175,7 +175,7 @@ object double:
/** Float conversion of a `Double` singleton type.
* ```scala
* //{
- * import compiletime.ops.double._
+ * import compiletime.ops.double.*
* //}
* val x: ToFloat[1.0] = 1.0f
* ```
diff --git a/library/src/scala/compiletime/ops/float.scala b/library/src/scala/compiletime/ops/float.scala
index 0f2a8faa43e7..d7be87be3d9c 100644
--- a/library/src/scala/compiletime/ops/float.scala
+++ b/library/src/scala/compiletime/ops/float.scala
@@ -5,7 +5,7 @@ object float:
/** Addition of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val sum: 2.0f + 2.0f = 4.0f
* ```
@@ -16,7 +16,7 @@ object float:
/** Subtraction of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val sub: 4.0f - 2.0f = 2.0f
* ```
@@ -27,7 +27,7 @@ object float:
/** Multiplication of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val mul: 4.0f * 2.0f = 8.0f
* ```
@@ -38,7 +38,7 @@ object float:
/** Division of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val div: 5.0f / 2.0f = 2.5f
* ```
@@ -49,7 +49,7 @@ object float:
/** Remainder of the division of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val mod: 5.0f % 2.0f = 1.0f
* ```
@@ -60,7 +60,7 @@ object float:
/** Less-than comparison of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val lt1: 4.0f < 2.0f = false
* val lt2: 2.0f < 4.0f = true
@@ -72,7 +72,7 @@ object float:
/** Greater-than comparison of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val gt1: 4.0f > 2.0f = true
* val gt2: 2.0f > 2.0f = false
@@ -84,7 +84,7 @@ object float:
/** Greater-or-equal comparison of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val ge1: 4.0f >= 2.0f = true
* val ge2: 2.0f >= 3.0f = false
@@ -96,7 +96,7 @@ object float:
/** Less-or-equal comparison of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val lt1: 4.0f <= 2.0f = false
* val lt2: 2.0f <= 2.0f = true
@@ -108,7 +108,7 @@ object float:
/** Absolute value of a `Float` singleton type.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val abs: Abs[-1.0f] = 1.0f
* ```
@@ -119,7 +119,7 @@ object float:
/** Negation of a `Float` singleton type.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val neg1: Negate[-1.0f] = 1.0f
* val neg2: Negate[1.0f] = -1.0f
@@ -131,7 +131,7 @@ object float:
/** Minimum of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val min: Min[-1.0f, 1.0f] = -1.0f
* ```
@@ -142,7 +142,7 @@ object float:
/** Maximum of two `Float` singleton types.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val max: Max[-1.0f, 1.0f] = 1.0f
* ```
@@ -153,7 +153,7 @@ object float:
/** Int conversion of a `Float` singleton type.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val x: ToInt[1.0f] = 1
* ```
@@ -164,7 +164,7 @@ object float:
/** Long conversion of a `Float` singleton type.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val x: ToLong[1.0f] = 1L
* ```
@@ -175,7 +175,7 @@ object float:
/** Double conversion of a `Float` singleton type.
* ```scala
* //{
- * import compiletime.ops.float._
+ * import compiletime.ops.float.*
* //}
* val x: ToDouble[1.0f] = 1.0
* ```
diff --git a/library/src/scala/compiletime/ops/int.scala b/library/src/scala/compiletime/ops/int.scala
index 7efdc1851c4e..ed4a3c3c3261 100644
--- a/library/src/scala/compiletime/ops/int.scala
+++ b/library/src/scala/compiletime/ops/int.scala
@@ -6,7 +6,7 @@ object int:
*
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* type S[N <: Int] <: Int = N match {
* case 0 => 1
@@ -23,7 +23,7 @@ object int:
/** Addition of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val sum: 2 + 2 = 4
* ```
@@ -34,7 +34,7 @@ object int:
/** Subtraction of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val sub: 4 - 2 = 2
* ```
@@ -45,7 +45,7 @@ object int:
/** Multiplication of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val mul: 4 * 2 = 8
* ```
@@ -56,7 +56,7 @@ object int:
/** Integer division of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val div: 5 / 2 = 2
* ```
@@ -67,7 +67,7 @@ object int:
/** Remainder of the division of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val mod: 5 % 2 = 1
* ```
@@ -78,7 +78,7 @@ object int:
/** Binary left shift of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val lshift: 1 << 2 = 4
* ```
@@ -89,7 +89,7 @@ object int:
/** Binary right shift of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val rshift: 10 >> 1 = 5
* ```
@@ -100,7 +100,7 @@ object int:
/** Binary right shift of `X` by `Y`, filling the left with zeros.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val rshiftzero: 10 >>> 1 = 5
* ```
@@ -111,7 +111,7 @@ object int:
/** Bitwise xor of `X` and `Y`.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val xor: 10 ^ 30 = 20
* ```
@@ -122,7 +122,7 @@ object int:
/** Less-than comparison of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val lt1: 4 < 2 = false
* val lt2: 2 < 4 = true
@@ -134,7 +134,7 @@ object int:
/** Greater-than comparison of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val gt1: 4 > 2 = true
* val gt2: 2 > 2 = false
@@ -146,7 +146,7 @@ object int:
/** Greater-or-equal comparison of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val ge1: 4 >= 2 = true
* val ge2: 2 >= 3 = false
@@ -158,7 +158,7 @@ object int:
/** Less-or-equal comparison of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val lt1: 4 <= 2 = false
* val lt2: 2 <= 2 = true
@@ -170,7 +170,7 @@ object int:
/** Bitwise and of `X` and `Y`.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val and1: BitwiseAnd[4, 4] = 4
* val and2: BitwiseAnd[10, 5] = 0
@@ -182,7 +182,7 @@ object int:
/** Bitwise or of `X` and `Y`.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val or: BitwiseOr[10, 11] = 11
* ```
@@ -193,7 +193,7 @@ object int:
/** Absolute value of an `Int` singleton type.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val abs: Abs[-1] = 1
* ```
@@ -204,7 +204,7 @@ object int:
/** Negation of an `Int` singleton type.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val neg1: Negate[-1] = 1
* val neg2: Negate[1] = -1
@@ -216,7 +216,7 @@ object int:
/** Minimum of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val min: Min[-1, 1] = -1
* ```
@@ -227,7 +227,7 @@ object int:
/** Maximum of two `Int` singleton types.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val max: Max[-1, 1] = 1
* ```
@@ -238,7 +238,7 @@ object int:
/** String conversion of an `Int` singleton type.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val abs: ToString[1] = "1"
* ```
@@ -250,7 +250,7 @@ object int:
/** Long conversion of an `Int` singleton type.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val x: ToLong[1] = 1L
* ```
@@ -261,7 +261,7 @@ object int:
/** Float conversion of an `Int` singleton type.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val x: ToFloat[1] = 1.0f
* ```
@@ -272,7 +272,7 @@ object int:
/** Double conversion of an `Int` singleton type.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val x: ToDouble[1] = 1.0
* ```
@@ -286,7 +286,7 @@ object int:
* in other words if it is equal to zero.
* ```scala
* //{
- * import compiletime.ops.int._
+ * import compiletime.ops.int.*
* //}
* val zero_lzc: NumberOfLeadingZeros[0] = 32
* val eight_lzc: NumberOfLeadingZeros[8] = 28
diff --git a/library/src/scala/compiletime/ops/long.scala b/library/src/scala/compiletime/ops/long.scala
index 708fae3f07f3..25563ac70367 100644
--- a/library/src/scala/compiletime/ops/long.scala
+++ b/library/src/scala/compiletime/ops/long.scala
@@ -21,7 +21,7 @@ object long:
/** Addition of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val sum: 2L + 2L = 4L
* ```
@@ -32,7 +32,7 @@ object long:
/** Subtraction of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val sub: 4L - 2L = 2L
* ```
@@ -43,7 +43,7 @@ object long:
/** Multiplication of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val mul: 4L * 2L = 8L
* ```
@@ -54,7 +54,7 @@ object long:
/** Integer division of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val div: 5L / 2L = 2L
* ```
@@ -65,7 +65,7 @@ object long:
/** Remainder of the division of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val mod: 5L % 2L = 1L
* ```
@@ -76,7 +76,7 @@ object long:
/** Binary left shift of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val lshift: 1L << 2L = 4L
* ```
@@ -87,7 +87,7 @@ object long:
/** Binary right shift of `X` by `Y`.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val rshift: 10L >> 1L = 5L
* ```
@@ -98,7 +98,7 @@ object long:
/** Binary right shift of `X` by `Y`, filling the left with zeros.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val rshiftzero: 10L >>> 1L = 5L
* ```
@@ -109,7 +109,7 @@ object long:
/** Bitwise xor of `X` and `Y`.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val xor: 10L ^ 30L = 20L
* ```
@@ -120,7 +120,7 @@ object long:
/** Less-than comparison of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val lt1: 4L < 2L = false
* val lt2: 2L < 4L = true
@@ -132,7 +132,7 @@ object long:
/** Greater-than comparison of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val gt1: 4L > 2L = true
* val gt2: 2L > 2L = false
@@ -144,7 +144,7 @@ object long:
/** Greater-or-equal comparison of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val ge1: 4L >= 2L = true
* val ge2: 2L >= 3L = false
@@ -156,7 +156,7 @@ object long:
/** Less-or-equal comparison of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val lt1: 4L <= 2L = false
* val lt2: 2L <= 2L = true
@@ -168,7 +168,7 @@ object long:
/** Bitwise and of `X` and `Y`.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val and1: BitwiseAnd[4L, 4L] = 4L
* val and2: BitwiseAnd[10L, 5L] = 0L
@@ -180,7 +180,7 @@ object long:
/** Bitwise or of `X` and `Y`.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val or: BitwiseOr[10L, 11L] = 11L
* ```
@@ -191,7 +191,7 @@ object long:
/** Absolute value of a `Long` singleton type.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val abs: Abs[-1L] = 1L
* ```
@@ -202,7 +202,7 @@ object long:
/** Negation of a `Long` singleton type.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val neg1: Negate[-1L] = 1L
* val neg2: Negate[1L] = -1L
@@ -214,7 +214,7 @@ object long:
/** Minimum of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val min: Min[-1L, 1L] = -1L
* ```
@@ -225,7 +225,7 @@ object long:
/** Maximum of two `Long` singleton types.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val max: Max[-1L, 1L] = 1L
* ```
@@ -239,7 +239,7 @@ object long:
* in other words if it is equal to zero.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val zero_lzc: NumberOfLeadingZeros[0L] = 64
* val eight_lzc: NumberOfLeadingZeros[8L] = 60
@@ -253,7 +253,7 @@ object long:
/** Int conversion of a `Long` singleton type.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val x: ToInt[1L] = 1
* ```
@@ -264,7 +264,7 @@ object long:
/** Float conversion of a `Long` singleton type.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val x: ToFloat[1L] = 1.0f
* ```
@@ -275,7 +275,7 @@ object long:
/** Double conversion of a `Long` singleton type.
* ```scala
* //{
- * import compiletime.ops.long._
+ * import compiletime.ops.long.*
* //}
* val x: ToDouble[1L] = 1.0
* ```
diff --git a/library/src/scala/compiletime/ops/string.scala b/library/src/scala/compiletime/ops/string.scala
index 4c220f992707..63caa9ae6371 100644
--- a/library/src/scala/compiletime/ops/string.scala
+++ b/library/src/scala/compiletime/ops/string.scala
@@ -5,7 +5,7 @@ object string:
/** Concatenation of two `String` singleton types.
* ```scala
* //{
- * import compiletime.ops.string._
+ * import compiletime.ops.string.*
* //}
* val hello: "hello " + "world" = "hello world"
* ```
@@ -16,7 +16,7 @@ object string:
/** Length of a `String` singleton type.
* ```scala
* //{
- * import compiletime.ops.string._
+ * import compiletime.ops.string.*
* //}
* val helloSize: Length["hello"] = 5
* ```
@@ -30,7 +30,7 @@ object string:
* Thus the length of the substring is IEnd-IBeg.
* ```scala
* //{
- * import compiletime.ops.string._
+ * import compiletime.ops.string.*
* //}
* val x: Substring["hamburger", 4, 8] = "urge"
* val y: Substring["smiles", 1, 5] = "mile"
@@ -43,7 +43,7 @@ object string:
* regular expression `String` singleton type.
* ```scala
* //{
- * import compiletime.ops.string._
+ * import compiletime.ops.string.*
* //}
* val x: Matches["unhappy", "un.*"] = true
* ```
@@ -56,7 +56,7 @@ object string:
* the sequence is at index 0, the next at index 1, and so on.
* ```scala
* //{
- * import string._
+ * import string.*
* //}
* val c: CharAt["hello", 0] = 'h'
* ```
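
Taken together, the `compiletime.ops` families touched above let user code compute directly with singleton types. A minimal sketch of how they compose (illustrative, assuming a Scala 3 compiler where these ops are available):

```scala
import scala.compiletime.ops.int.*

// The compiler reduces type-level expressions to singleton types:
val five: 2 + 3 = 5

// Ops compose: Max is applied to the already-reduced sum.
val larger: Max[2 + 3, 4] = 5

// ToString maps a singleton Int type to the corresponding String singleton.
val text: ToString[42] = "42"
```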
diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala
index 8243e7dc4a4b..525f647eaaac 100644
--- a/library/src/scala/quoted/Expr.scala
+++ b/library/src/scala/quoted/Expr.scala
@@ -18,7 +18,7 @@ object Expr {
* Some bindings may be elided as an early optimization.
*/
def betaReduce[T](expr: Expr[T])(using Quotes): Expr[T] =
- import quotes.reflect._
+ import quotes.reflect.*
Term.betaReduce(expr.asTerm) match
case Some(expr1) => expr1.asExpr.asInstanceOf[Expr[T]]
case _ => expr
@@ -28,7 +28,7 @@ object Expr {
* will be equivalent to `'{ $s1; $s2; ...; $e }`.
*/
def block[T](statements: List[Expr[Any]], expr: Expr[T])(using Quotes): Expr[T] = {
- import quotes.reflect._
+ import quotes.reflect.*
Block(statements.map(asTerm), expr.asTerm).asExpr.asInstanceOf[Expr[T]]
}
@@ -69,7 +69,7 @@ object Expr {
* `'{ List($e1, $e2, ...) }` typed as an `Expr[List[T]]`
*/
def ofList[T](xs: Seq[Expr[T]])(using Type[T])(using Quotes): Expr[List[T]] =
- if (xs.isEmpty) Expr(Nil) else '{ List(${Varargs(xs)}: _*) }
+ if xs.isEmpty then Expr(Nil) else '{ List(${Varargs(xs)}*) }
/** Creates an expression that will construct a copy of this tuple
*
@@ -240,7 +240,7 @@ object Expr {
* @tparam T type of the implicit parameter
*/
def summon[T](using Type[T])(using Quotes): Option[Expr[T]] = {
- import quotes.reflect._
+ import quotes.reflect.*
Implicits.search(TypeRepr.of[T]) match {
case iss: ImplicitSearchSuccess => Some(iss.tree.asExpr.asInstanceOf[Expr[T]])
case isf: ImplicitSearchFailure => None
diff --git a/library/src/scala/quoted/ExprMap.scala b/library/src/scala/quoted/ExprMap.scala
index 70af00b16be5..fbe5dee2b342 100644
--- a/library/src/scala/quoted/ExprMap.scala
+++ b/library/src/scala/quoted/ExprMap.scala
@@ -7,7 +7,7 @@ trait ExprMap:
/** Map sub-expressions an expression `e` with a type `T` */
def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = {
- import quotes.reflect._
+ import quotes.reflect.*
final class MapChildren() {
def transformStatement(tree: Statement)(owner: Symbol): Statement = {
diff --git a/library/src/scala/quoted/FromExpr.scala b/library/src/scala/quoted/FromExpr.scala
index f81afc627609..bcd10e4f09ed 100644
--- a/library/src/scala/quoted/FromExpr.scala
+++ b/library/src/scala/quoted/FromExpr.scala
@@ -82,7 +82,7 @@ object FromExpr {
/** Lift a quoted primitive value `'{ x }` into `x` */
private class PrimitiveFromExpr[T <: Boolean | Byte | Short | Int | Long | Float | Double | Char | String] extends FromExpr[T] {
def unapply(expr: Expr[T])(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
def rec(tree: Term): Option[T] = tree match {
case Block(stats, e) => if stats.isEmpty then rec(e) else None
case Inlined(_, bindings, e) => if bindings.isEmpty then rec(e) else None
@@ -133,13 +133,13 @@ object FromExpr {
}
/** Default implementation of `FromExpr[StringContext]`
- * - Transform `'{StringContext(args: _*)}` into `Some(StringContext(args: _*))` if `args` is explicit and each one is liftable
+ * - Transform `'{StringContext(args*)}` into `Some(StringContext(args*))` if `args` is explicit and each one is liftable
* - Otherwise returns `None`
*/
given StringContextFromExpr: FromExpr[StringContext] with {
def unapply(x: Expr[StringContext])(using Quotes) = x match {
- case '{ new StringContext(${Varargs(Exprs(args))}: _*) } => Some(StringContext(args: _*))
- case '{ StringContext(${Varargs(Exprs(args))}: _*) } => Some(StringContext(args: _*))
+ case '{ new StringContext(${Varargs(Exprs(args))}*) } => Some(StringContext(args*))
+ case '{ StringContext(${Varargs(Exprs(args))}*) } => Some(StringContext(args*))
case _ => None
}
}
@@ -428,8 +428,8 @@ object FromExpr {
given SeqFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Seq[T]] with {
def unapply(x: Expr[Seq[T]])(using Quotes) = x match {
case Varargs(Exprs(elems)) => Some(elems)
- case '{ scala.Seq[T](${Varargs(Exprs(elems))}: _*) } => Some(elems)
- case '{ scala.collection.immutable.Seq[T](${Varargs(Exprs(elems))}: _*) } => Some(elems)
+ case '{ scala.Seq[T](${Varargs(Exprs(elems))}*) } => Some(elems)
+ case '{ scala.collection.immutable.Seq[T](${Varargs(Exprs(elems))}*) } => Some(elems)
case '{ ${Expr(x)}: List[T] } => Some(x)
case _ => None
}
@@ -454,10 +454,10 @@ object FromExpr {
*/
given ListFromExpr[T](using Type[T], FromExpr[T]): FromExpr[List[T]] with {
def unapply(x: Expr[List[T]])(using Quotes) = x match {
- case '{ scala.List[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toList)
+ case '{ scala.List[T](${Varargs(Exprs(elems))}*) } => Some(elems.toList)
case '{ scala.List.empty[T] } => Some(Nil)
case '{ Nil } => Some(Nil)
- case '{ scala.collection.immutable.List[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toList)
+ case '{ scala.collection.immutable.List[T](${Varargs(Exprs(elems))}*) } => Some(elems.toList)
case '{ scala.collection.immutable.List.empty[T] } => Some(Nil)
case _ => None
}
@@ -470,9 +470,9 @@ object FromExpr {
*/
given SetFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Set[T]] with {
def unapply(x: Expr[Set[T]])(using Quotes) = x match {
- case '{ Set[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toSet)
+ case '{ Set[T](${Varargs(Exprs(elems))}*) } => Some(elems.toSet)
case '{ Set.empty[T] } => Some(Set.empty[T])
- case '{ scala.collection.immutable.Set[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toSet)
+ case '{ scala.collection.immutable.Set[T](${Varargs(Exprs(elems))}*) } => Some(elems.toSet)
case '{ scala.collection.immutable.Set.empty[T] } => Some(Set.empty[T])
case _ => None
}
@@ -485,9 +485,9 @@ object FromExpr {
*/
given MapFromExpr[T, U](using Type[T], Type[U], FromExpr[T], FromExpr[U]): FromExpr[Map[T, U]] with {
def unapply(x: Expr[Map[T, U]])(using Quotes) = x match {
- case '{ Map[T, U](${Varargs(Exprs(elems))}: _*) } => Some(elems.toMap)
+ case '{ Map[T, U](${Varargs(Exprs(elems))}*) } => Some(elems.toMap)
case '{ Map.empty[T, U] } => Some(Map.empty)
- case '{ scala.collection.immutable.Map[T, U](${Varargs(Exprs(elems))}: _*) } => Some(elems.toMap)
+ case '{ scala.collection.immutable.Map[T, U](${Varargs(Exprs(elems))}*) } => Some(elems.toMap)
case '{ scala.collection.immutable.Map.empty[T, U] } => Some(Map.empty)
case _ => None
}
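
A hedged sketch of how these `FromExpr` instances are consumed inside a macro implementation (`sumConstants` is an illustrative name, not part of the patch):

```scala
import scala.quoted.*

// Expr.value uses the given FromExpr[List[Int]] to recover a static value.
def sumConstants(xs: Expr[List[Int]])(using Quotes): Expr[Int] =
  xs.value match
    case Some(list) => Expr(list.sum) // all elements are known: fold at compile time
    case None       => '{ $xs.sum }   // otherwise defer the sum to runtime
```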
diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala
index 055c560ad64a..884ae9efe35b 100644
--- a/library/src/scala/quoted/Quotes.scala
+++ b/library/src/scala/quoted/Quotes.scala
@@ -8,9 +8,9 @@ import scala.reflect.TypeTest
*
* Usage:
* ```scala
- * import scala.quoted._
+ * import scala.quoted.*
* def myExpr[T](using Quotes): Expr[T] = {
- * import quotes.reflect._
+ * import quotes.reflect.*
* ???
* }
* ```
@@ -24,7 +24,7 @@ transparent inline def quotes(using q: Quotes): q.type = q
* This API does not have the static type guarantees that `Expr` and `Type` provide.
* `Quotes` are generated from an enclosing `${ ... }` or `scala.staging.run`. For example:
* ```scala sc:nocompile
- * import scala.quoted._
+ * import scala.quoted.*
* inline def myMacro: Expr[T] =
* ${ /* (quotes: Quotes) ?=> */ myExpr }
* def myExpr(using Quotes): Expr[T] =
@@ -105,9 +105,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
*
* Usage:
* ```scala
- * import scala.quoted._
+ * import scala.quoted.*
* def f(expr: Expr[Int])(using Quotes) =
- * import quotes.reflect._
+ * import quotes.reflect.*
* val ast: Term = expr.asTerm
* ???
* ```
@@ -2354,10 +2354,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* `ParamClause` encodes the following enumeration
* ```scala
* //{
- * import scala.quoted._
+ * import scala.quoted.*
* def inQuotes(using Quotes) = {
* val q: Quotes = summon[Quotes]
- * import q.reflect._
+ * import q.reflect.*
* //}
* enum ParamClause:
* case TypeParamClause(params: List[TypeDef])
@@ -2606,10 +2606,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* Usage:
* ```scala
* //{
- * import scala.quoted._
+ * import scala.quoted.*
* def f(using Quotes) = {
* val q: Quotes = summon[Quotes]
- * import q.reflect._
+ * import q.reflect.*
* val typeRepr: TypeRepr = ???
* //}
* typeRepr.asType match
@@ -3745,7 +3745,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* ```scala
* //{
* given Quotes = ???
- * import quotes.reflect._
+ * import quotes.reflect.*
* //}
* val moduleName: String = Symbol.freshName("MyModule")
* val parents = List(TypeTree.of[Object])
@@ -3767,7 +3767,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* ```scala
* //{
* given Quotes = ???
- * import quotes.reflect._
+ * import quotes.reflect.*
* //}
* '{
* object MyModule$macro$1 extends Object:
@@ -4122,10 +4122,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* Usages:
* ```scala
* def rhsExpr(using q: Quotes): Expr[Unit] =
- * import q.reflect._
+ * import q.reflect.*
* '{ val y = ???; (y, y) }
* def aValDef(using q: Quotes)(owner: q.reflect.Symbol) =
- * import q.reflect._
+ * import q.reflect.*
* val sym = Symbol.newVal(owner, "x", TypeRepr.of[Unit], Flags.EmptyFlags, Symbol.noSymbol)
* val rhs = rhsExpr(using sym.asQuotes).asTerm
* ValDef(sym, Some(rhs))
@@ -4134,7 +4134,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* ```scala
* //{
* def inQuotes(using q: Quotes) = {
- * import q.reflect._
+ * import q.reflect.*
* //}
* new TreeMap:
* override def transformTerm(tree: Term)(owner: Symbol): Term =
@@ -4749,7 +4749,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* ```scala
* //{
* def inQuotes(using q: Quotes) = {
- * import q.reflect._
+ * import q.reflect.*
* //}
* class MyTreeAccumulator[X] extends TreeAccumulator[X] {
* def foldTree(x: X, tree: Tree)(owner: Symbol): X = ???
@@ -4862,7 +4862,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* ```scala
* //{
* def inQuotes(using q: Quotes) = {
- * import q.reflect._
+ * import q.reflect.*
* //}
* class MyTraverser extends TreeTraverser {
* override def traverseTree(tree: Tree)(owner: Symbol): Unit = ???
@@ -4888,7 +4888,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching =>
* ```scala
* //{
* def inQuotes(using q: Quotes) = {
- * import q.reflect._
+ * import q.reflect.*
* //}
* class MyTreeMap extends TreeMap {
* override def transformTree(tree: Tree)(owner: Symbol): Tree = ???
diff --git a/library/src/scala/quoted/ToExpr.scala b/library/src/scala/quoted/ToExpr.scala
index 0e31c4590a26..042c8ff37a52 100644
--- a/library/src/scala/quoted/ToExpr.scala
+++ b/library/src/scala/quoted/ToExpr.scala
@@ -20,70 +20,70 @@ object ToExpr {
/** Default implementation of `ToExpr[Boolean]` */
given BooleanToExpr[T <: Boolean]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(BooleanConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Byte]` */
given ByteToExpr[T <: Byte]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(ByteConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Short]` */
given ShortToExpr[T <: Short]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(ShortConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Int]` */
given IntToExpr[T <: Int]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(IntConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Long]` */
given LongToExpr[T <: Long]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(LongConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Float]` */
given FloatToExpr[T <: Float]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(FloatConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Double]` */
given DoubleToExpr[T <: Double]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(DoubleConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Char]` */
given CharToExpr[T <: Char]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(CharConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[String]` */
given StringToExpr[T <: String]: ToExpr[T] with {
def apply(x: T)(using Quotes) =
- import quotes.reflect._
+ import quotes.reflect.*
Literal(StringConstant(x)).asExpr.asInstanceOf[Expr[T]]
}
/** Default implementation of `ToExpr[Class[T]]` */
given ClassToExpr[T <: Class[?]]: ToExpr[T] with {
def apply(x: T)(using Quotes) = {
- import quotes.reflect._
+ import quotes.reflect.*
Ref(defn.Predef_classOf).appliedToType(TypeRepr.typeConstructorOf(x)).asExpr.asInstanceOf[Expr[T]]
}
}
@@ -97,63 +97,63 @@ object ToExpr {
/** Default implementation of `ToExpr[Array[T]]` */
given ArrayToExpr[T: Type: ToExpr: ClassTag]: ToExpr[Array[T]] with {
def apply(arr: Array[T])(using Quotes): Expr[Array[T]] =
- '{ Array[T](${Expr(arr.toSeq)}: _*)(${Expr(summon[ClassTag[T]])}) }
+ '{ Array[T](${Expr(arr.toSeq)}*)(${Expr(summon[ClassTag[T]])}) }
}
/** Default implementation of `ToExpr[Array[Boolean]]` */
given ArrayOfBooleanToExpr: ToExpr[Array[Boolean]] with {
def apply(array: Array[Boolean])(using Quotes): Expr[Array[Boolean]] =
if (array.length == 0) '{ Array.emptyBooleanArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[Array[Byte]]` */
given ArrayOfByteToExpr: ToExpr[Array[Byte]] with {
def apply(array: Array[Byte])(using Quotes): Expr[Array[Byte]] =
if (array.length == 0) '{ Array.emptyByteArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[Array[Short]]` */
given ArrayOfShortToExpr: ToExpr[Array[Short]] with {
def apply(array: Array[Short])(using Quotes): Expr[Array[Short]] =
if (array.length == 0) '{ Array.emptyShortArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[Array[Char]]` */
given ArrayOfCharToExpr: ToExpr[Array[Char]] with {
def apply(array: Array[Char])(using Quotes): Expr[Array[Char]] =
if (array.length == 0) '{ Array.emptyCharArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[Array[Int]]` */
given ArrayOfIntToExpr: ToExpr[Array[Int]] with {
def apply(array: Array[Int])(using Quotes): Expr[Array[Int]] =
if (array.length == 0) '{ Array.emptyIntArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[Array[Long]]` */
given ArrayOfLongToExpr: ToExpr[Array[Long]] with {
def apply(array: Array[Long])(using Quotes): Expr[Array[Long]] =
if (array.length == 0) '{ Array.emptyLongArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[Array[Float]]` */
given ArrayOfFloatToExpr: ToExpr[Array[Float]] with {
def apply(array: Array[Float])(using Quotes): Expr[Array[Float]] =
if (array.length == 0) '{ Array.emptyFloatArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[Array[Double]]` */
given ArrayOfDoubleToExpr: ToExpr[Array[Double]] with {
def apply(array: Array[Double])(using Quotes): Expr[Array[Double]] =
if (array.length == 0) '{ Array.emptyDoubleArray }
- else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) }
+ else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}*) }
}
/** Default implementation of `ToExpr[IArray[T]]` */
@@ -183,13 +183,13 @@ object ToExpr {
/** Default implementation of `ToExpr[Set[T]]` */
given SetToExpr[T: Type: ToExpr]: ToExpr[Set[T]] with {
def apply(set: Set[T])(using Quotes): Expr[Set[T]] =
- '{ Set(${Expr(set.toSeq)}: _*) }
+ '{ Set(${Expr(set.toSeq)}*) }
}
/** Default implementation of `ToExpr[Map[T, U]]` */
given MapToExpr[T: Type: ToExpr, U: Type: ToExpr]: ToExpr[Map[T, U]] with {
def apply(map: Map[T, U])(using Quotes): Expr[Map[T, U]] =
- '{ Map(${Expr(map.toSeq)}: _*) }
+ '{ Map(${Expr(map.toSeq)}*) }
}
/** Default implementation of `ToExpr[Option[T]]` */
@@ -430,7 +430,7 @@ object ToExpr {
given StringContextToExpr: ToExpr[StringContext] with {
def apply(stringContext: StringContext)(using Quotes): Expr[StringContext] =
val parts = Varargs(stringContext.parts.map(Expr(_)))
- '{ StringContext($parts: _*) }
+ '{ StringContext($parts*) }
}
}
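
The recurring `xs: _*` to `xs*` rewrite throughout this file is the Scala 3 vararg-splice syntax; a small standalone illustration (not from the patch):

```scala
def sum(nums: Int*): Int = nums.sum

val xs = Seq(1, 2, 3)
val total = sum(xs*) // Scala 3 splice; Scala 2 spelled this as sum(xs: _*)
```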
diff --git a/library/src/scala/quoted/Varargs.scala b/library/src/scala/quoted/Varargs.scala
index e2e74c3879c6..61937fc8fecb 100644
--- a/library/src/scala/quoted/Varargs.scala
+++ b/library/src/scala/quoted/Varargs.scala
@@ -28,7 +28,7 @@ object Varargs {
* ```
*/
def apply[T](xs: Seq[Expr[T]])(using Type[T])(using Quotes): Expr[Seq[T]] = {
- import quotes.reflect._
+ import quotes.reflect.*
Repeated(xs.map(_.asTerm).toList, TypeTree.of[T]).asExpr.asInstanceOf[Expr[Seq[T]]]
}
@@ -43,7 +43,7 @@ object Varargs {
*
*/
def unapply[T](expr: Expr[Seq[T]])(using Quotes): Option[Seq[Expr[T]]] = {
- import quotes.reflect._
+ import quotes.reflect.*
def rec(tree: Term): Option[Seq[Expr[T]]] = tree match {
case Repeated(elems, _) => Some(elems.map(x => x.asExpr.asInstanceOf[Expr[T]]))
case Typed(e, _) => rec(e)
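
A minimal sketch of `Varargs` in use from a macro (the helper name is illustrative):

```scala
import scala.quoted.*

// Varargs packs individual Exprs into one Expr[Seq[T]] suitable for splicing
// into a varargs position; its unapply performs the reverse match.
def makeList(xs: Seq[Expr[Int]])(using Quotes): Expr[List[Int]] =
  '{ List(${ Varargs(xs) }*) }
```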
diff --git a/library/src/scala/reflect/Selectable.scala b/library/src/scala/reflect/Selectable.scala
index 183571c35aa7..6da2b0ff88cd 100644
--- a/library/src/scala/reflect/Selectable.scala
+++ b/library/src/scala/reflect/Selectable.scala
@@ -35,9 +35,9 @@ trait Selectable extends scala.Selectable:
*/
final def applyDynamic(name: String, paramTypes: Class[?]*)(args: Any*): Any =
val rcls = selectedValue.getClass
- val mth = rcls.getMethod(name, paramTypes: _*).nn
+ val mth = rcls.getMethod(name, paramTypes*).nn
ensureAccessible(mth)
- mth.invoke(selectedValue, args.asInstanceOf[Seq[AnyRef]]: _*)
+ mth.invoke(selectedValue, args.asInstanceOf[Seq[AnyRef]]*)
object Selectable:
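
For reference, a hedged sketch of the structural-selection feature that `applyDynamic` backs (names are illustrative):

```scala
import scala.reflect.Selectable.reflectiveSelectable

type Closeable = { def close(): Unit } // a structural type

class Resource:
  def close(): Unit = println("closed")

val r: Closeable = new Resource
r.close() // resolved reflectively through Selectable.applyDynamic
```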
diff --git a/library/src/scala/runtime/Arrays.scala b/library/src/scala/runtime/Arrays.scala
index 96a26aad2486..9f5bdd99a5f4 100644
--- a/library/src/scala/runtime/Arrays.scala
+++ b/library/src/scala/runtime/Arrays.scala
@@ -27,5 +27,5 @@ object Arrays {
/** Create an array of a reference type T.
*/
def newArray[Arr](componentType: Class[?], returnType: Class[Arr], dimensions: Array[Int]): Arr =
- jlr.Array.newInstance(componentType, dimensions: _*).asInstanceOf[Arr]
+ jlr.Array.newInstance(componentType, dimensions*).asInstanceOf[Arr]
}
diff --git a/library/src/scala/runtime/TupleXXL.scala b/library/src/scala/runtime/TupleXXL.scala
index 22c3cc521427..b4a81d60f817 100644
--- a/library/src/scala/runtime/TupleXXL.scala
+++ b/library/src/scala/runtime/TupleXXL.scala
@@ -43,6 +43,6 @@ final class TupleXXL private (es: IArray[Object]) extends Product {
object TupleXXL {
def fromIterator(elems: Iterator[Any]): TupleXXL = new TupleXXL(elems.map(_.asInstanceOf[Object]).toArray.asInstanceOf[IArray[Object]]) // TODO use Iterator.toIArray
def fromIArray(elems: IArray[Object]): TupleXXL = new TupleXXL(elems)
- def apply(elems: Any*): TupleXXL = new TupleXXL(IArray(elems.asInstanceOf[Seq[AnyRef]]: _*))
+ def apply(elems: Any*): TupleXXL = new TupleXXL(IArray(elems.asInstanceOf[Seq[AnyRef]]*))
def unapplySeq(x: TupleXXL): Option[Seq[Any]] = Some(x.elems.asInstanceOf[Array[Object]].toSeq) // TODO use IArray.toSeq
}
diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala
index fbab0c14c9fb..c2a12cec2ecc 100644
--- a/library/src/scala/runtime/stdLibPatches/language.scala
+++ b/library/src/scala/runtime/stdLibPatches/language.scala
@@ -114,6 +114,10 @@ object language:
*/
@compileTimeOnly("`symbolLiterals` can only be used at compile time in import statements")
object symbolLiterals
+
+ /** TODO */
+ @compileTimeOnly("`ascriptionVarargsUnpacking` can only be used at compile time in import statements")
+ object ascriptionVarargsUnpacking
end deprecated
/** Where imported, auto-tupling is disabled.
diff --git a/library/src/scala/util/FromDigits.scala b/library/src/scala/util/FromDigits.scala
index 1577f4103e03..cb73782829ff 100644
--- a/library/src/scala/util/FromDigits.scala
+++ b/library/src/scala/util/FromDigits.scala
@@ -1,6 +1,6 @@
package scala.util
import scala.math.{BigInt}
-import quoted._
+import quoted.*
import annotation.internal.sharable
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala
index 1f8eea8f4688..8ffd8ed28044 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala
@@ -69,7 +69,7 @@ abstract class PcCollector[T](
case _ => rawPath
def collect(
parent: Option[Tree]
- )(tree: Tree, pos: SourcePosition, symbol: Option[Symbol]): T
+ )(tree: Tree | EndMarker, pos: SourcePosition, symbol: Option[Symbol]): T
/**
* @return (adjusted position, should strip backticks)
@@ -423,7 +423,7 @@ abstract class PcCollector[T](
parent: Option[Tree]
): Set[T] =
def collect(
- tree: Tree,
+ tree: Tree | EndMarker,
pos: SourcePosition,
symbol: Option[Symbol] = None
) =
@@ -461,6 +461,9 @@ abstract class PcCollector[T](
case df: NamedDefTree
if df.span.isCorrect && df.nameSpan.isCorrect &&
filter(df) && !isGeneratedGiven(df) =>
+ def collectEndMarker =
+ EndMarker.getPosition(df, pos, sourceText).map:
+ collect(EndMarker(df.symbol), _)
val annots = collectTrees(df.mods.annotations)
val traverser =
new PcCollector.DeepFolderWithParent[Set[T]](
@@ -470,7 +473,7 @@ abstract class PcCollector[T](
occurrences + collect(
df,
pos.withSpan(df.nameSpan)
- )
+ ) ++ collectEndMarker
) { case (set, tree) =>
traverser(set, tree)
}
@@ -635,3 +638,34 @@ case class ExtensionParamOccurence(
sym: Symbol,
methods: List[untpd.Tree]
)
+
+case class EndMarker(symbol: Symbol)
+
+object EndMarker:
+ /**
+ * Matches an end-marker line from its start up to the beginning of the name.
+ * E.g.
+ * end /* some comment */
+ */
+ private val endMarkerRegex = """.*end(/\*.*\*/|\s)+""".r
+ def getPosition(df: NamedDefTree, pos: SourcePosition, sourceText: String)(
+ implicit ct: Context
+ ): Option[SourcePosition] =
+ val name = df.name.toString()
+ val endMarkerLine =
+ sourceText.slice(df.span.start, df.span.end).split('\n').last
+ val index = endMarkerLine.length() - name.length()
+ if index < 0 then None
+ else
+ val (possiblyEndMarker, possiblyEndMarkerName) =
+ endMarkerLine.splitAt(index)
+ Option.when(
+ possiblyEndMarkerName == name &&
+ endMarkerRegex.matches(possiblyEndMarker)
+ )(
+ pos
+ .withStart(df.span.end - name.length())
+ .withEnd(df.span.end)
+ )
+ end getPosition
+end EndMarker
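
To make the intent concrete, a sketch of the end-marker syntax this collector now resolves (the identifier is illustrative):

```scala
def process(a: Int) =
  val doubled = a * 2
  doubled
end process // renaming `process` must now also rewrite this end marker
```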
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala
index 71e36297cbba..aeb9480930f9 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala
@@ -19,7 +19,7 @@ final class PcDocumentHighlightProvider(
def collect(
parent: Option[Tree]
)(
- tree: Tree,
+ tree: Tree | EndMarker,
toAdjust: SourcePosition,
sym: Option[Symbol]
): DocumentHighlight =
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala
index afb858ab3242..2d4a9d8643c9 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala
@@ -24,7 +24,7 @@ import org.eclipse.lsp4j as l
final class PcInlineValueProviderImpl(
val driver: InteractiveDriver,
val params: OffsetParams
-) extends PcCollector[Occurence](driver, params)
+) extends PcCollector[Option[Occurence]](driver, params)
with InlineValueProvider:
val text = params.text.toCharArray()
@@ -32,16 +32,19 @@ final class PcInlineValueProviderImpl(
val position: l.Position = pos.toLsp.getStart()
override def collect(parent: Option[Tree])(
- tree: Tree,
+ tree: Tree | EndMarker,
pos: SourcePosition,
sym: Option[Symbol]
- ): Occurence =
- val (adjustedPos, _) = adjust(pos)
- Occurence(tree, parent, adjustedPos)
+ ): Option[Occurence] =
+ tree match
+ case tree: Tree =>
+ val (adjustedPos, _) = adjust(pos)
+ Some(Occurence(tree, parent, adjustedPos))
+ case _ => None
override def defAndRefs(): Either[String, (Definition, List[Reference])] =
val newctx = driver.currentCtx.fresh.setCompilationUnit(unit)
- val allOccurences = result()
+ val allOccurences = result().flatten
for
definition <- allOccurences
.collectFirst { case Occurence(defn: ValDef, _, pos) =>
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala
index 4477529d7124..56924f3cfded 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala
@@ -34,7 +34,7 @@ final class PcRenameProvider(
def collect(
parent: Option[Tree]
- )(tree: Tree, toAdjust: SourcePosition, sym: Option[Symbol]): l.TextEdit =
+ )(tree: Tree | EndMarker, toAdjust: SourcePosition, sym: Option[Symbol]): l.TextEdit =
val (pos, stripBackticks) = adjust(toAdjust, forRename = true)
l.TextEdit(
pos.toLsp,
diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala
index 5f47b4d0d8bb..d70fa32c2b10 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala
@@ -32,7 +32,7 @@ final class PcSemanticTokensProvider(
* 3. type parameters,
* In all those cases we don't have a specific value for sure.
*/
- private def isDeclaration(tree: Tree) = tree match
+ private def isDeclaration(tree: Tree | EndMarker) = tree match
case df: ValOrDefDef => df.rhs.isEmpty
case df: TypeDef =>
df.rhs match
@@ -49,7 +49,8 @@ final class PcSemanticTokensProvider(
* that the compiler sees them as vals, as it's not clear
* if they should be declaration/definition at all.
*/
- private def isDefinition(tree: Tree) = tree match
+ private def isDefinition(tree: Tree | EndMarker) = tree match
+ case _: EndMarker => true
case df: Bind => true
case df: ValOrDefDef =>
!df.rhs.isEmpty && !df.symbol.isAllOf(Flags.EnumCase)
@@ -62,8 +63,12 @@ final class PcSemanticTokensProvider(
object Collector extends PcCollector[Option[Node]](driver, params):
override def collect(
parent: Option[Tree]
- )(tree: Tree, pos: SourcePosition, symbol: Option[Symbol]): Option[Node] =
- val sym = symbol.fold(tree.symbol)(identity)
+ )(tree: Tree | EndMarker, pos: SourcePosition, symbol: Option[Symbol]): Option[Node] =
+ val sym =
+ tree match
+ case tree: Tree =>
+ symbol.fold(tree.symbol)(identity)
+ case EndMarker(sym) => sym
if !pos.exists || sym == null || sym == NoSymbol then None
else
Some(
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala
index 9ce7939c10fa..29699bd05203 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala
@@ -29,10 +29,9 @@ case class CompletionPos(
):
def sourcePos: SourcePosition = cursorPos.withSpan(Spans.Span(start, end))
+ def stripSuffixEditRange: l.Range = new l.Range(cursorPos.offsetToPos(start), cursorPos.offsetToPos(end))
+ def toEditRange: l.Range = cursorPos.withStart(start).withEnd(cursorPos.point).toLsp
- def toEditRange: l.Range =
- new l.Range(cursorPos.offsetToPos(start), cursorPos.offsetToPos(end))
- end toEditRange
end CompletionPos
object CompletionPos:
diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
index 78f4affe8c49..323f63050377 100644
--- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
+++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala
@@ -151,10 +151,7 @@ class CompletionProvider(
indexedContext: IndexedContext
)(using ctx: Context): CompletionItem =
val printer =
- ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using
- indexedContext
- )
- val editRange = completionPos.toEditRange
+ ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using indexedContext)
// For overloaded signatures we get multiple symbols, so we need
// to recalculate the description
@@ -165,24 +162,22 @@ class CompletionProvider(
val ident = completion.insertText.getOrElse(completion.label)
def mkItem(
- insertText: String,
+ newText: String,
additionalEdits: List[TextEdit] = Nil,
range: Option[LspRange] = None
): CompletionItem =
- val nameEdit = new TextEdit(
- range.getOrElse(editRange),
- insertText
- )
+ val oldText = params.text.substring(completionPos.start, completionPos.end)
+ val editRange = if newText.startsWith(oldText) then completionPos.stripSuffixEditRange
+ else completionPos.toEditRange
+
+ val textEdit = new TextEdit(range.getOrElse(editRange), newText)
+
val item = new CompletionItem(label)
item.setSortText(f"${idx}%05d")
item.setDetail(description)
- item.setFilterText(
- completion.filterText.getOrElse(completion.label)
- )
- item.setTextEdit(nameEdit)
- item.setAdditionalTextEdits(
- (completion.additionalEdits ++ additionalEdits).asJava
- )
+ item.setFilterText(completion.filterText.getOrElse(completion.label))
+ item.setTextEdit(textEdit)
+ item.setAdditionalTextEdits((completion.additionalEdits ++ additionalEdits).asJava)
completion.insertMode.foreach(item.setInsertTextMode)
completion
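
The new range selection in `mkItem` amounts to the following standalone rule (a hedged restatement with simplified types):

```scala
// fullRange covers the whole identifier under the cursor (start..end);
// headRange covers only start..cursor. The suffix after the cursor is
// overwritten only when the completion text already subsumes everything
// the user has typed in the span.
def pickRange[R](oldText: String, newText: String, fullRange: R, headRange: R): R =
  if newText.startsWith(oldText) then fullRange else headRange
```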
diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala
index 8c040d61575c..eb7a0783d941 100644
--- a/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/base/BasePCSuite.scala
@@ -54,7 +54,7 @@ abstract class BasePCSuite extends PcAssertions:
.newInstance("", myclasspath.asJava, scalacOpts.asJava)
protected def config: PresentationCompilerConfig =
- PresentationCompilerConfigImpl().copy(snippetAutoIndent = false, timeoutDelay = if isDebug then 3600 else 5)
+ PresentationCompilerConfigImpl().copy(snippetAutoIndent = false, timeoutDelay = if isDebug then 3600 else 10)
private def inspectDialect(filename: String, code: String) =
val file = tmp.resolve(filename)
diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala
index 213dd7157293..a64a6dfac6a2 100644
--- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala
@@ -1500,3 +1500,47 @@ class CompletionSuite extends BaseCompletionSuite:
|""".stripMargin,
)
+ @Test def `prepend-instead-of-replace` =
+ checkEdit(
+ """|object O:
+ | printl@@println()
+ |""".stripMargin,
+ """|object O:
+ | printlnprintln()
+ |""".stripMargin,
+ assertSingleItem = false
+ )
+
+ @Test def `prepend-instead-of-replace-duplicate-word` =
+ checkEdit(
+ """|object O:
+ | println@@println()
+ |""".stripMargin,
+ """|object O:
+ | printlnprintln()
+ |""".stripMargin,
+ assertSingleItem = false
+ )
+
+ @Test def `replace-when-inside` =
+ checkEdit(
+ """|object O:
+ | print@@ln()
+ |""".stripMargin,
+ """|object O:
+ | println()
+ |""".stripMargin,
+ assertSingleItem = false
+ )
+
+ @Test def `replace-exact-same` =
+ checkEdit(
+ """|object O:
+ | println@@()
+ |""".stripMargin,
+ """|object O:
+ | println()
+ |""".stripMargin,
+ assertSingleItem = false
+ )
+
diff --git a/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala
index 256b0cb1075a..23c81fcf515a 100644
--- a/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/tests/edit/PcRenameSuite.scala
@@ -478,9 +478,33 @@ class PcRenameSuite extends BasePcRenameSuite:
| def <>(f: Int => Int): Bar = Bar(x.map(f))
|}
|
- |val f =
+ |val f =
| for {
| b <- Bar(List(1,2,3))
| } yield b
- |""".stripMargin,
+ |""".stripMargin
+ )
+
+ @Test def `end-marker` =
+ check(
+ """|def <>(a: Int) =
+ | ???
+ |end <>
+ |""".stripMargin
+ )
+
+ @Test def `end-marker-with-comment` =
+ check(
+ """|def <>(a: Int) =
+ | ???
+ |end /* a comment */ <> /* a comment */
+ |""".stripMargin
+ )
+
+ @Test def `end-marker-wrong` =
+ check(
+ """|def <> =
+ | def bar =
+ | ???
+ | end bar""".stripMargin
)
diff --git a/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala
index c1a36bc59ed4..9ef153e51da1 100644
--- a/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/tests/tokens/SemanticTokensSuite.scala
@@ -343,3 +343,11 @@ class SemanticTokensSuite extends BaseSemanticTokensSuite:
|}
|""".stripMargin,
)
+
+ @Test def `end-marker` =
+ check(
+ """|def <>/*method,definition*/ =
+ | 1
+ |end <>/*method,definition*/
+ |""".stripMargin,
+ )
diff --git a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala
index 168ccb033423..0171d2a0d76d 100644
--- a/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala
+++ b/presentation-compiler/test/dotty/tools/pc/utils/DefSymbolCollector.scala
@@ -8,6 +8,7 @@ import dotty.tools.dotc.core.Symbols.*
import dotty.tools.dotc.interactive.InteractiveDriver
import dotty.tools.dotc.util.SourcePosition
import dotty.tools.pc.PcCollector
+import dotty.tools.pc.EndMarker
final class DefSymbolCollector(
driver: InteractiveDriver,
@@ -15,7 +16,7 @@ final class DefSymbolCollector(
) extends PcCollector[Option[Symbol]](driver, params):
def collect(parent: Option[Tree])(
- tree: Tree,
+ tree: Tree | EndMarker,
toAdjust: SourcePosition,
sym: Option[Symbol]
): Option[Symbol] =
diff --git a/scaladoc-js/common/src/utils/html.scala b/scaladoc-js/common/src/utils/html.scala
index 1a7f108a3555..0eadf028000f 100644
--- a/scaladoc-js/common/src/utils/html.scala
+++ b/scaladoc-js/common/src/utils/html.scala
@@ -12,8 +12,8 @@ object HTML {
case class Tag[T <: domhtml.Element](private val elemFactory: () => T):
private def textNode(s: String): Text = document.createTextNode(s)
- def apply(tags: TagArg*): T = apply()(tags:_*)
- def apply(first: AttrArg, rest: AttrArg*): T = apply((first +: rest):_*)()
+ def apply(tags: TagArg*): T = apply()(tags*)
+ def apply(first: AttrArg, rest: AttrArg*): T = apply((first +: rest)*)()
def apply(attrs: AttrArg*)(tags: TagArg*): T =
val elem: T = elemFactory()
def unpackTags(tags: TagArg*): Unit = tags.foreach {
@@ -29,8 +29,8 @@ object HTML {
case s: Seq[AppliedAttr] => unpackAttributes(s*)
}
- unpackTags(tags:_*)
- unpackAttributes(attrs:_*)
+ unpackTags(tags*)
+ unpackAttributes(attrs*)
elem
object Tag:
diff --git a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala
index 4201cae4e2e6..e6ebe0d2cc7a 100644
--- a/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/parsers/WikiCodeBlockParser.scala
@@ -142,7 +142,7 @@ class WikiCodeBlockParser(
codeBlock.setCharsFromContent
block.appendChild(codeBlock)
} else {
- val codeBlock = new Text(SegmentedSequence.create(segments.asScala.toSeq:_*))
+ val codeBlock = new Text(SegmentedSequence.create(segments.asScala.toSeq*))
block.appendChild(codeBlock)
}
}
diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala
index 872f8a4f09c9..20f3335a44ef 100644
--- a/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/renderers/HtmlRenderer.scala
@@ -32,8 +32,8 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do
case _ => Nil)
:+ (Attr("data-pathToRoot") := pathToRoot(page.link.dri))
- val htmlTag = html(attrs: _*)(
- head((mkHead(page) :+ docHead):_*),
+ val htmlTag = html(attrs*)(
+ head((mkHead(page) :+ docHead)*),
body(
if !page.hasFrame then docBody
else mkFrame(page.link, parents, docBody, toc)
@@ -216,7 +216,7 @@ class HtmlRenderer(rootPackage: Member, members: Map[DRI, Member])(using ctx: Do
a(href := pathToPage(link.dri, b.dri))(b.name),
"/"
)).dropRight(1)
- div(cls := "breadcrumbs container")(innerTags:_*)
+ div(cls := "breadcrumbs container")(innerTags*)
val (apiNavOpt, docsNavOpt): (Option[(Boolean, Seq[AppliedTag])], Option[(Boolean, Seq[AppliedTag])]) = buildNavigation(link)
diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala
index 996b422b44fd..612444fd4ffd 100644
--- a/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/renderers/MemberRenderer.scala
@@ -26,7 +26,7 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext
val headNode = m.inheritedFrom.map(form => signatureRenderer.renderLink(form.name, form.dri))
val tailNodes = defs.flatMap(renderDef)
val nodes = headNode.fold(tailNodes.drop(1))(_ +: tailNodes)
- tableRow("Definition Classes", div(nodes:_*))
+ tableRow("Definition Classes", div(nodes*))
case _ => Nil
@@ -250,7 +250,7 @@ class MemberRenderer(signatureRenderer: SignatureRenderer)(using DocContext) ext
val memberInf = memberInfo(member, withBrief = true)
val annots = annotations(member)
- div(topLevelAttr:_*)(
+ div(topLevelAttr*)(
div(cls := "documentableElement-expander")(
Option.when(annots.nonEmpty || originInf.nonEmpty || memberInf.nonEmpty)(button(cls := "icon-button ar show-content")).toList,
annots.map(div(_)).toList,
diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala
index b84c07b4bade..3e49af2e0576 100644
--- a/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/renderers/Resources.scala
@@ -20,7 +20,7 @@ enum Resource(val path: String):
trait Resources(using ctx: DocContext) extends Locations, Writer:
private def dynamicJsData =
val str = jsonObject("filterDefaults" -> jsonObject(
- FilterAttributes.defaultValues.toSeq.map { case (n, v) => n -> jsonString(v) }:_*
+ FilterAttributes.defaultValues.toSeq.map { case (n, v) => n -> jsonString(v) }*
))
Resource.Text("scripts/data.js", s"var scaladocData = $str")
diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala
index 5c6235b14e7d..65c67d3457a5 100644
--- a/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SignatureRenderer.scala
@@ -18,7 +18,7 @@ trait SignatureRenderer:
def renderElement(e: SignaturePart, modifiers: AppliedAttr*): AppliedTag = renderElementWith(e, modifiers*)
def renderLink(name: String, dri: DRI, modifiers: AppliedAttr*) =
- renderLinkContent(name, dri, modifiers:_*)
+ renderLinkContent(name, dri, modifiers*)
def unresolvedLink(content: TagArg, modifiers: AppliedAttr*) =
span(Attr("data-unresolved-link") := "", modifiers)(content)
@@ -26,7 +26,7 @@ trait SignatureRenderer:
def renderLinkContent(content: TagArg, dri: DRI, modifiers: AppliedAttr*) =
link(dri) match
case Some(link) => a(href := link, modifiers)(content)
- case _ => unresolvedLink(content, modifiers:_*)
+ case _ => unresolvedLink(content, modifiers*)
def renderElementWith(e: SignaturePart, modifiers: AppliedAttr*) = e match
case Name(name, dri) =>
@@ -34,7 +34,7 @@ trait SignatureRenderer:
renderLink(name, dri, attrs*)
case Type(name, Some(dri)) =>
val attrs = Seq(Attr("t") := "t") ++ modifiers
- renderLink(name, dri, attrs:_*)
+ renderLink(name, dri, attrs*)
case Type(name, None) => span(Attr("t") := "t")(name)
case Keyword(name) => span(Attr("t") := "k")(name)
case Plain(name) => raw(name)
diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala
index edf9051c0ed7..77d57748a2e8 100644
--- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MarkdownParser.scala
@@ -38,7 +38,7 @@ object MarkdownParser {
new MutableDataSet()
.setFrom(ParserEmulationProfile.COMMONMARK.getOptions)
- .set(Parser.EXTENSIONS, Arrays.asList(extArray:_*))
+ .set(Parser.EXTENSIONS, Arrays.asList(extArray*))
.set(EmojiExtension.ROOT_IMAGE_PATH,
"https://github.global.ssl.fastly.net/images/icons/emoji/")
.set(WikiLinkExtension.LINK_ESCAPE_CHARS, "")
diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala
index 95db8983626a..9fad9e22eeb9 100644
--- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala
@@ -130,7 +130,7 @@ object Preparser {
val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
val bodyTags: mutable.Map[TagKey, List[String]] =
- mutable.Map((tagsWithoutDiagram).toSeq: _*)
+ mutable.Map((tagsWithoutDiagram).toSeq*)
def allTags(key: SimpleTagKey): List[String] =
(bodyTags remove key).getOrElse(Nil).reverse
diff --git a/scaladoc/src/dotty/tools/scaladoc/util/html.scala b/scaladoc/src/dotty/tools/scaladoc/util/html.scala
index 9d832d28ee0b..f7d99eaf4927 100644
--- a/scaladoc/src/dotty/tools/scaladoc/util/html.scala
+++ b/scaladoc/src/dotty/tools/scaladoc/util/html.scala
@@ -9,8 +9,8 @@ object HTML:
type TagArg = AppliedTag | Seq[AppliedTag] | String | Seq[String]
case class Tag(name: String):
- def apply(tags: TagArg*): AppliedTag = apply()(tags:_*)
- def apply(first: AttrArg, rest: AttrArg*): AppliedTag = apply((first +: rest):_*)()
+ def apply(tags: TagArg*): AppliedTag = apply()(tags*)
+ def apply(first: AttrArg, rest: AttrArg*): AppliedTag = apply((first +: rest)*)()
def apply(attrs: AttrArg*)(tags: TagArg*): AppliedTag =
def unpackTags(tags: TagArg*)(using sb: StringBuilder): StringBuilder =
tags.foreach {
@@ -19,7 +19,7 @@ object HTML:
case s: String =>
sb.append(s.escapeReservedTokens)
case s: Seq[AppliedTag | String] =>
- unpackTags(s:_*)
+ unpackTags(s*)
}
sb
val sb = StringBuilder()
@@ -31,7 +31,7 @@ object HTML:
sb.append(" ").append(e)
}
sb.append(">")
- unpackTags(tags:_*)(using sb)
+ unpackTags(tags*)(using sb)
sb.append(s"$name>")
sb
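
Note on the recurring change above: all of these hunks rewrite the vararg splice from the Scala 2 ascription form `xs: _*` to the Scala 3 postfix form `xs*`; the old spelling is deprecated under `-source future`. A minimal sketch of the two forms side by side (the `printAll` helper is hypothetical, for illustration only):

    def printAll(items: String*): Unit = items.foreach(println)

    val xs = Seq("a", "b", "c")
    printAll(xs: _*) // Scala 2 splice, deprecated under -source future
    printAll(xs*)    // Scala 3 splice, the form this patch migrates to

Both spellings elaborate to the same call; the migration is purely syntactic.
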
diff --git a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala
index f137e9e6b13e..fe822df5f8a0 100644
--- a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala
+++ b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala
@@ -11,8 +11,8 @@ class NavigationTest extends BaseHtmlTest:
withHtmlFile(page){ content =>
def test(query: String, el: Seq[NavMenuTestEntry]) =
- content.assertTextsIn(query, el.map(_.name):_*)
- content.assertAttr(query,"href", el.map(_.link):_*)
+ content.assertTextsIn(query, el.map(_.name)*)
+ content.assertAttr(query,"href", el.map(_.link)*)
test(".side-menu>div>span>a", topLevel)
test(".side-menu>div>div>span>a", topLevel.flatMap(_.nested))
diff --git a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala
index e012044156cc..4d558fe492c9 100644
--- a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala
+++ b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala
@@ -27,7 +27,7 @@ class SiteGeneratationTest extends BaseHtmlTest:
content.assertTextsIn(".projectVersion", projectVersion)
content.assertTextsIn("h1", header)
content.assertTextsIn("title", title)
- content.assertTextsIn(".breadcrumbs a", (parents :+ title):_*)
+ content.assertTextsIn(".breadcrumbs a", (parents :+ title)*)
checks(content)
}
diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala
index 71f4b42f3320..690c4ba166f5 100644
--- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala
+++ b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala
@@ -8,7 +8,7 @@ abstract class BaseIntegrationTest(pck: String) extends BaseHtmlTest:
@Test
def testLinks: Unit = withGeneratedDoc(pcks = Seq(pck, "commonlinks")) {
def checkDocLinks(links: String*)(ctx: DocumentContext): Unit =
- ctx.assertAttr(".documentableBrief a, .cover a", "href", links:_*)
+ ctx.assertAttr(".documentableBrief a, .cover a", "href", links*)
ctx.assertNotExists("unresolvedLinkSelector")
def checkUnresolved(ctx: DocumentContext): Unit =
diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala
index 0d33a9363bac..103e95359714 100644
--- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala
+++ b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/QueryParserTests.scala
@@ -13,8 +13,8 @@ class QueryParserTests {
val head = shorthand.head
val tail = shorthand.tail
head match {
- case ((id: String), ch) => Query.QualifiedId(Query.Qual.Id(id), ch, l2q(tail : _*)(last))
- case ((qual: Qual), ch) => Query.QualifiedId(qual, ch, l2q(tail : _*)(last))
+ case ((id: String), ch) => Query.QualifiedId(Query.Qual.Id(id), ch, l2q(tail*)(last))
+ case ((qual: Qual), ch) => Query.QualifiedId(qual, ch, l2q(tail*)(last))
}
}
}
diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala
index c0ed5dbd58fa..db07666d3be1 100644
--- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala
+++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala
@@ -3,6 +3,7 @@ package dotty.tools.tasty
import java.util.UUID
import TastyFormat.{MajorVersion, MinorVersion, ExperimentalVersion, header}
+import TastyHeaderUnpickler.TastyVersion
/**
* The Tasty Header consists of four fields:
@@ -27,12 +28,67 @@ sealed abstract case class TastyHeader(
toolingVersion: String
)
-class TastyHeaderUnpickler(reader: TastyReader) {
+trait UnpicklerConfig {
+ /** The TASTy major version that this reader supports */
+ def majorVersion: Int
+ /** The TASTy minor version that this reader supports */
+ def minorVersion: Int
+ /** The TASTy experimental version that this reader supports */
+ def experimentalVersion: Int
+ /** The description of the upgraded tool that can read the given TASTy version */
+ def upgradedReaderTool(version: TastyVersion): String
+ /** The description of the upgraded tool that can produce the given TASTy version */
+ def upgradedProducerTool(version: TastyVersion): String
+ /** Additional information to help a user fix the outdated TASTy problem */
+ def recompileAdditionalInfo: String
+ /** Additional information to help a user fix the more recent TASTy problem */
+ def upgradeAdditionalInfo(fileVersion: TastyVersion): String
+}
+
+object UnpicklerConfig {
+
+ /** A config where its major, minor and experimental versions are fixed to those in TastyFormat */
+ trait DefaultTastyVersion extends UnpicklerConfig {
+ override final def majorVersion: Int = MajorVersion
+ override final def minorVersion: Int = MinorVersion
+ override final def experimentalVersion: Int = ExperimentalVersion
+ }
+
+ trait Generic extends UnpicklerConfig {
+ final def upgradedProducerTool(version: TastyVersion): String =
+ "a later version"
+
+ final def upgradedReaderTool(version: TastyVersion): String =
+ if (version.isExperimental) s"the version of this tool compatible with TASTy ${version.show}"
+ else s"a newer version of this tool compatible with TASTy ${version.show}"
+
+ final def recompileAdditionalInfo: String = """
+ | Usually this means that the classpath entry of this file should be updated.""".stripMargin
+
+ final def upgradeAdditionalInfo(fileVersion: TastyVersion): String =
+ if (fileVersion.isExperimental && experimentalVersion == 0) {
+ """
+ | Note that this tool does not support reading experimental TASTy.""".stripMargin
+ }
+ else ""
+ }
+
+ /** A config for the TASTy reader of a generic tool */
+ val generic: UnpicklerConfig = new UnpicklerConfig with Generic with DefaultTastyVersion {}
+}
+
+class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) {
import TastyHeaderUnpickler._
import reader._
+ def this(config: UnpicklerConfig, bytes: Array[Byte]) = this(config, new TastyReader(bytes))
+ def this(reader: TastyReader) = this(UnpicklerConfig.generic, reader)
def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
+ private val toolMajor: Int = config.majorVersion
+ private val toolMinor: Int = config.minorVersion
+ private val toolExperimental: Int = config.experimentalVersion
+
/** reads and verifies the TASTy version, extracting the UUID */
def readHeader(): UUID =
readFullHeader().uuid
@@ -45,8 +101,11 @@ class TastyHeaderUnpickler(reader: TastyReader) {
val fileMajor = readNat()
if (fileMajor <= 27) { // old behavior before `tasty-core` 3.0.0-M4
val fileMinor = readNat()
- val signature = signatureString(fileMajor, fileMinor, 0)
- throw new UnpickleException(signature + backIncompatAddendum + toolingAddendum)
+ val fileVersion = TastyVersion(fileMajor, fileMinor, 0)
+ val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental)
+ val signature = signatureString(fileVersion, toolVersion, what = "Backward", tool = None)
+ val fix = recompileFix(toolVersion.minStable)
+ throw new UnpickleException(signature + fix + tastyAddendum)
}
else {
val fileMinor = readNat()
@@ -63,20 +122,38 @@ class TastyHeaderUnpickler(reader: TastyReader) {
fileMajor = fileMajor,
fileMinor = fileMinor,
fileExperimental = fileExperimental,
- compilerMajor = MajorVersion,
- compilerMinor = MinorVersion,
- compilerExperimental = ExperimentalVersion
+ compilerMajor = toolMajor,
+ compilerMinor = toolMinor,
+ compilerExperimental = toolExperimental
)
check(validVersion, {
- val signature = signatureString(fileMajor, fileMinor, fileExperimental)
- val producedByAddendum = s"\nThe TASTy file was produced by $toolingVersion.$toolingAddendum"
- val msg = (
- if (fileExperimental != 0) unstableAddendum
- else if (fileMajor < MajorVersion) backIncompatAddendum
- else forwardIncompatAddendum
+ // failure means that the TASTy file cannot be read, therefore it is either:
+ // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor
+ // version supported by this compiler
+ // - any experimental in an older minor, in which case the library should be recompiled by the stable
+ // compiler in the same minor.
+ // - older experimental in the same minor, in which case the compiler is also experimental, and the library
+ // should be recompiled by the current compiler
+ // - forward incompatible, in which case the compiler must be upgraded to the same version as the file.
+ val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental)
+ val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental)
+
+ val compat = Compatibility.failReason(file = fileVersion, read = toolVersion)
+
+ val what = if (compat < 0) "Backward" else "Forward"
+ val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion))
+ val fix = (
+ if (compat < 0) {
+ val newCompiler =
+ if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable
+ else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable
+ else toolVersion // recompile the experimental library with the current experimental compiler
+ recompileFix(newCompiler)
+ }
+ else upgradeFix(fileVersion)
)
- signature + msg + producedByAddendum
+ signature + fix + tastyAddendum
})
val uuid = new UUID(readUncompressedLong(), readUncompressedLong())
@@ -89,40 +166,75 @@ class TastyHeaderUnpickler(reader: TastyReader) {
private def check(cond: Boolean, msg: => String): Unit = {
if (!cond) throw new UnpickleException(msg)
}
+
+ private def signatureString(
+ fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = {
+ val optProducedBy = tool.fold("")(t => s", produced by $t")
+ s"""$what incompatible TASTy file has version ${fileVersion.show}$optProducedBy,
+ | expected ${toolVersion.validRange}.
+ |""".stripMargin
+ }
+
+ private def recompileFix(producerVersion: TastyVersion) = {
+ val addendum = config.recompileAdditionalInfo
+ val newTool = config.upgradedProducerTool(producerVersion)
+ s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin
+ }
+
+ private def upgradeFix(fileVersion: TastyVersion) = {
+ val addendum = config.upgradeAdditionalInfo(fileVersion)
+ val newTool = config.upgradedReaderTool(fileVersion)
+ s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin
+ }
+
+ private def tastyAddendum: String = """
+ | Please refer to the documentation for information on TASTy versioning:
+ | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin
}
object TastyHeaderUnpickler {
- private def toolingAddendum = (
- if (ExperimentalVersion > 0)
- "\nNote that your tooling is currently using an unstable TASTy version."
- else
- ""
- )
-
- private def signatureString(fileMajor: Int, fileMinor: Int, fileExperimental: Int) = {
- def showMinorVersion(min: Int, exp: Int) = {
- val expStr = if (exp == 0) "" else s" [unstable release: $exp]"
- s"$min$expStr"
- }
- val minorVersion = showMinorVersion(MinorVersion, ExperimentalVersion)
- val fileMinorVersion = showMinorVersion(fileMinor, fileExperimental)
- s"""TASTy signature has wrong version.
- | expected: {majorVersion: $MajorVersion, minorVersion: $minorVersion}
- | found : {majorVersion: $fileMajor, minorVersion: $fileMinorVersion}
- |
- |""".stripMargin
+ private object Compatibility {
+ final val BackwardIncompatibleMajor = -3
+ final val BackwardIncompatibleExperimental = -2
+ final val ExperimentalRecompile = -1
+ final val ExperimentalUpgrade = 1
+ final val ForwardIncompatible = 2
+
+ /** Given that the file can't be read, extract the reason */
+ def failReason(file: TastyVersion, read: TastyVersion): Int =
+ if (file.major == read.major && file.minor == read.minor && file.isExperimental && read.isExperimental) {
+ if (file.experimental < read.experimental) ExperimentalRecompile // recompile library as compiler is too new
+ else ExperimentalUpgrade // they should upgrade compiler as library is too new
+ }
+ else if (file.major < read.major)
+ BackwardIncompatibleMajor // pre 3.0.0
+ else if (file.isExperimental && file.major == read.major && file.minor <= read.minor)
+ // e.g. 3.4.0 reading 3.4.0-RC1-NIGHTLY, or 3.3.0 reading 3.0.2-RC1-NIGHTLY
+ BackwardIncompatibleExperimental
+ else ForwardIncompatible
}
- private def unstableAddendum =
- """This TASTy file was produced by an unstable release.
- |To read this TASTy file, your tooling must be at the same version.""".stripMargin
+ case class TastyVersion(major: Int, minor: Int, experimental: Int) {
+ def isExperimental: Boolean = experimental > 0
+
+ def nextStable: TastyVersion = copy(experimental = 0)
- private def backIncompatAddendum =
- """This TASTy file was produced by an earlier release that is not supported anymore.
- |Please recompile this TASTy with a later version.""".stripMargin
+ def minStable: TastyVersion = copy(minor = 0, experimental = 0)
+
+ def show: String = {
+ val suffix = if (isExperimental) s"-experimental-$experimental" else ""
+ s"$major.$minor$suffix"
+ }
- private def forwardIncompatAddendum =
- """This TASTy file was produced by a more recent, forwards incompatible release.
- |To read this TASTy file, please upgrade your tooling.""".stripMargin
+ def kind: String =
+ if (isExperimental) "experimental TASTy" else "TASTy"
+
+ def validRange: String = {
+ val min = TastyVersion(major, 0, 0)
+ val max = if (experimental == 0) this else TastyVersion(major, minor - 1, 0)
+ val extra = Option.when(experimental > 0)(this)
+ s"stable TASTy from ${min.show} to ${max.show}${extra.fold("")(e => s", or exactly ${e.show}")}"
+ }
+ }
}
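
To see how the new UnpicklerConfig hook composes, here is a hedged sketch of a third-party reader: it mixes Generic (the message templates) with DefaultTastyVersion (the compiled-in version numbers) and passes the result to the new two-argument constructor, while the unchanged single-argument constructors fall back to UnpicklerConfig.generic. The object name is invented for illustration:

    import dotty.tools.tasty.{TastyHeaderUnpickler, UnpicklerConfig}

    // Hypothetical config for an external TASTy-reading tool.
    object MyToolConfig extends UnpicklerConfig.Generic with UnpicklerConfig.DefaultTastyVersion

    def readUuid(bytes: Array[Byte]): java.util.UUID =
      // On a version mismatch this throws UnpickleException carrying the
      // recompile/upgrade advice assembled from the config above.
      new TastyHeaderUnpickler(MyToolConfig, bytes).readHeader()
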
diff --git a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala
deleted file mode 100644
index 9f54c4b3061b..000000000000
--- a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-package dotty.tools.tasty
-
-import org.junit.Assert._
-import org.junit.{Test, Ignore}
-
-import TastyFormat._
-import TastyBuffer._
-
-@Ignore // comment if you want to experiment with error messages
-class TastyHeaderUnpicklerTest {
-
- import TastyHeaderUnpicklerTest._
-
- @Test def vanilla: Unit = {
- runTest(MajorVersion, MinorVersion, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345")
- }
-
- @Test def failBumpExperimental: Unit = {
- (runTest(MajorVersion, MinorVersion, ExperimentalVersion + 1, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345"))
- }
-
- @Test def failBumpMinor: Unit = {
- (runTest(MajorVersion, MinorVersion + 1, ExperimentalVersion, "Scala 3.1.0-RC1"))
- }
-
- @Test def failBumpMajor: Unit = {
- (runTest(MajorVersion + 1, MinorVersion, ExperimentalVersion, "Scala 4.0.0-M1"))
- }
-
- @Test def failBumpMajorFinal: Unit = {
- (runTest(MajorVersion + 1, MinorVersion, 0, "Scala 4.0.0"))
- }
-
- @Test def okSubtractExperimental: Unit = {
- (runTest(MajorVersion, MinorVersion, ExperimentalVersion - 1, "Scala 3.0.0"))
- }
-
- @Test def okSubtractMinor: Unit = {
- (runTest(MajorVersion, MinorVersion - 1, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345"))
- }
-
- @Test def failSubtractMajor: Unit = {
- (runTest(MajorVersion - 1, MinorVersion, ExperimentalVersion, "Scala 3.0.0-M4-bin-SNAPSHOT-git-12345"))
- }
-
-}
-
-object TastyHeaderUnpicklerTest {
-
-
- def fillHeader(maj: Int, min: Int, exp: Int, compiler: String): TastyBuffer = {
- val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8)
- val buf = new TastyBuffer(header.length + 32 + compilerBytes.length)
- for (ch <- header) buf.writeByte(ch.toByte)
- buf.writeNat(maj)
- buf.writeNat(min)
- buf.writeNat(exp)
- buf.writeNat(compilerBytes.length)
- buf.writeBytes(compilerBytes, compilerBytes.length)
- buf.writeUncompressedLong(237478L)
- buf.writeUncompressedLong(324789L)
- buf
- }
-
- def runTest(maj: Int, min: Int, exp: Int, compiler: String): Unit = {
- val headerBuffer = fillHeader(maj, min, exp, compiler)
- val bs = headerBuffer.bytes.clone
-
- val hr = new TastyHeaderUnpickler(bs)
-
- hr.readFullHeader()
- }
-
- def expectUnpickleError(op: => Unit) = {
- try {
- op
- fail()
- }
- catch {
- case err: UnpickleException => ()
- }
- }
-
-}
diff --git a/tests/init-global/pos/i18629.scala b/tests/init-global/pos/i18629.scala
new file mode 100644
index 000000000000..f97c21ee918d
--- /dev/null
+++ b/tests/init-global/pos/i18629.scala
@@ -0,0 +1,6 @@
+object Foo {
+ val bar = List() match {
+ case List() => ???
+ case _ => ???
+ }
+}
diff --git a/tests/init/pos/i18407/base_0.scala b/tests/init/pos/i18407/base_0.scala
new file mode 100644
index 000000000000..892ff56e2ab1
--- /dev/null
+++ b/tests/init/pos/i18407/base_0.scala
@@ -0,0 +1,4 @@
+// base_0.scala
+trait BaseTest extends AnyFreeSpecLike {
+ "empty-test" - {} // ok if we comment out this line
+}
diff --git a/tests/init/pos/i18407/macros_0.scala b/tests/init/pos/i18407/macros_0.scala
new file mode 100644
index 000000000000..83a5cb7a81c2
--- /dev/null
+++ b/tests/init/pos/i18407/macros_0.scala
@@ -0,0 +1,37 @@
+// macros_0.scala
+object source {
+ import scala.quoted._
+
+ class Position()
+
+ object Position {
+ def withPosition[T](
+ fun: Expr[Position => T]
+ )(using quotes: Quotes, typeOfT: Type[T]): Expr[T] = {
+ '{
+ ${ fun }.apply(new source.Position())
+ }
+ }
+ }
+}
+
+trait AnyFreeSpecLike {
+ import scala.language.implicitConversions
+
+ protected final class FreeSpecStringWrapper(
+ string: String,
+ pos: source.Position
+ ) {
+ def -(fun: => Unit): Unit = fun
+ }
+
+ inline implicit def convertToFreeSpecStringWrapper(
+ s: String
+ ): FreeSpecStringWrapper = {
+ ${
+ source.Position.withPosition[FreeSpecStringWrapper]('{
+ (pos: source.Position) => new FreeSpecStringWrapper(s, pos)
+ })
+ }
+ }
+}
diff --git a/tests/init/pos/i18407/test_1.scala b/tests/init/pos/i18407/test_1.scala
new file mode 100644
index 000000000000..d3050da180b1
--- /dev/null
+++ b/tests/init/pos/i18407/test_1.scala
@@ -0,0 +1,4 @@
+class MyTest extends BaseTest {
+ "empty-test" - {}
+ private val myObject = new {}
+}
diff --git a/tests/neg-custom-args/captures/buffers.check b/tests/neg-custom-args/captures/buffers.check
new file mode 100644
index 000000000000..07acea3c48e3
--- /dev/null
+++ b/tests/neg-custom-args/captures/buffers.check
@@ -0,0 +1,26 @@
+-- Error: tests/neg-custom-args/captures/buffers.scala:11:6 ------------------------------------------------------------
+11 | var elems: Array[A] = new Array[A](10) // error // error
+ | ^
+ | Mutable variable elems cannot have type Array[A] since
+ | that type refers to the type variable A, which is not sealed.
+-- Error: tests/neg-custom-args/captures/buffers.scala:16:38 -----------------------------------------------------------
+16 | def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error
+ | ^^^^^^^^^^^
+ | Sealed type variable A cannot be instantiated to box A^? since
+ | that type refers to the type variable A, which is not sealed.
+ | This is often caused by a local capability in an argument of constructor ArrayBuffer
+ | leaking as part of its result.
+-- Error: tests/neg-custom-args/captures/buffers.scala:11:13 -----------------------------------------------------------
+11 | var elems: Array[A] = new Array[A](10) // error // error
+ | ^^^^^^^^
+ | Array cannot have element type A since
+ | that type variable is not sealed.
+ | Since arrays are mutable, they have to be treated like variables,
+ | so their element type must be sealed.
+-- Error: tests/neg-custom-args/captures/buffers.scala:22:9 ------------------------------------------------------------
+22 | val x: Array[A] = new Array[A](10) // error
+ | ^^^^^^^^
+ | Array cannot have element type A since
+ | that type variable is not sealed.
+ | Since arrays are mutable, they have to be treated like variables,
+ | so their element type must be sealed.
diff --git a/tests/neg-custom-args/captures/buffers.scala b/tests/neg-custom-args/captures/buffers.scala
new file mode 100644
index 000000000000..760ddab96ae5
--- /dev/null
+++ b/tests/neg-custom-args/captures/buffers.scala
@@ -0,0 +1,30 @@
+import reflect.ClassTag
+
+class Buffer[A]
+
+class ArrayBuffer[sealed A: ClassTag] extends Buffer[A]:
+ var elems: Array[A] = new Array[A](10)
+ def add(x: A): this.type = ???
+ def at(i: Int): A = ???
+
+class ArrayBufferBAD[A: ClassTag] extends Buffer[A]:
+ var elems: Array[A] = new Array[A](10) // error // error
+ def add(x: A): this.type = ???
+ def at(i: Int): A = ???
+
+object ArrayBuffer:
+ def make[A: ClassTag](xs: A*) = new ArrayBuffer: // error
+ elems = xs.toArray
+ def apply[sealed A: ClassTag](xs: A*) = new ArrayBuffer:
+ elems = xs.toArray // ok
+
+class EncapsArray[A: ClassTag]:
+ val x: Array[A] = new Array[A](10) // error
+
+
+
+
+
+
+
+
diff --git a/tests/neg-custom-args/captures/cc-this.check b/tests/neg-custom-args/captures/cc-this.check
index 335302c5c259..070e815d6d45 100644
--- a/tests/neg-custom-args/captures/cc-this.check
+++ b/tests/neg-custom-args/captures/cc-this.check
@@ -12,4 +12,4 @@
-- Error: tests/neg-custom-args/captures/cc-this.scala:17:8 ------------------------------------------------------------
17 | class C4(val f: () => Int) extends C3 // error
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- | reference (C4.this.f : () => Int) is not included in the allowed capture set {} of pure base class class C3
+ |reference (C4.this.f : () => Int) captured by this self type is not included in the allowed capture set {} of pure base class class C3
diff --git a/tests/neg-custom-args/captures/cc-this2.check b/tests/neg-custom-args/captures/cc-this2.check
index 5e43a45b67f5..bd9a1085d262 100644
--- a/tests/neg-custom-args/captures/cc-this2.check
+++ b/tests/neg-custom-args/captures/cc-this2.check
@@ -1,6 +1,12 @@
--- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 --------------------------------------------------------
+-- Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:3:8 --------------------------------------------------------
+3 | this: D^ => // error
+ | ^^
+ |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class C
+-- [E058] Type Mismatch Error: tests/neg-custom-args/captures/cc-this2/D_2.scala:2:6 -----------------------------------
2 |class D extends C: // error
- |^
- |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class C
-3 | this: D^ =>
+ | ^
+ | illegal inheritance: self type D^ of class D does not conform to self type C
+ | of parent class C
+ |
+ | longer explanation available when compiling with `-explain`
diff --git a/tests/neg-custom-args/captures/cc-this2/D_2.scala b/tests/neg-custom-args/captures/cc-this2/D_2.scala
index b22e5e456092..de1a722f73a9 100644
--- a/tests/neg-custom-args/captures/cc-this2/D_2.scala
+++ b/tests/neg-custom-args/captures/cc-this2/D_2.scala
@@ -1,3 +1,3 @@
class D extends C: // error
- this: D^ =>
+ this: D^ => // error
diff --git a/tests/neg-custom-args/captures/exception-definitions.check b/tests/neg-custom-args/captures/exception-definitions.check
index 16d623e64f7c..72b88f252e59 100644
--- a/tests/neg-custom-args/captures/exception-definitions.check
+++ b/tests/neg-custom-args/captures/exception-definitions.check
@@ -1,13 +1,12 @@
--- Error: tests/neg-custom-args/captures/exception-definitions.scala:2:6 -----------------------------------------------
-2 |class Err extends Exception: // error
- |^
- |reference (caps.cap : caps.Cap) is not included in the allowed capture set {} of pure base class class Throwable
-3 | self: Err^ =>
+-- Error: tests/neg-custom-args/captures/exception-definitions.scala:3:8 -----------------------------------------------
+3 | self: Err^ => // error
+ | ^^^^
+ |reference (caps.cap : caps.Cap) captured by this self type is not included in the allowed capture set {} of pure base class class Throwable
-- Error: tests/neg-custom-args/captures/exception-definitions.scala:7:12 ----------------------------------------------
7 | val x = c // error
| ^
- |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of pure base class class Throwable
--- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:8 -----------------------------------------------
+ |(c : Any^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Err2
+-- Error: tests/neg-custom-args/captures/exception-definitions.scala:8:13 ----------------------------------------------
8 | class Err3(c: Any^) extends Exception // error
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of pure base class class Throwable
+ | ^
+ | reference (Err3.this.c : Any^) is not included in the allowed capture set {} of the self type of class Err3
diff --git a/tests/neg-custom-args/captures/exception-definitions.scala b/tests/neg-custom-args/captures/exception-definitions.scala
index a19b751825b8..fbc9f3fd1d33 100644
--- a/tests/neg-custom-args/captures/exception-definitions.scala
+++ b/tests/neg-custom-args/captures/exception-definitions.scala
@@ -1,6 +1,6 @@
-class Err extends Exception: // error
- self: Err^ =>
+class Err extends Exception:
+ self: Err^ => // error
def test(c: Any^) =
class Err2 extends Exception:
diff --git a/tests/neg-custom-args/captures/filevar.scala b/tests/neg-custom-args/captures/filevar.scala
index c8280e2ff3b7..34588617c0b8 100644
--- a/tests/neg-custom-args/captures/filevar.scala
+++ b/tests/neg-custom-args/captures/filevar.scala
@@ -5,7 +5,7 @@ class File:
def write(x: String): Unit = ???
class Service:
- var file: File^{cap[Service]} = uninitialized
+ var file: File^{cap[Service]} = uninitialized // error
def log = file.write("log")
def withFile[T](op: (l: caps.Cap) ?-> (f: File^{l}) => T): T =
diff --git a/tests/neg-custom-args/captures/leaked-curried.check b/tests/neg-custom-args/captures/leaked-curried.check
index c23d1516acf5..3f0a9800a4ec 100644
--- a/tests/neg-custom-args/captures/leaked-curried.check
+++ b/tests/neg-custom-args/captures/leaked-curried.check
@@ -2,10 +2,7 @@
14 | () => () => io // error
| ^^
|(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Fuzz
--- [E058] Type Mismatch Error: tests/neg-custom-args/captures/leaked-curried.scala:15:10 -------------------------------
-15 | class Foo extends Box, Pure: // error
- | ^
- | illegal inheritance: self type Foo^{io} of class Foo does not conform to self type Pure
- | of parent trait Pure
- |
- | longer explanation available when compiling with `-explain`
+-- Error: tests/neg-custom-args/captures/leaked-curried.scala:17:20 ----------------------------------------------------
+17 | () => () => io // error
+ | ^^
+ |(io : Cap^) cannot be referenced here; it is not included in the allowed capture set {} of the self type of class Foo
diff --git a/tests/neg-custom-args/captures/leaked-curried.scala b/tests/neg-custom-args/captures/leaked-curried.scala
index a7c48219b450..f9238259e065 100644
--- a/tests/neg-custom-args/captures/leaked-curried.scala
+++ b/tests/neg-custom-args/captures/leaked-curried.scala
@@ -12,8 +12,8 @@ def main(): Unit =
self =>
val get: () ->{} () ->{io} Cap^ =
() => () => io // error
- class Foo extends Box, Pure: // error
+ class Foo extends Box, Pure:
val get: () ->{} () ->{io} Cap^ =
- () => () => io
+ () => () => io // error
new Foo
val bad = leaked.get()().use() // using a leaked capability
diff --git a/tests/neg-custom-args/captures/levels.check b/tests/neg-custom-args/captures/levels.check
index f91f90fb652f..c0cc7f0a759c 100644
--- a/tests/neg-custom-args/captures/levels.check
+++ b/tests/neg-custom-args/captures/levels.check
@@ -1,8 +1,8 @@
-- Error: tests/neg-custom-args/captures/levels.scala:6:16 -------------------------------------------------------------
6 | private var v: T = init // error
| ^
- | mutable variable v cannot have type T since
- | that type refers to the type variable T, which is not sealed.
+ | Mutable variable v cannot have type T since
+ | that type variable is not sealed.
-- Error: tests/neg-custom-args/captures/levels.scala:17:13 ------------------------------------------------------------
17 | val _ = Ref[String => String]((x: String) => x) // error
| ^^^^^^^^^^^^^^^^^^^^^
diff --git a/tests/neg-custom-args/captures/localcaps.check b/tests/neg-custom-args/captures/localcaps.check
new file mode 100644
index 000000000000..b09702749d10
--- /dev/null
+++ b/tests/neg-custom-args/captures/localcaps.check
@@ -0,0 +1,12 @@
+-- Error: tests/neg-custom-args/captures/localcaps.scala:4:12 ----------------------------------------------------------
+4 | def x: C^{cap[d]} = ??? // error
+ | ^^^^^^
+ | `d` does not name an outer definition that represents a capture level
+-- Error: tests/neg-custom-args/captures/localcaps.scala:9:47 ----------------------------------------------------------
+9 | private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error
+ | ^^^^^^^
+ | `z2` does not name an outer definition that represents a capture level
+-- Error: tests/neg-custom-args/captures/localcaps.scala:6:6 -----------------------------------------------------------
+6 | def y: C^{cap[C]} = ??? // error
+ | ^
+ | local root (cap[C] : caps.Cap) cannot appear in type of class C
diff --git a/tests/neg-custom-args/captures/localcaps.scala b/tests/neg-custom-args/captures/localcaps.scala
index f5227bfef96b..049a1ee0d775 100644
--- a/tests/neg-custom-args/captures/localcaps.scala
+++ b/tests/neg-custom-args/captures/localcaps.scala
@@ -3,7 +3,7 @@ class C:
def x: C^{cap[d]} = ??? // error
- def y: C^{cap[C]} = ??? // ok
+ def y: C^{cap[C]} = ??? // error
private val z = (c0: caps.Cap) => (x: Int) => (c: C^{cap[C]}) => x // ok
private val z2 = identity((x: Int) => (c: C^{cap[z2]}) => x) // error
diff --git a/tests/neg-custom-args/captures/pairs.check b/tests/neg-custom-args/captures/pairs.check
index 38712469879f..9d1b3a76e164 100644
--- a/tests/neg-custom-args/captures/pairs.check
+++ b/tests/neg-custom-args/captures/pairs.check
@@ -12,3 +12,11 @@
| Required: Cap^ ->{d} Unit
|
| longer explanation available when compiling with `-explain`
+-- Error: tests/neg-custom-args/captures/pairs.scala:6:8 ---------------------------------------------------------------
+6 | def fst: Cap^{cap[Pair]} ->{x} Unit = x // error
+ | ^
+ | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair
+-- Error: tests/neg-custom-args/captures/pairs.scala:7:8 ---------------------------------------------------------------
+7 | def snd: Cap^{cap[Pair]} ->{y} Unit = y // error
+ | ^
+ | local root (cap[Pair] : caps.Cap) cannot appear in type of class Pair
diff --git a/tests/neg-custom-args/captures/pairs.scala b/tests/neg-custom-args/captures/pairs.scala
index 4fc495d60f95..99b27639f729 100644
--- a/tests/neg-custom-args/captures/pairs.scala
+++ b/tests/neg-custom-args/captures/pairs.scala
@@ -3,8 +3,8 @@
object Monomorphic2:
class Pair(x: Cap => Unit, y: Cap => Unit):
- def fst: Cap^{cap[Pair]} ->{x} Unit = x
- def snd: Cap^{cap[Pair]} ->{y} Unit = y
+ def fst: Cap^{cap[Pair]} ->{x} Unit = x // error
+ def snd: Cap^{cap[Pair]} ->{y} Unit = y // error
def test(c: Cap, d: Cap) =
def f(x: Cap): Unit = if c == x then ()
diff --git a/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala
new file mode 100644
index 000000000000..0daecafbf9d0
--- /dev/null
+++ b/tests/neg-custom-args/captures/recursive-leaking-local-cap.scala
@@ -0,0 +1,22 @@
+import language.experimental.captureChecking
+trait Cap:
+ def use: Int = 42
+
+def usingCap[sealed T](op: Cap^ => T): T = ???
+
+def badTest(): Unit =
+ def bad(b: Boolean)(c: Cap^): Cap^{cap[bad]} = // error
+ if b then c
+ else
+ val leaked = usingCap[Cap^{cap[bad]}](bad(true))
+ leaked.use // boom
+ c
+
+ usingCap[Unit]: c0 =>
+ bad(false)(c0)
+
+class Bad:
+ def foo: Cap^{cap[Bad]} = ??? // error
+ private def bar: Cap^{cap[Bad]} = ??? // ok
+
+
diff --git a/tests/neg-custom-args/captures/sealed-classes.scala b/tests/neg-custom-args/captures/sealed-classes.scala
new file mode 100644
index 000000000000..b8cb0acbf5c5
--- /dev/null
+++ b/tests/neg-custom-args/captures/sealed-classes.scala
@@ -0,0 +1,21 @@
+abstract class C1[A1]:
+ def set(x: A1): Unit
+ def get: A1
+
+trait Co[+A]:
+ def get: A
+
+class C2[sealed A2] extends C1[A2], Co[A2]: // ok
+ private var x: A2 = ???
+ def set(x: A2): Unit =
+ this.x = x
+ def get: A2 = x
+
+class C3[A3] extends C2[A3] // error
+
+abstract class C4[sealed A4] extends Co[A4] // ok
+
+abstract class C5[sealed +A5] extends Co[A5] // ok
+
+abstract class C6[A6] extends C5[A6] // error
+
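
A corollary of the two errors above: the fix is to propagate the sealed modifier down the hierarchy, as C4 and C5 already do. A one-line sketch (the class name is illustrative):

    class C3fixed[sealed A3] extends C2[A3] // ok: A3 is itself sealed
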
diff --git a/tests/neg-custom-args/captures/sealed-leaks.check b/tests/neg-custom-args/captures/sealed-leaks.check
new file mode 100644
index 000000000000..f7098eba32b6
--- /dev/null
+++ b/tests/neg-custom-args/captures/sealed-leaks.check
@@ -0,0 +1,50 @@
+-- [E129] Potential Issue Warning: tests/neg-custom-args/captures/sealed-leaks.scala:31:6 ------------------------------
+31 | ()
+ | ^^
+ | A pure expression does nothing in statement position
+ |
+ | longer explanation available when compiling with `-explain`
+-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:12:27 ------------------------------------------------------
+12 | val later2 = usingLogFile[(() => Unit) | Null] { f => () => f.write(0) } // error
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | Sealed type variable T cannot be instantiated to (() => Unit) | Null since
+ | that type captures the root capability `cap`.
+ | This is often caused by a local capability in an argument of method usingLogFile
+ | leaking as part of its result.
+-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/sealed-leaks.scala:19:26 ---------------------------------
+19 | usingLogFile { f => x = f } // error
+ | ^
+ | Found: (f : java.io.FileOutputStream^)
+ | Required: (java.io.FileOutputStream | Null)^{cap[Test2]}
+ |
+ | longer explanation available when compiling with `-explain`
+-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:30:10 ------------------------------------------------------
+30 | var x: T = y // error
+ | ^
+ | Mutable variable x cannot have type T since
+ | that type variable is not sealed.
+-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:39:8 -------------------------------------------------------
+39 | var x: T = y // error
+ | ^
+ | Mutable variable x cannot have type T since
+ | that type variable is not sealed.
+ |
+ | Note that variable x does not count as local since it is captured by an anonymous function
+-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:43:8 -------------------------------------------------------
+43 | var x: T = y // error
+ | ^
+ |Mutable variable x cannot have type T since
+ |that type variable is not sealed.
+ |
+ |Note that variable x does not count as local since it is captured by an anonymous function argument in a call to method identity
+-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:47:8 -------------------------------------------------------
+47 | var x: T = y // error
+ | ^
+ | Mutable variable x cannot have type T since
+ | that type variable is not sealed.
+ |
+ | Note that variable x does not count as local since it is captured by method foo
+-- Error: tests/neg-custom-args/captures/sealed-leaks.scala:11:14 ------------------------------------------------------
+11 | val later = usingLogFile { f => () => f.write(0) } // error
+ | ^^^^^^^^^^^^
+ | local reference f leaks into outer capture set of type parameter T of method usingLogFile
diff --git a/tests/neg-custom-args/captures/sealed-leaks.scala b/tests/neg-custom-args/captures/sealed-leaks.scala
index a7acf77b5678..2555ba8a3e07 100644
--- a/tests/neg-custom-args/captures/sealed-leaks.scala
+++ b/tests/neg-custom-args/captures/sealed-leaks.scala
@@ -18,4 +18,34 @@ def Test2 =
usingLogFile { f => x = f } // error
- later()
\ No newline at end of file
+ later()
+
+def Test3 =
+ def f[T](y: T) =
+ var x: T = y
+ ()
+
+ class C[T](y: T):
+ object o:
+ var x: T = y // error
+ ()
+
+ class C2[T](y: T):
+ def f =
+ var x: T = y // ok
+ ()
+
+ def g1[T](y: T): T => Unit =
+ var x: T = y // error
+ y => x = y
+
+ def g2[T](y: T): T => Unit =
+ var x: T = y // error
+ identity(y => x = y)
+
+ def g3[T](y: T): Unit =
+ var x: T = y // error
+ def foo =
+ x = y
+ ()
+
diff --git a/tests/neg-custom-args/captures/sealed-refs.scala b/tests/neg-custom-args/captures/sealed-refs.scala
new file mode 100644
index 000000000000..05fa483acf28
--- /dev/null
+++ b/tests/neg-custom-args/captures/sealed-refs.scala
@@ -0,0 +1,42 @@
+class Ref[sealed A](init: A):
+ this: Ref[A]^ =>
+ private var x: A = init
+ def get: A = x
+ def set(x: A): Unit = this.x = x
+
+class It[X]:
+ this: It[X]^ =>
+
+def f1[B1](x: B1, next: B1 -> B1) =
+ var r = x // ok
+ r = next(x)
+ r
+
+def f2[B2](x: B2, next: B2 -> B2) =
+ val r = Ref[B2](x) // error
+ r.set(next(x))
+ r.get
+
+def g[sealed B](x: B, next: B -> B) =
+ val r = Ref[B](x) // ok
+ r.set(next(x))
+ r.get
+
+import annotation.unchecked.uncheckedCaptures
+
+def h[B](x: B, next: B -> B) =
+ val r = Ref[B @uncheckedCaptures](x) // ok
+ r.set(next(x))
+ r.get
+
+def f3[B](x: B, next: B -> B) =
+ val r: Ref[B^{cap[f3]}] = Ref[B^{cap[f3]}](x) // error
+ r.set(next(x))
+ val y = r.get
+ ()
+
+def f4[B](x: B, next: B -> B) =
+ val r: Ref[B]^{cap[f4]} = Ref[B](x) // error
+ r.set(next(x))
+ val y = r.get
+ ()
\ No newline at end of file
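
The f2/g/h triple above pins down the rule: instantiating Ref's sealed type parameter requires the caller's own type variable to be sealed, with `@uncheckedCaptures` as the explicit opt-out. A hedged sketch of that opt-out applied to the rejected f2 (assuming the Ref class defined in the test):

    import language.experimental.captureChecking
    import annotation.unchecked.uncheckedCaptures

    def f2fixed[B2](x: B2, next: B2 -> B2) =
      val r = Ref[B2 @uncheckedCaptures](x) // waives the sealed check that rejects f2
      r.set(next(x))
      r.get
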
diff --git a/tests/neg/class-mods.scala b/tests/neg/class-mods.scala
index 60e9fb279364..cf4348ad42d7 100644
--- a/tests/neg/class-mods.scala
+++ b/tests/neg/class-mods.scala
@@ -2,7 +2,7 @@ open final class Foo1 // error
sealed open class Foo2 // error
open type T1 // error
-sealed type T2 // error
+type T2 // ok
abstract type T3 // error
abstract open type T4 // error
diff --git a/tests/neg/with-type-operator-future-migration.check b/tests/neg/with-type-operator-future-migration.check
new file mode 100644
index 000000000000..845601349c83
--- /dev/null
+++ b/tests/neg/with-type-operator-future-migration.check
@@ -0,0 +1,7 @@
+-- [E003] Syntax Error: tests/neg/with-type-operator-future-migration.scala:5:13 ---------------------------------------
+5 |def foo: Int with String = ??? // error
+ | ^^^^
+ | with as a type operator has been deprecated; use & instead
+ | This construct can be rewritten automatically under -rewrite -source future-migration.
+ |
+ | longer explanation available when compiling with `-explain`
diff --git a/tests/neg/with-type-operator-future-migration.scala b/tests/neg/with-type-operator-future-migration.scala
new file mode 100644
index 000000000000..3ed2e3a8f067
--- /dev/null
+++ b/tests/neg/with-type-operator-future-migration.scala
@@ -0,0 +1,5 @@
+//> using options -Werror
+
+import scala.language.`future-migration`
+
+def foo: Int with String = ??? // error
diff --git a/tests/neg/with-type-operator-future.scala b/tests/neg/with-type-operator-future.scala
new file mode 100644
index 000000000000..3f33ebde8708
--- /dev/null
+++ b/tests/neg/with-type-operator-future.scala
@@ -0,0 +1,3 @@
+import scala.language.`future`
+
+def foo: Int with String = ??? // error
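
Together with the `.check` file above, these two tests pin down the staging of the change: under `-source future` the `with` type operator is an error outright, while under `-source future-migration` it is a migration warning (escalated here by -Werror) with an automatic rewrite available. The rewrite itself is just the intersection operator:

    // before (deprecated as a type operator):
    //   def foo: Int with String = ???
    // after `-rewrite -source future-migration`:
    def foo: Int & String = ???
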
diff --git a/tests/pos-custom-args/captures/sealed-lowerbound.scala b/tests/pos-custom-args/captures/sealed-lowerbound.scala
new file mode 100644
index 000000000000..e848f784cddc
--- /dev/null
+++ b/tests/pos-custom-args/captures/sealed-lowerbound.scala
@@ -0,0 +1,12 @@
+def foo[sealed B](x: B): B = x
+
+def bar[B, sealed A >: B](x: A): A = foo[A](x)
+
+class C[sealed A]
+
+class CV[sealed A](x: Int):
+ def this() = this:
+ val x = new C[A]:
+ println("foo")
+ 0
+
diff --git a/tests/pos-custom-args/captures/sealed-value-class.scala b/tests/pos-custom-args/captures/sealed-value-class.scala
new file mode 100644
index 000000000000..b5f25bf2d203
--- /dev/null
+++ b/tests/pos-custom-args/captures/sealed-value-class.scala
@@ -0,0 +1,3 @@
+class Ops[sealed A](xs: Array[A]) extends AnyVal:
+
+ def f(p: A => Boolean): Array[A] = xs
diff --git a/tests/pos-custom-args/captures/steppers.scala b/tests/pos-custom-args/captures/steppers.scala
new file mode 100644
index 000000000000..815ac938b492
--- /dev/null
+++ b/tests/pos-custom-args/captures/steppers.scala
@@ -0,0 +1,27 @@
+
+trait Stepper[+A]:
+ this: Stepper[A]^ =>
+
+object Stepper:
+ trait EfficientSplit
+
+sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure
+
+trait IterableOnce[+A] extends Any:
+ this: IterableOnce[A]^ =>
+ def stepper[S <: Stepper[_]^{this}](implicit shape: StepperShape[A, S]): S = ???
+
+sealed abstract class ArraySeq[sealed T] extends IterableOnce[T], Pure:
+ def array: Array[_]
+
+ def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] =
+ val arr = array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]]).asInstanceOf[Array[T]]
+ ArraySeq.make(arr).asInstanceOf[ArraySeq[T]]
+
+object ArraySeq:
+
+ def make[sealed T](x: Array[T]): ArraySeq[T] = ???
+
+ final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T], Pure:
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S & Stepper.EfficientSplit = ???
+
diff --git a/tests/pos-special/stdlib/collection/ArrayOps.scala b/tests/pos-special/stdlib/collection/ArrayOps.scala
new file mode 100644
index 000000000000..a52fd0dbd162
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/ArrayOps.scala
@@ -0,0 +1,1664 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import java.lang.Math.{max, min}
+import java.util.Arrays
+import language.experimental.captureChecking
+
+import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally
+ genericArrayOps => _,
+ booleanArrayOps => _,
+ byteArrayOps => _,
+ charArrayOps => _,
+ doubleArrayOps => _,
+ floatArrayOps => _,
+ intArrayOps => _,
+ longArrayOps => _,
+ refArrayOps => _,
+ shortArrayOps => _,
+ unitArrayOps => _,
+ genericWrapArray => _,
+ wrapRefArray => _,
+ wrapIntArray => _,
+ wrapDoubleArray => _,
+ wrapLongArray => _,
+ wrapFloatArray => _,
+ wrapCharArray => _,
+ wrapByteArray => _,
+ wrapShortArray => _,
+ wrapBooleanArray => _,
+ wrapUnitArray => _,
+ wrapString => _,
+ copyArrayToImmutableIndexedSeq => _,
+ _
+}
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.immutable.Range
+import scala.collection.mutable.ArrayBuilder
+import scala.math.Ordering
+import scala.reflect.ClassTag
+import scala.util.Sorting
+
+object ArrayOps {
+
+ @SerialVersionUID(3L)
+ private class ArrayView[sealed A](xs: Array[A]) extends AbstractIndexedSeqView[A] {
+ def length = xs.length
+ def apply(n: Int) = xs(n)
+ override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")")
+ }
+
+ /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */
+ class WithFilter[sealed A](p: A => Boolean, xs: Array[A]) {
+
+ /** Apply `f` to each element for its side effects.
+ * Note: [U] parameter needed to help scalac's type inference.
+ */
+ def foreach[U](f: A => U): Unit = {
+ val len = xs.length
+ var i = 0
+ while(i < len) {
+ val x = xs(i)
+ if(p(x)) f(x)
+ i += 1
+ }
+ }
+
+ /** Builds a new array by applying a function to all elements of this array.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned array.
+ * @return a new array resulting from applying the given function
+ * `f` to each element of this array and collecting the results.
+ */
+ def map[sealed B: ClassTag](f: A => B): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ var i = 0
+ while (i < xs.length) {
+ val x = xs(i)
+ if(p(x)) b += f(x)
+ i = i + 1
+ }
+ b.result()
+ }
+
+ /** Builds a new array by applying a function to all elements of this array
+ * and using the elements of the resulting collections.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned array.
+ * @return a new array resulting from applying the given collection-valued function
+ * `f` to each element of this array and concatenating the results.
+ */
+ def flatMap[sealed B: ClassTag](f: A => IterableOnce[B]): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ var i = 0
+ while(i < xs.length) {
+ val x = xs(i)
+ if(p(x)) b ++= f(xs(i))
+ i += 1
+ }
+ b.result()
+ }
+
+ def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] =
+ flatMap[B](x => asIterable(f(x)))
+
+ /** Creates a new non-strict filter which combines this filter with the given predicate. */
+ def withFilter(q: A => Boolean): WithFilter[A]^{this, q} = new WithFilter[A](a => p(a) && q(a), xs)
+ }
+
+ @SerialVersionUID(3L)
+ private[collection] final class ArrayIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable {
+ private[this] var pos = 0
+ private[this] val len = xs.length
+ override def knownSize: Int = len - pos
+ def hasNext: Boolean = pos < len
+ def next(): A = {
+ if (pos >= xs.length) Iterator.empty.next()
+ val r = xs(pos)
+ pos += 1
+ r
+ }
+ override def drop(n: Int): Iterator[A] = {
+ if (n > 0) {
+ val newPos = pos + n
+ pos =
+ if (newPos < 0 /* overflow */) len
+ else Math.min(len, newPos)
+ }
+ this
+ }
+ }
+
+ @SerialVersionUID(3L)
+ private final class ReverseIterator[@specialized(Specializable.Everything) sealed A](xs: Array[A]) extends AbstractIterator[A] with Serializable {
+ private[this] var pos = xs.length-1
+ def hasNext: Boolean = pos >= 0
+ def next(): A = {
+ if (pos < 0) Iterator.empty.next()
+ val r = xs(pos)
+ pos -= 1
+ r
+ }
+
+ override def drop(n: Int): Iterator[A] = {
+ if (n > 0) pos = Math.max( -1, pos - n)
+ this
+ }
+ }
+
+ @SerialVersionUID(3L)
+ private final class GroupedIterator[sealed A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable {
+ private[this] var pos = 0
+ def hasNext: Boolean = pos < xs.length
+ def next(): Array[A] = {
+ if(pos >= xs.length) throw new NoSuchElementException
+ val r = new ArrayOps(xs).slice(pos, pos+groupSize)
+ pos += groupSize
+ r
+ }
+ }
+
+ /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to
+ * an implementation that copies the data to a boxed representation for use with `Arrays.sort`.
+ */
+ private final val MaxStableSortLength = 300
+
+ /** Avoid an allocation in [[collect]]. */
+ private val fallback: Any => Any = _ => fallback
+}
+
+/** This class serves as a wrapper for `Array`s with many of the operations found in
+ * indexed sequences. Where needed, instances of arrays are implicitly converted
+ * into this class. There is generally no reason to create an instance explicitly or use
+ * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on
+ * the implicit conversion to `ArrayOps` when calling a method (which does not actually
+ * allocate an instance of `ArrayOps` because it is a value class).
+ *
+ * Neither `Array` nor `ArrayOps` are proper collection types
+ * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and
+ * `immutable.ArraySeq` serve this purpose.
+ *
+ * The difference between this class and `ArraySeq`s is that calling transformer methods such as
+ * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`.
+ *
+ * @tparam A type of the elements contained in this array.
+ */
+final class ArrayOps[sealed A](private val xs: Array[A]) extends AnyVal {
+
+ @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType)
+
+ /** The size of this array.
+ *
+ * @return the number of elements in this array.
+ */
+ @`inline` def size: Int = xs.length
+
+ /** The size of this array.
+ *
+ * @return the number of elements in this array.
+ */
+ @`inline` def knownSize: Int = xs.length
+
+ /** Tests whether the array is empty.
+ *
+ * @return `true` if the array contains no elements, `false` otherwise.
+ */
+ @`inline` def isEmpty: Boolean = xs.length == 0
+
+ /** Tests whether the array is not empty.
+ *
+ * @return `true` if the array contains at least one element, `false` otherwise.
+ */
+ @`inline` def nonEmpty: Boolean = xs.length != 0
+
+ /** Selects the first element of this array.
+ *
+ * @return the first element of this array.
+ * @throws NoSuchElementException if the array is empty.
+ */
+ def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array")
+
+ /** Selects the last element.
+ *
+ * @return The last element of this array.
+ * @throws NoSuchElementException If the array is empty.
+ */
+ def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array")
+
+ /** Optionally selects the first element.
+ *
+ * @return the first element of this array if it is nonempty,
+ * `None` if it is empty.
+ */
+ def headOption: Option[A] = if(isEmpty) None else Some(head)
+
+ /** Optionally selects the last element.
+ *
+ * @return the last element of this array if it is nonempty,
+ * `None` if it is empty.
+ */
+ def lastOption: Option[A] = if(isEmpty) None else Some(last)
+
+ /** Compares the size of this array to a test value.
+ *
+ * @param otherSize the test value that gets compared with the size.
+ * @return A value `x` where
+ * {{{
+ * x < 0 if this.size < otherSize
+ * x == 0 if this.size == otherSize
+ * x > 0 if this.size > otherSize
+ * }}}
+ */
+ def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize)
+
+ /** Compares the length of this array to a test value.
+ *
+ * @param len the test value that gets compared with the length.
+ * @return A value `x` where
+ * {{{
+ * x < 0 if this.length < len
+ * x == 0 if this.length == len
+ * x > 0 if this.length > len
+ * }}}
+ */
+ def lengthCompare(len: Int): Int = Integer.compare(xs.length, len)
+
+ /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int`
+ * because `size` is known and comparison is constant-time.
+ *
+ * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and
+ * allow the following more readable usages:
+ *
+ * {{{
+ * this.sizeIs < size // this.sizeCompare(size) < 0
+ * this.sizeIs <= size // this.sizeCompare(size) <= 0
+ * this.sizeIs == size // this.sizeCompare(size) == 0
+ * this.sizeIs != size // this.sizeCompare(size) != 0
+ * this.sizeIs >= size // this.sizeCompare(size) >= 0
+ * this.sizeIs > size // this.sizeCompare(size) > 0
+ * }}}
+ */
+ def sizeIs: Int = xs.length
+
+ /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int`
+ * because `length` is known and comparison is constant-time.
+ *
+ * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and
+ * allow the following more readable usages:
+ *
+ * {{{
+ * this.lengthIs < len // this.lengthCompare(len) < 0
+ * this.lengthIs <= len // this.lengthCompare(len) <= 0
+ * this.lengthIs == len // this.lengthCompare(len) == 0
+ * this.lengthIs != len // this.lengthCompare(len) != 0
+ * this.lengthIs >= len // this.lengthCompare(len) >= 0
+ * this.lengthIs > len // this.lengthCompare(len) > 0
+ * }}}
+ */
+ def lengthIs: Int = xs.length
+
+ /** Selects an interval of elements. The returned array is made up
+ * of all elements `x` which satisfy the invariant:
+ * {{{
+ * from <= indexOf(x) < until
+ * }}}
+ *
+ * @param from the lowest index to include from this array.
+ * @param until the lowest index to EXCLUDE from this array.
+ * @return an array containing the elements greater than or equal to
+ * index `from` extending up to (but not including) index `until`
+ * of this array.
+ */
+ def slice(from: Int, until: Int): Array[A] = {
+ import java.util.Arrays.copyOfRange
+ val lo = max(from, 0)
+ val hi = min(until, xs.length)
+ if (hi > lo) {
+ (((xs: Array[_]): @unchecked) match {
+ case x: Array[AnyRef] => copyOfRange(x, lo, hi)
+ case x: Array[Int] => copyOfRange(x, lo, hi)
+ case x: Array[Double] => copyOfRange(x, lo, hi)
+ case x: Array[Long] => copyOfRange(x, lo, hi)
+ case x: Array[Float] => copyOfRange(x, lo, hi)
+ case x: Array[Char] => copyOfRange(x, lo, hi)
+ case x: Array[Byte] => copyOfRange(x, lo, hi)
+ case x: Array[Short] => copyOfRange(x, lo, hi)
+ case x: Array[Boolean] => copyOfRange(x, lo, hi)
+ }).asInstanceOf[Array[A]]
+ } else new Array[A](0)
+ }
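+
+  // A quick usage sketch (illustrative values): out-of-range bounds are
+  // clamped rather than throwing.
+  //   Array(1, 2, 3, 4, 5).slice(1, 4)  // Array(2, 3, 4)
+  //   Array(1, 2, 3).slice(-1, 10)      // Array(1, 2, 3)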
+
+ /** The rest of the array without its first element. */
+ def tail: Array[A] =
+ if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length)
+
+ /** The initial part of the array without its last element. */
+ def init: Array[A] =
+ if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1)
+
+ /** Iterates over the tails of this array. The first value will be this
+ * array and the final one will be an empty array, with the intervening
+ * values the results of successive applications of `tail`.
+ *
+ * @return an iterator over all the tails of this array
+ */
+ def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail)
+
+ /** Iterates over the inits of this array. The first value will be this
+ * array and the final one will be an empty array, with the intervening
+ * values the results of successive applications of `init`.
+ *
+ * @return an iterator over all the inits of this array
+ */
+ def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init)
+
+ // A helper for tails and inits.
+ private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]]^{f} =
+ Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A])
+
+ /** An array containing the first `n` elements of this array. */
+ def take(n: Int): Array[A] = slice(0, n)
+
+ /** The rest of the array without its `n` first elements. */
+ def drop(n: Int): Array[A] = slice(n, xs.length)
+
+ /** An array containing the last `n` elements of this array. */
+ def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0))
+
+ /** The rest of the array without its `n` last elements. */
+ def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0))
+
+ /** Takes longest prefix of elements that satisfy a predicate.
+ *
+ * @param p The predicate used to test elements.
+ * @return the longest prefix of this array whose elements all satisfy
+ * the predicate `p`.
+ */
+ def takeWhile(p: A => Boolean): Array[A] = {
+ val i = indexWhere(x => !p(x))
+ val hi = if(i < 0) xs.length else i
+ slice(0, hi)
+ }
+
+ /** Drops longest prefix of elements that satisfy a predicate.
+ *
+ * @param p The predicate used to test elements.
+ * @return the longest suffix of this array whose first element
+ * does not satisfy the predicate `p`.
+ */
+ def dropWhile(p: A => Boolean): Array[A] = {
+ val i = indexWhere(x => !p(x))
+ val lo = if(i < 0) xs.length else i
+ slice(lo, xs.length)
+ }
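+
+  // A quick usage sketch (illustrative values): both methods stop at the
+  // first failing element, even if later elements satisfy the predicate.
+  //   Array(1, 2, 3, 1).takeWhile(_ < 3)  // Array(1, 2)
+  //   Array(1, 2, 3, 1).dropWhile(_ < 3)  // Array(3, 1)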
+
+ def iterator: Iterator[A] =
+ ((xs: Any @unchecked) match {
+ case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Int] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Double] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Long] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Float] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Char] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Short] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs)
+ case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs)
+ case null => throw new NullPointerException
+ }).asInstanceOf[Iterator[A]]
+
+ def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+ import convert.impl._
+ val s = (shape.shape: @unchecked) match {
+ case StepperShape.ReferenceShape => (xs: Any) match {
+ case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length)
+ case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length)
+ }
+ case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length)
+ case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length)
+ case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length)
+ case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length)
+ case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length)
+ case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length)
+ case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length)
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ /** Partitions elements in fixed size arrays.
+ * @see [[scala.collection.Iterator]], method `grouped`
+ *
+ * @param size the number of elements per group
+   * @return An iterator producing arrays of size `size`, except the
+   * last one, which will be shorter than `size` if the elements don't divide evenly.
+ */
+ def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size)
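+
+  // A quick usage sketch (illustrative values): the final group keeps the
+  // leftover elements.
+  //   Array(1, 2, 3, 4, 5).grouped(2).toList
+  //   // List(Array(1, 2), Array(3, 4), Array(5))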
+
+ /** Splits this array into a prefix/suffix pair according to a predicate.
+ *
+ * Note: `c span p` is equivalent to (but more efficient than)
+ * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the
+ * predicate `p` does not cause any side-effects.
+ *
+ * @param p the test predicate
+ * @return a pair consisting of the longest prefix of this array whose
+ * elements all satisfy `p`, and the rest of this array.
+ */
+ def span(p: A => Boolean): (Array[A], Array[A]) = {
+ val i = indexWhere(x => !p(x))
+ val idx = if(i < 0) xs.length else i
+ (slice(0, idx), slice(idx, xs.length))
+ }
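+
+  // A quick usage sketch (illustrative values): a single `indexWhere` scan
+  // yields both halves.
+  //   Array(1, 2, 3, 1).span(_ < 3)  // (Array(1, 2), Array(3, 1))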
+
+ /** Splits this array into two at a given position.
+ * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`.
+ *
+ * @param n the position at which to split.
+ * @return a pair of arrays consisting of the first `n`
+ * elements of this array, and the other elements.
+ */
+ def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n))
+
+ /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */
+ def partition(p: A => Boolean): (Array[A], Array[A]) = {
+ val res1, res2 = ArrayBuilder.make[A]
+ var i = 0
+ while(i < xs.length) {
+ val x = xs(i)
+ (if(p(x)) res1 else res2) += x
+ i += 1
+ }
+ (res1.result(), res2.result())
+ }
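+
+  // A quick usage sketch (illustrative values): unlike `span`, every
+  // element is tested, so matches need not be contiguous.
+  //   Array(1, 2, 3, 4).partition(_ % 2 == 0)  // (Array(2, 4), Array(1, 3))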
+
+ /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one
+ * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second
+ * one made of those wrapped in [[scala.util.Right]].
+ *
+ * Example:
+ * {{{
+ * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap {
+ * case i: Int => Left(i)
+ * case s: String => Right(s)
+ * }
+ * // xs == (Array(1, 2, 3),
+ * // Array(one, two, three))
+ * }}}
+ *
+ * @tparam A1 the element type of the first resulting collection
+ * @tparam A2 the element type of the second resulting collection
+ * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]]
+ *
+ * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]],
+ * and the second one made of those wrapped in [[scala.util.Right]]. */
+ def partitionMap[sealed A1: ClassTag, sealed A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = {
+ val res1 = ArrayBuilder.make[A1]
+ val res2 = ArrayBuilder.make[A2]
+ var i = 0
+ while(i < xs.length) {
+ f(xs(i)) match {
+ case Left(x) => res1 += x
+ case Right(x) => res2 += x
+ }
+ i += 1
+ }
+ (res1.result(), res2.result())
+ }
+
+ /** Returns a new array with the elements in reversed order. */
+ @inline def reverse: Array[A] = {
+ val len = xs.length
+ val res = new Array[A](len)
+ var i = 0
+ while(i < len) {
+ res(len-i-1) = xs(i)
+ i += 1
+ }
+ res
+ }
+
+ /** An iterator yielding elements in reversed order.
+ *
+ * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently.
+ *
+ * @return an iterator yielding the elements of this array in reversed order
+ */
+ def reverseIterator: Iterator[A] =
+ ((xs: Any @unchecked) match {
+ case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Int] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Double] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Long] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Float] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Char] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Short] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs)
+ case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs)
+ case null => throw new NullPointerException
+ }).asInstanceOf[Iterator[A]]
+
+ /** Selects all elements of this array which satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return a new array consisting of all elements of this array that satisfy the given predicate `p`.
+ */
+ def filter(p: A => Boolean): Array[A] = {
+ val res = ArrayBuilder.make[A]
+ var i = 0
+ while(i < xs.length) {
+ val x = xs(i)
+ if(p(x)) res += x
+ i += 1
+ }
+ res.result()
+ }
+
+ /** Selects all elements of this array which do not satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`.
+ */
+ def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x))
+
+ /** Sorts this array according to an Ordering.
+ *
+ * The sort is stable. That is, elements that are equal (as determined by
+   * `ord.compare`) appear in the same order in the sorted sequence as in the original.
+ *
+ * @see [[scala.math.Ordering]]
+ *
+ * @param ord the ordering to be used to compare elements.
+ * @return an array consisting of the elements of this array
+ * sorted according to the ordering `ord`.
+ */
+ def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = {
+ val len = xs.length
+ def boxed = if(len < ArrayOps.MaxStableSortLength) {
+ val a = xs.clone()
+ Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]])
+ a
+ } else {
+ val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef)
+ Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]])
+ Array.copyAs[A](a, len)
+ }
+ if(len <= 1) xs.clone()
+ else ((xs: Array[_]) match {
+ case xs: Array[AnyRef] =>
+ val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a
+ case xs: Array[Int] =>
+ if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Long] =>
+ if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Char] =>
+ if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Byte] =>
+ if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Short] =>
+ if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a }
+ else boxed
+ case xs: Array[Boolean] =>
+ if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a }
+ else boxed
+ case xs => boxed
+ }).asInstanceOf[Array[A]]
+ }
+
+ /** Sorts this array according to a comparison function.
+ *
+ * The sort is stable. That is, elements that are equal (as determined by
+ * `lt`) appear in the same order in the sorted sequence as in the original.
+ *
+ * @param lt the comparison function which tests whether
+ * its first argument precedes its second argument in
+ * the desired ordering.
+ * @return an array consisting of the elements of this array
+ * sorted according to the comparison function `lt`.
+ */
+ def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt))
+
+ /** Sorts this array according to the Ordering which results from transforming
+ * an implicitly given Ordering with a transformation function.
+ *
+ * @see [[scala.math.Ordering]]
+ * @param f the transformation function mapping elements
+ * to some other domain `B`.
+ * @param ord the ordering assumed on domain `B`.
+ * @tparam B the target type of the transformation `f`, and the type where
+ * the ordering `ord` is defined.
+ * @return an array consisting of the elements of this array
+ * sorted according to the ordering where `x < y` if
+ * `ord.lt(f(x), f(y))`.
+ */
+ def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f)
+
+ /** Creates a non-strict filter of this array.
+ *
+ * Note: the difference between `c filter p` and `c withFilter p` is that
+ * the former creates a new array, whereas the latter only
+ * restricts the domain of subsequent `map`, `flatMap`, `foreach`,
+ * and `withFilter` operations.
+ *
+ * @param p the predicate used to test elements.
+ * @return an object of class `ArrayOps.WithFilter`, which supports
+ * `map`, `flatMap`, `foreach`, and `withFilter` operations.
+ * All these operations apply to those elements of this array
+ * which satisfy the predicate `p`.
+ */
+ def withFilter(p: A => Boolean): ArrayOps.WithFilter[A]^{p} = new ArrayOps.WithFilter[A](p, xs)
+
+ /** Finds index of first occurrence of some value in this array after or at some start index.
+ *
+ * @param elem the element value to search for.
+ * @param from the start index
+ * @return the index `>= from` of the first element of this array that is equal (as determined by `==`)
+ * to `elem`, or `-1`, if none exists.
+ */
+ def indexOf(elem: A, from: Int = 0): Int = {
+ var i = from
+ while(i < xs.length) {
+ if(elem == xs(i)) return i
+ i += 1
+ }
+ -1
+ }
+
+ /** Finds index of the first element satisfying some predicate after or at some start index.
+ *
+ * @param p the predicate used to test elements.
+ * @param from the start index
+ * @return the index `>= from` of the first element of this array that satisfies the predicate `p`,
+ * or `-1`, if none exists.
+ */
+ def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = {
+ var i = from
+ while(i < xs.length) {
+ if(p(xs(i))) return i
+ i += 1
+ }
+ -1
+ }
+
+ /** Finds index of last occurrence of some value in this array before or at a given end index.
+ *
+ * @param elem the element value to search for.
+ * @param end the end index.
+ * @return the index `<= end` of the last element of this array that is equal (as determined by `==`)
+ * to `elem`, or `-1`, if none exists.
+ */
+ def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = {
+ var i = min(end, xs.length-1)
+ while(i >= 0) {
+ if(elem == xs(i)) return i
+ i -= 1
+ }
+ -1
+ }
+
+  /** Finds index of the last element satisfying some predicate before or at a given end index.
+   *
+   * @param p the predicate used to test elements.
+   * @param end the end index.
+ * @return the index `<= end` of the last element of this array that satisfies the predicate `p`,
+ * or `-1`, if none exists.
+ */
+ def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = {
+ var i = min(end, xs.length-1)
+ while(i >= 0) {
+ if(p(xs(i))) return i
+ i -= 1
+ }
+ -1
+ }
+
+ /** Finds the first element of the array satisfying a predicate, if any.
+ *
+ * @param p the predicate used to test elements.
+ * @return an option value containing the first element in the array
+ * that satisfies `p`, or `None` if none exists.
+ */
+ def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = {
+ val idx = indexWhere(p)
+ if(idx == -1) None else Some(xs(idx))
+ }
+
+ /** Tests whether a predicate holds for at least one element of this array.
+ *
+ * @param p the predicate used to test elements.
+ * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false`
+ */
+ def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0
+
+ /** Tests whether a predicate holds for all elements of this array.
+ *
+ * @param p the predicate used to test elements.
+ * @return `true` if this array is empty or the given predicate `p`
+ * holds for all elements of this array, otherwise `false`.
+ */
+ def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = {
+ var i = 0
+ while(i < xs.length) {
+ if(!p(xs(i))) return false
+ i += 1
+ }
+ true
+ }
+
+ /** Applies a binary operator to a start value and all elements of this array,
+ * going left to right.
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this array,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(z, x_1), x_2, ..., x_n)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this array.
+ * Returns `z` if this array is empty.
+ */
+ def foldLeft[B](z: B)(op: (B, A) => B): B = {
+ def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = {
+ val length = xs.length
+ var v: Any = z
+ var i = 0
+ while(i < length) {
+ v = op(v, xs(i))
+ i += 1
+ }
+ v
+ }
+ ((xs: Any @unchecked) match {
+ case null => throw new NullPointerException // null-check first helps static analysis of instanceOf
+ case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ }).asInstanceOf[B]
+ }
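+
+  // A quick usage sketch (illustrative values): the accumulator is threaded
+  // left to right.
+  //   Array(1, 2, 3).foldLeft(0)(_ + _)                               // 6
+  //   Array(1, 2, 3).foldLeft(List.empty[Int])((acc, x) => x :: acc)  // List(3, 2, 1)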
+
+ /** Produces an array containing cumulative results of applying the binary
+ * operator going left to right.
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return array with intermediate values.
+ *
+ * Example:
+ * {{{
+ * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10)
+ * }}}
+ *
+ */
+ def scanLeft[sealed B : ClassTag](z: B)(op: (B, A) => B): Array[B] = {
+ var v = z
+ var i = 0
+ val res = new Array[B](xs.length + 1)
+ while(i < xs.length) {
+ res(i) = v
+ v = op(v, xs(i))
+ i += 1
+ }
+ res(i) = v
+ res
+ }
+
+ /** Computes a prefix scan of the elements of the array.
+ *
+ * Note: The neutral element `z` may be applied more than once.
+ *
+ * @tparam B element type of the resulting array
+ * @param z neutral element for the operator `op`
+ * @param op the associative operator for the scan
+ *
+ * @return a new array containing the prefix scan of the elements in this array
+ */
+ def scan[sealed B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op)
+
+ /** Produces an array containing cumulative results of applying the binary
+ * operator going right to left.
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return array with intermediate values.
+ *
+ * Example:
+ * {{{
+ * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0)
+ * }}}
+ *
+ */
+ def scanRight[sealed B : ClassTag](z: B)(op: (A, B) => B): Array[B] = {
+ var v = z
+ var i = xs.length - 1
+ val res = new Array[B](xs.length + 1)
+ res(xs.length) = z
+ while(i >= 0) {
+ v = op(xs(i), v)
+ res(i) = v
+ i -= 1
+ }
+ res
+ }
+
+ /** Applies a binary operator to all elements of this array and a start value,
+ * going right to left.
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this array,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x_1, op(x_2, ... op(x_n, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this array.
+ * Returns `z` if this array is empty.
+ */
+ def foldRight[B](z: B)(op: (A, B) => B): B = {
+ def f[@specialized(Specializable.Everything) sealed T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = {
+ var v = z
+ var i = xs.length - 1
+ while(i >= 0) {
+ v = op(xs(i), v)
+ i -= 1
+ }
+ v
+ }
+ ((xs: Any @unchecked) match {
+ case null => throw new NullPointerException
+ case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z)
+ }).asInstanceOf[B]
+ }
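+
+  // A quick usage sketch (illustrative values): the operator associates to
+  // the right, so prepending rebuilds the original order.
+  //   Array(1, 2, 3).foldRight(List.empty[Int])(_ :: _)  // List(1, 2, 3)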
+
+ /** Folds the elements of this array using the specified associative binary operator.
+ *
+ * @tparam A1 a type parameter for the binary operator, a supertype of `A`.
+ * @param z a neutral element for the fold operation; may be added to the result
+ * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation,
+ * 0 for addition, or 1 for multiplication).
+ * @param op a binary operator that must be associative.
+ * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty.
+ */
+ def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)
+
+ /** Builds a new array by applying a function to all elements of this array.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned array.
+ * @return a new array resulting from applying the given function
+ * `f` to each element of this array and collecting the results.
+ */
+ def map[sealed B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = {
+ val len = xs.length
+ val ys = new Array[B](len)
+ if(len > 0) {
+ var i = 0
+ (xs: Any @unchecked) match {
+ case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 }
+ }
+ }
+ ys
+ }
+
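+  /** Applies `f` to each element of this array in place, overwriting the
+   * original values, and returns this (mutated) array.
+   */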
+ def mapInPlace(f: A => A): Array[A] = {
+ var i = 0
+ while (i < xs.length) {
+ xs.update(i, f(xs(i)))
+ i = i + 1
+ }
+ xs
+ }
+
+ /** Builds a new array by applying a function to all elements of this array
+ * and using the elements of the resulting collections.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned array.
+ * @return a new array resulting from applying the given collection-valued function
+ * `f` to each element of this array and concatenating the results.
+ */
+ def flatMap[sealed B : ClassTag](f: A => IterableOnce[B]): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ var i = 0
+ while(i < xs.length) {
+ b ++= f(xs(i))
+ i += 1
+ }
+ b.result()
+ }
+
+ def flatMap[BS, sealed B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] =
+ flatMap[B](x => asIterable(f(x)))
+
+ /** Flattens a two-dimensional array by concatenating all its rows
+ * into a single array.
+ *
+ * @tparam B Type of row elements.
+ * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`.
+ * @return An array obtained by concatenating rows of this array.
+ */
+ def flatten[sealed B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ val len = xs.length
+ var size = 0
+ var i = 0
+ while(i < len) {
+ xs(i) match {
+ case it: IterableOnce[_] =>
+ val k = it.knownSize
+ if(k > 0) size += k
+ case a: Array[_] => size += a.length
+ case _ =>
+ }
+ i += 1
+ }
+ if(size > 0) b.sizeHint(size)
+ i = 0
+ while(i < len) {
+ b ++= asIterable(xs(i))
+ i += 1
+ }
+ b.result()
+ }
+
+ /** Builds a new array by applying a partial function to all elements of this array
+ * on which the function is defined.
+ *
+ * @param pf the partial function which filters and maps the array.
+ * @tparam B the element type of the returned array.
+ * @return a new array resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ */
+ def collect[sealed B: ClassTag](pf: PartialFunction[A, B]): Array[B] = {
+ val fallback: Any => Any = ArrayOps.fallback
+ val b = ArrayBuilder.make[B]
+ var i = 0
+ while (i < xs.length) {
+ val v = pf.applyOrElse(xs(i), fallback)
+ if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B])
+ i += 1
+ }
+ b.result()
+ }
+
+ /** Finds the first element of the array for which the given partial function is defined, and applies the
+ * partial function to it. */
+ def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = {
+ val fallback: Any => Any = ArrayOps.fallback
+ var i = 0
+ while (i < xs.length) {
+ val v = pf.applyOrElse(xs(i), fallback)
+ if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B])
+ i += 1
+ }
+ None
+ }
+
+ /** Returns an array formed from this array and another iterable collection
+ * by combining corresponding elements in pairs.
+ * If one of the two collections is longer than the other, its remaining elements are ignored.
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new array containing pairs consisting of corresponding elements of this array and `that`.
+ * The length of the returned array is the minimum of the lengths of this array and `that`.
+ */
+ def zip[sealed B](that: IterableOnce[B]): Array[(A, B)] = {
+ val b = new ArrayBuilder.ofRef[(A, B)]()
+ val k = that.knownSize
+ b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length)
+ var i = 0
+ val it = that.iterator
+ while(i < xs.length && it.hasNext) {
+ b += ((xs(i), it.next()))
+ i += 1
+ }
+ b.result()
+ }
+
+ /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is
+ * invoked on the returned `LazyZip2` decorator.
+ *
+ * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of
+ * constructing and deconstructing intermediary tuples.
+ *
+ * {{{
+ * val xs = List(1, 2, 3)
+ * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d)
+ * // res == List(4, 8, 12)
+ * }}}
+ *
+ * @param that the iterable providing the second element of each eventual pair
+ * @tparam B the type of the second element in each eventual pair
+ * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs
+ * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported.
+ */
+ def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that)
+
+ /** Returns an array formed from this array and another iterable collection
+ * by combining corresponding elements in pairs.
+ * If one of the two collections is shorter than the other,
+ * placeholder elements are used to extend the shorter collection to the length of the longer.
+ *
+ * @param that the iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this array is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this array.
+ * @return a new array containing pairs consisting of corresponding elements of this array and `that`.
+ * The length of the returned array is the maximum of the lengths of this array and `that`.
+ * If this array is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this array, `thatElem` values are used to pad the result.
+ */
+ def zipAll[sealed A1 >: A, sealed B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = {
+ val b = new ArrayBuilder.ofRef[(A1, B)]()
+ val k = that.knownSize
+ b.sizeHint(max(k, xs.length))
+ var i = 0
+ val it = that.iterator
+ while(i < xs.length && it.hasNext) {
+ b += ((xs(i), it.next()))
+ i += 1
+ }
+ while(it.hasNext) {
+ b += ((thisElem, it.next()))
+ i += 1
+ }
+ while(i < xs.length) {
+ b += ((xs(i), thatElem))
+ i += 1
+ }
+ b.result()
+ }
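+
+  // A quick usage sketch (illustrative values): the shorter side is padded.
+  //   Array(1, 2).zipAll(List("a", "b", "c"), 0, "?")
+  //   // Array((1, "a"), (2, "b"), (0, "c"))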
+
+ /** Zips this array with its indices.
+ *
+ * @return A new array containing pairs consisting of all elements of this array paired with their index.
+ * Indices start at `0`.
+ */
+ def zipWithIndex: Array[(A, Int)] = {
+ val b = new Array[(A, Int)](xs.length)
+ var i = 0
+ while(i < xs.length) {
+ b(i) = ((xs(i), i))
+ i += 1
+ }
+ b
+ }
+
+ /** A copy of this array with an element appended. */
+ def appended[sealed B >: A : ClassTag](x: B): Array[B] = {
+ val dest = Array.copyAs[B](xs, xs.length+1)
+ dest(xs.length) = x
+ dest
+ }
+
+ @`inline` final def :+ [sealed B >: A : ClassTag](x: B): Array[B] = appended(x)
+
+ /** A copy of this array with an element prepended. */
+ def prepended[sealed B >: A : ClassTag](x: B): Array[B] = {
+ val dest = new Array[B](xs.length + 1)
+ dest(0) = x
+ Array.copy(xs, 0, dest, 1, xs.length)
+ dest
+ }
+
+ @`inline` final def +: [sealed B >: A : ClassTag](x: B): Array[B] = prepended(x)
+
+ /** A copy of this array with all elements of a collection prepended. */
+ def prependedAll[sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ val k = prefix.knownSize
+ if(k >= 0) b.sizeHint(k + xs.length)
+ b.addAll(prefix)
+ if(k < 0) b.sizeHint(b.length + xs.length)
+ b.addAll(xs)
+ b.result()
+ }
+
+ /** A copy of this array with all elements of an array prepended. */
+ def prependedAll[sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = {
+ val dest = Array.copyAs[B](prefix, prefix.length+xs.length)
+ Array.copy(xs, 0, dest, prefix.length, xs.length)
+ dest
+ }
+
+ @`inline` final def ++: [sealed B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix)
+
+ @`inline` final def ++: [sealed B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix)
+
+ /** A copy of this array with all elements of a collection appended. */
+ def appendedAll[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ val k = suffix.knownSize
+ if(k >= 0) b.sizeHint(k + xs.length)
+ b.addAll(xs)
+ b.addAll(suffix)
+ b.result()
+ }
+
+ /** A copy of this array with all elements of an array appended. */
+ def appendedAll[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = {
+ val dest = Array.copyAs[B](xs, xs.length+suffix.length)
+ Array.copy(suffix, 0, dest, xs.length, suffix.length)
+ dest
+ }
+
+ @`inline` final def :++ [sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix)
+
+ @`inline` final def :++ [sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix)
+
+ @`inline` final def concat[sealed B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix)
+
+ @`inline` final def concat[sealed B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix)
+
+ @`inline` final def ++[sealed B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs)
+
+ @`inline` final def ++[sealed B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs)
+
+ /** Tests whether this array contains a given value as an element.
+ *
+ * @param elem the element to test.
+ * @return `true` if this array has an element that is equal (as
+ * determined by `==`) to `elem`, `false` otherwise.
+ */
+ def contains(elem: A): Boolean = exists (_ == elem)
+
+ /** Returns a copy of this array with patched values.
+ * Patching at negative indices is the same as patching starting at 0.
+ * Patching at indices at or larger than the length of the original array appends the patch to the end.
+ * If more values are replaced than actually exist, the excess is ignored.
+ *
+ * @param from The start index from which to patch
+ * @param other The patch values
+ * @param replaced The number of values in the original array that are replaced by the patch.
+ */
+ def patch[sealed B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = {
+ val b = ArrayBuilder.make[B]
+ val k = other.knownSize
+ val r = if(replaced < 0) 0 else replaced
+ if(k >= 0) b.sizeHint(xs.length + k - r)
+ val chunk1 = if(from > 0) min(from, xs.length) else 0
+ if(chunk1 > 0) b.addAll(xs, 0, chunk1)
+ b ++= other
+ val remaining = xs.length - chunk1 - r
+ if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining)
+ b.result()
+ }
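+
+  // A quick usage sketch (illustrative values): replace two elements
+  // starting at index 1.
+  //   Array(1, 2, 3, 4, 5).patch(1, Array(9, 9), 2)  // Array(1, 9, 9, 4, 5)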
+
+ /** Converts an array of pairs into an array of first elements and an array of second elements.
+ *
+ * @tparam A1 the type of the first half of the element pairs
+ * @tparam A2 the type of the second half of the element pairs
+ * @param asPair an implicit conversion which asserts that the element type
+ * of this Array is a pair.
+ * @param ct1 a class tag for `A1` type parameter that is required to create an instance
+ * of `Array[A1]`
+ * @param ct2 a class tag for `A2` type parameter that is required to create an instance
+ * of `Array[A2]`
+ * @return a pair of Arrays, containing, respectively, the first and second half
+ * of each element pair of this Array.
+ */
+ def unzip[sealed A1, sealed A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = {
+ val a1 = new Array[A1](xs.length)
+ val a2 = new Array[A2](xs.length)
+ var i = 0
+ while (i < xs.length) {
+ val e = asPair(xs(i))
+ a1(i) = e._1
+ a2(i) = e._2
+ i += 1
+ }
+ (a1, a2)
+ }
+
+ /** Converts an array of triples into three arrays, one containing the elements from each position of the triple.
+ *
+ * @tparam A1 the type of the first of three elements in the triple
+ * @tparam A2 the type of the second of three elements in the triple
+ * @tparam A3 the type of the third of three elements in the triple
+ * @param asTriple an implicit conversion which asserts that the element type
+ * of this Array is a triple.
+   * @param ct1 a class tag for `A1` type parameter that is required to create an instance
+   * of `Array[A1]`
+   * @param ct2 a class tag for `A2` type parameter that is required to create an instance
+   * of `Array[A2]`
+   * @param ct3 a class tag for `A3` type parameter that is required to create an instance
+   * of `Array[A3]`
+ * @return a triple of Arrays, containing, respectively, the first, second, and third
+ * elements from each element triple of this Array.
+ */
+ def unzip3[sealed A1, sealed A2, sealed A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2],
+ ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = {
+ val a1 = new Array[A1](xs.length)
+ val a2 = new Array[A2](xs.length)
+ val a3 = new Array[A3](xs.length)
+ var i = 0
+ while (i < xs.length) {
+ val e = asTriple(xs(i))
+ a1(i) = e._1
+ a2(i) = e._2
+ a3(i) = e._3
+ i += 1
+ }
+ (a1, a2, a3)
+ }
+
+ /** Transposes a two dimensional array.
+ *
+ * @tparam B Type of row elements.
+ * @param asArray A function that converts elements of this array to rows - arrays of type `B`.
+   * @return An array obtained by replacing the elements of this array with the rows they represent.
+ */
+ def transpose[sealed B](implicit asArray: A => Array[B]): Array[Array[B]] = {
+ val aClass = xs.getClass.getComponentType
+ val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass))
+ if (xs.length == 0) bb.result()
+ else {
+ def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType))
+ val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder())
+ for (xs <- this) {
+ var i = 0
+ for (x <- new ArrayOps(asArray(xs))) {
+ bs(i) += x
+ i += 1
+ }
+ }
+ for (b <- new ArrayOps(bs)) bb += b.result()
+ bb.result()
+ }
+ }
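+
+  // A quick usage sketch (illustrative values): rows become columns.
+  //   Array(Array(1, 2, 3), Array(4, 5, 6)).transpose
+  //   // Array(Array(1, 4), Array(2, 5), Array(3, 6))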
+
+ /** Apply `f` to each element for its side effects.
+ * Note: [U] parameter needed to help scalac's type inference.
+ */
+ def foreach[U](f: A => U): Unit = {
+ val len = xs.length
+ var i = 0
+ (xs: Any @unchecked) match {
+ case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 }
+ }
+ }
+
+ /** Selects all the elements of this array ignoring the duplicates.
+ *
+ * @return a new array consisting of all the elements of this array without duplicates.
+ */
+ def distinct: Array[A] = distinctBy(identity)
+
+ /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying
+ * the transforming function `f`.
+ *
+ * @param f The transforming function whose result is used to determine the uniqueness of each element
+ * @tparam B the type of the elements after being transformed by `f`
+ * @return a new array consisting of all the elements of this array without duplicates.
+ */
+ def distinctBy[B](f: A -> B): Array[A] =
+ ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result()
+
+ /** A copy of this array with an element value appended until a given target length is reached.
+ *
+ * @param len the target length
+ * @param elem the padding value
+ * @tparam B the element type of the returned array.
+ * @return a new array consisting of
+ * all elements of this array followed by the minimal number of occurrences of `elem` so
+ * that the resulting collection has a length of at least `len`.
+ */
+ def padTo[sealed B >: A : ClassTag](len: Int, elem: B): Array[B] = {
+ var i = xs.length
+ val newlen = max(i, len)
+ val dest = Array.copyAs[B](xs, newlen)
+ while(i < newlen) {
+ dest(i) = elem
+ i += 1
+ }
+ dest
+ }
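+
+  // A quick usage sketch (illustrative values): a `len` no greater than the
+  // current length just returns a copy.
+  //   Array(1, 2).padTo(4, 0)  // Array(1, 2, 0, 0)
+  //   Array(1, 2).padTo(1, 0)  // Array(1, 2)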
+
+ /** Produces the range of all indices of this sequence.
+ *
+ * @return a `Range` value from `0` to one less than the length of this array.
+ */
+ def indices: Range = Range(0, xs.length)
+
+ /** Partitions this array into a map of arrays according to some discriminator function.
+ *
+ * @param f the discriminator function.
+ * @tparam K the type of keys returned by the discriminator function.
+ * @return A map from keys to arrays such that the following invariant holds:
+ * {{{
+ * (xs groupBy f)(k) = xs filter (x => f(x) == k)
+ * }}}
+ * That is, every key `k` is bound to an array of those elements `x`
+ * for which `f(x)` equals `k`.
+ */
+ def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = {
+ val m = mutable.Map.empty[K, ArrayBuilder[A]]
+ val len = xs.length
+ var i = 0
+ while(i < len) {
+ val elem = xs(i)
+ val key = f(elem)
+ val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A])
+ bldr += elem
+ i += 1
+ }
+ m.view.mapValues(_.result()).toMap
+ }
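+
+  // A quick usage sketch (illustrative values):
+  //   Array(1, 2, 3, 4).groupBy(_ % 2)
+  //   // Map(1 -> Array(1, 3), 0 -> Array(2, 4))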
+
+ /**
+ * Partitions this array into a map of arrays according to a discriminator function `key`.
+ * Each element in a group is transformed into a value of type `B` using the `value` function.
+ *
+ * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient.
+ *
+ * {{{
+ * case class User(name: String, age: Int)
+ *
+ * def namesByAge(users: Array[User]): Map[Int, Array[String]] =
+ * users.groupMap(_.age)(_.name)
+ * }}}
+ *
+ * @param key the discriminator function
+ * @param f the element transformation function
+ * @tparam K the type of keys returned by the discriminator function
+ * @tparam B the type of values returned by the transformation function
+ */
+ def groupMap[K, sealed B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = {
+ val m = mutable.Map.empty[K, ArrayBuilder[B]]
+ val len = xs.length
+ var i = 0
+ while(i < len) {
+ val elem = xs(i)
+ val k = key(elem)
+ val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B])
+ bldr += f(elem)
+ i += 1
+ }
+ m.view.mapValues(_.result()).toMap
+ }
+
+ @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq
+
+ def toIndexedSeq: immutable.IndexedSeq[A] =
+ immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length))
+
+ /** Copy elements of this array to another array.
+ * Fills the given array `xs` starting at index 0.
+ * Copying will stop once either all the elements of this array have been copied,
+   * or the end of the destination array is reached.
+ *
+ * @param xs the array to fill.
+ * @tparam B the type of the elements of the array.
+ */
+ def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0)
+
+ /** Copy elements of this array to another array.
+ * Fills the given array `xs` starting at index `start`.
+ * Copying will stop once either all the elements of this array have been copied,
+   * or the end of the destination array is reached.
+ *
+ * @param xs the array to fill.
+ * @param start the starting index within the destination array.
+ * @tparam B the type of the elements of the array.
+ */
+ def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue)
+
+ /** Copy elements of this array to another array.
+ * Fills the given array `xs` starting at index `start` with at most `len` values.
+ * Copying will stop once either all the elements of this array have been copied,
+   * or the end of the destination array is reached, or `len` elements have been copied.
+ *
+ * @param xs the array to fill.
+ * @param start the starting index within the destination array.
+ * @param len the maximal number of elements to copy.
+ * @tparam B the type of the elements of the array.
+ */
+ def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = {
+ val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len)
+ if (copied > 0) {
+ Array.copy(this.xs, 0, xs, start, copied)
+ }
+ copied
+ }
+
+ /** Create a copy of this array with the specified element type. */
+ def toArray[sealed B >: A: ClassTag]: Array[B] = {
+ val destination = new Array[B](xs.length)
+ copyToArray(destination, 0)
+ destination
+ }
+
+ /** Counts the number of elements in this array which satisfy a predicate */
+ def count(p: A => Boolean): Int = {
+ var i, res = 0
+ val len = xs.length
+ while(i < len) {
+ if(p(xs(i))) res += 1
+ i += 1
+ }
+ res
+ }
+
+ // can't use a default arg because we already have another overload with a default arg
+ /** Tests whether this array starts with the given array. */
+ @`inline` def startsWith[sealed B >: A](that: Array[B]): Boolean = startsWith(that, 0)
+
+ /** Tests whether this array contains the given array at a given index.
+ *
+ * @param that the array to test
+ * @param offset the index where the array is searched.
+ * @return `true` if the array `that` is contained in this array at
+ * index `offset`, otherwise `false`.
+ */
+ def startsWith[sealed B >: A](that: Array[B], offset: Int): Boolean = {
+ val safeOffset = offset.max(0)
+ val thatl = that.length
+ if(thatl > xs.length-safeOffset) thatl == 0
+ else {
+ var i = 0
+ while(i < thatl) {
+ if(xs(i+safeOffset) != that(i)) return false
+ i += 1
+ }
+ true
+ }
+ }
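+
+  // A quick usage sketch (illustrative values): a negative offset is
+  // treated as 0.
+  //   Array(1, 2, 3).startsWith(Array(2, 3), 1)  // true
+  //   Array(1, 2, 3).startsWith(Array(2, 3))     // false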
+
+ /** Tests whether this array ends with the given array.
+ *
+ * @param that the array to test
+ * @return `true` if this array has `that` as a suffix, `false` otherwise.
+ */
+ def endsWith[sealed B >: A](that: Array[B]): Boolean = {
+ val thatl = that.length
+ val off = xs.length - thatl
+ if(off < 0) false
+ else {
+ var i = 0
+ while(i < thatl) {
+ if(xs(i+off) != that(i)) return false
+ i += 1
+ }
+ true
+ }
+ }
+
+ /** A copy of this array with one single replaced element.
+ * @param index the position of the replacement
+ * @param elem the replacing element
+ * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`.
+ * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`.
+ */
+ def updated[sealed B >: A : ClassTag](index: Int, elem: B): Array[B] = {
+ if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})")
+ val dest = toArray[B]
+ dest(index) = elem
+ dest
+ }
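+
+  // A quick usage sketch (illustrative values): the original array is left
+  // unchanged.
+  //   Array(1, 2, 3).updated(1, 9)  // Array(1, 9, 3)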
+
+ @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs)
+
+
+ /* ************************************************************************************************************
+ The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which
+ may not provide the best possible performance. We need them in `ArrayOps` because their return type
+ mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`).
+ ************************************************************************************************************ */
+
+
+ /** Computes the multiset difference between this array and another sequence.
+ *
+ * @param that the sequence of elements to remove
+ * @return a new array which contains all elements of this array
+   * except some occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
+ */
+ def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A]
+
+ /** Computes the multiset intersection between this array and another sequence.
+ *
+ * @param that the sequence of elements to intersect with.
+ * @return a new array which contains all elements of this array
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
+ */
+ def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A]
+
+ /** Groups elements in fixed size blocks by passing a "sliding window"
+ * over them (as opposed to partitioning them, as is done in grouped.)
+ * @see [[scala.collection.Iterator]], method `sliding`
+ *
+ * @param size the number of elements per group
+ * @param step the distance between the first elements of successive groups
+ * @return An iterator producing arrays of size `size`, except the
+ * last element (which may be the only element) will be truncated
+ * if there are fewer than `size` elements remaining to be grouped.
+ */
+ def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A])
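+
+  // A quick usage sketch (illustrative values): with the default step of 1
+  // the windows overlap.
+  //   Array(1, 2, 3, 4).sliding(2).toList
+  //   // List(Array(1, 2), Array(2, 3), Array(3, 4))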
+
+ /** Iterates over combinations of elements.
+ *
+ * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence.
+ *
+ * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`.
+ * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`.
+ *
+ * If there is more than one way to generate the same combination, only one will be returned.
+ *
+ * For example, the result `"xy"` arbitrarily selected one of the `x` elements.
+ *
+ * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x`
+ * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned.
+ *
+ * It is not specified which of these equal combinations is returned. It is an implementation detail
+ * that should not be relied on. For example, the combination `"xx"` does not necessarily contain
+ * the first `x` in this sequence. This behavior is observable if the elements compare equal
+ * but are not identical.
+ *
+ * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order
+ * of the original sequence, but the order in which elements were selected, by "first index";
+ * the order of each `x` element is also arbitrary.
+ *
+ * @return An Iterator which traverses the n-element combinations of this array
+ * @example {{{
+ * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println)
+ * // Array(a, b)
+ * // Array(a, c)
+ * // Array(b, b)
+ * // Array(b, c)
+ * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println)
+ * // Array(b, b)
+ * // Array(b, a)
+ * }}}
+ */
+ def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A])
+
+ /** Iterates over distinct permutations of elements.
+ *
+ * @return An Iterator which traverses the distinct permutations of this array.
+ * @example {{{
+ * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println)
+ * // Array(a, b, b)
+ * // Array(b, a, b)
+ * // Array(b, b, a)
+ * }}}
+ */
+ def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A])
+
+ // we have another overload here, so we need to duplicate this method
+ /** Tests whether this array contains the given sequence at a given index.
+ *
+ * @param that the sequence to test
+ * @param offset the index where the sequence is searched.
+ * @return `true` if the sequence `that` is contained in this array at
+ * index `offset`, otherwise `false`.
+ */
+ def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset)
+
+ // we have another overload here, so we need to duplicate this method
+ /** Tests whether this array ends with the given sequence.
+ *
+ * @param that the sequence to test
+ * @return `true` if this array has `that` as a suffix, `false` otherwise.
+ */
+ def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that)
+}
diff --git a/tests/pos-special/stdlib/collection/BitSet.scala b/tests/pos-special/stdlib/collection/BitSet.scala
new file mode 100644
index 000000000000..39c15dbe808f
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/BitSet.scala
@@ -0,0 +1,348 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import java.io.{ObjectInputStream, ObjectOutputStream}
+
+import scala.annotation.nowarn
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.mutable.Builder
+import language.experimental.captureChecking
+
+/** Base type of bitsets.
+ *
+ * This trait provides most of the operations of a `BitSet` independently of its representation.
+ * It is inherited by all concrete implementations of bitsets.
+ *
+ * @define bitsetinfo
+ * Bitsets are sets of non-negative integers which are represented as
+ * variable-size arrays of bits packed into 64-bit words. The lower bound of the memory footprint of a bitset is
+ * determined by the largest number stored in it.
+ * @define coll bitset
+ * @define Coll `BitSet`
+ */
+trait BitSet extends SortedSet[Int] with BitSetOps[BitSet] {
+ override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll)
+ override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder
+ override def empty: BitSet = bitSetFactory.empty
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "BitSet"
+ override def unsorted: Set[Int] = this
+}
+
+@SerialVersionUID(3L)
+object BitSet extends SpecificIterableFactory[Int, BitSet] {
+ private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Int] first by calling `unsorted`."
+ private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Int, ${B})]. You may want to upcast to a Set[Int] first by calling `unsorted`."
+
+ def empty: BitSet = immutable.BitSet.empty
+ def newBuilder: Builder[Int, BitSet] = immutable.BitSet.newBuilder
+ def fromSpecific(it: IterableOnce[Int]^): BitSet = immutable.BitSet.fromSpecific(it)
+
+ @SerialVersionUID(3L)
+ private[collection] abstract class SerializationProxy(@transient protected val coll: BitSet) extends Serializable {
+
+ @transient protected var elems: Array[Long] = _
+
+ private[this] def writeObject(out: ObjectOutputStream): Unit = {
+ out.defaultWriteObject()
+ val nwords = coll.nwords
+ out.writeInt(nwords)
+ var i = 0
+ while(i < nwords) {
+ out.writeLong(coll.word(i))
+ i += 1
+ }
+ }
+
+ private[this] def readObject(in: ObjectInputStream): Unit = {
+ in.defaultReadObject()
+ val nwords = in.readInt()
+ elems = new Array[Long](nwords)
+ var i = 0
+ while(i < nwords) {
+ elems(i) = in.readLong()
+ i += 1
+ }
+ }
+
+ protected[this] def readResolve(): Any
+ }
+}
+
+/** Base implementation type of bitsets */
+trait BitSetOps[+C <: BitSet with BitSetOps[C]]
+ extends SortedSetOps[Int, SortedSet, C] { self =>
+ import BitSetOps._
+
+ def bitSetFactory: SpecificIterableFactory[Int, C]
+
+ def unsorted: Set[Int]
+
+ final def ordering: Ordering[Int] = Ordering.Int
+
+ /** The number of words (each with 64 bits) making up the set */
+ protected[collection] def nwords: Int
+
+  /** The word at index `idx`, or 0L if outside the range of the set
+ * '''Note:''' requires `idx >= 0`
+ */
+ protected[collection] def word(idx: Int): Long
+
+ /** Creates a new set of this kind from an array of longs
+ */
+ protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): C
+
+ def contains(elem: Int): Boolean =
+ 0 <= elem && (word(elem >> LogWL) & (1L << elem)) != 0L
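+
+  // Note: on the JVM a Long shift count is taken modulo 64, so `1L << elem`
+  // already selects the bit position within the word chosen by
+  // `elem >> LogWL`; no explicit `elem & (WordLength - 1)` is needed.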
+
+ def iterator: Iterator[Int] = iteratorFrom(0)
+
+ def iteratorFrom(start: Int): Iterator[Int] = new AbstractIterator[Int] {
+ private[this] var currentPos = if (start > 0) start >> LogWL else 0
+ private[this] var currentWord = if (start > 0) word(currentPos) & (-1L << (start & (WordLength - 1))) else word(0)
+ final override def hasNext: Boolean = {
+ while (currentWord == 0) {
+ if (currentPos + 1 >= nwords) return false
+ currentPos += 1
+ currentWord = word(currentPos)
+ }
+ true
+ }
+ final override def next(): Int = {
+ if (hasNext) {
+ val bitPos = java.lang.Long.numberOfTrailingZeros(currentWord)
+ currentWord &= currentWord - 1
+ (currentPos << LogWL) + bitPos
+ } else Iterator.empty.next()
+ }
+ }
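+
+  // A quick usage sketch (illustrative values): elements are produced in
+  // increasing order, starting at `start`.
+  //   BitSet(1, 5, 9).iteratorFrom(4).toList  // List(5, 9)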
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = {
+ val st = scala.collection.convert.impl.BitSetStepper.from(this)
+ val r =
+ if (shape.shape == StepperShape.IntShape) st
+ else {
+ assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape")
+ AnyStepper.ofParIntStepper(st)
+ }
+ r.asInstanceOf[S with EfficientSplit]
+ }
+
+ override def size: Int = {
+ var s = 0
+ var i = nwords
+ while (i > 0) {
+ i -= 1
+ s += java.lang.Long.bitCount(word(i))
+ }
+ s
+ }
+
+ override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0)
+
+ @inline private[this] def smallestInt: Int = {
+ val thisnwords = nwords
+ var i = 0
+ while(i < thisnwords) {
+ val currentWord = word(i)
+ if (currentWord != 0L) {
+ return java.lang.Long.numberOfTrailingZeros(currentWord) + (i * WordLength)
+ }
+ i += 1
+ }
+ throw new UnsupportedOperationException("empty.smallestInt")
+ }
+
+ @inline private[this] def largestInt: Int = {
+ var i = nwords - 1
+ while(i >= 0) {
+ val currentWord = word(i)
+ if (currentWord != 0L) {
+ return ((i + 1) * WordLength) - java.lang.Long.numberOfLeadingZeros(currentWord) - 1
+ }
+ i -= 1
+ }
+ throw new UnsupportedOperationException("empty.largestInt")
+ }
+
+ override def max[B >: Int](implicit ord: Ordering[B]): Int =
+ if (Ordering.Int eq ord) largestInt
+ else if (Ordering.Int isReverseOf ord) smallestInt
+ else super.max(ord)
+
+ override def min[B >: Int](implicit ord: Ordering[B]): Int =
+ if (Ordering.Int eq ord) smallestInt
+ else if (Ordering.Int isReverseOf ord) largestInt
+ else super.min(ord)
+
+ override def foreach[U](f: Int => U): Unit = {
+ /* NOTE: while loops are significantly faster as of 2.11 and
+ one major use case of bitsets is performance. Also, there
+ is nothing to do when all bits are clear, so use that as
+ the inner loop condition. */
+ var i = 0
+ while (i < nwords) {
+ var w = word(i)
+ var j = i * WordLength
+ while (w != 0L) {
+ if ((w&1L) == 1L) f(j)
+ w = w >>> 1
+ j += 1
+ }
+ i += 1
+ }
+ }
+
+ /** Creates a bit mask for this set as a new array of longs
+ */
+ def toBitMask: Array[Long] = {
+ val a = new Array[Long](nwords)
+ var i = a.length
+ while(i > 0) {
+ i -= 1
+ a(i) = word(i)
+ }
+ a
+ }
+
+ def rangeImpl(from: Option[Int], until: Option[Int]): C = {
+ val a = coll.toBitMask
+ val len = a.length
+ if (from.isDefined) {
+ val f = from.get
+ val w = f >> LogWL
+ val b = f & (WordLength - 1)
+ if (w >= 0) {
+ java.util.Arrays.fill(a, 0, math.min(w, len), 0)
+ if (b > 0 && w < len) a(w) &= ~((1L << b) - 1)
+ }
+ }
+ if (until.isDefined) {
+ val u = until.get
+ val w = u >> LogWL
+ val b = u & (WordLength - 1)
+ if (w < len) {
+ java.util.Arrays.fill(a, math.max(w + 1, 0), len, 0)
+ if (w >= 0) a(w) &= (1L << b) - 1
+ }
+ }
+ coll.fromBitMaskNoCopy(a)
+ }
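+
+  // Worked example (sketch, not in the original source):
+  //   BitSet(1, 10, 70).rangeImpl(Some(5), Some(64)) == BitSet(10)
+  // `from = 5` clears bits 0-4 of word 0 (dropping 1), and `until = 64`
+  // masks word 1 down to zero (dropping 70), leaving only 10.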
+
+ override def concat(other: collection.IterableOnce[Int]): C = other match {
+ case otherBitset: BitSet =>
+ val len = coll.nwords max otherBitset.nwords
+ val words = new Array[Long](len)
+ for (idx <- 0 until len)
+ words(idx) = this.word(idx) | otherBitset.word(idx)
+ fromBitMaskNoCopy(words)
+ case _ => super.concat(other)
+ }
+
+ override def intersect(other: Set[Int]): C = other match {
+ case otherBitset: BitSet =>
+ val len = coll.nwords min otherBitset.nwords
+ val words = new Array[Long](len)
+ for (idx <- 0 until len)
+ words(idx) = this.word(idx) & otherBitset.word(idx)
+ fromBitMaskNoCopy(words)
+ case _ => super.intersect(other)
+ }
+
+ abstract override def diff(other: Set[Int]): C = other match {
+ case otherBitset: BitSet =>
+ val len = coll.nwords
+ val words = new Array[Long](len)
+ for (idx <- 0 until len)
+ words(idx) = this.word(idx) & ~otherBitset.word(idx)
+ fromBitMaskNoCopy(words)
+ case _ => super.diff(other)
+ }
+
+ /** Computes the symmetric difference of this bitset and another bitset by performing
+ * a bitwise "exclusive-or".
+ *
+ * @param other the other bitset to take part in the symmetric difference.
+ * @return a bitset containing those bits of this
+ * bitset or the other bitset that are not contained in both bitsets.
+ */
+ def xor(other: BitSet): C = {
+ val len = coll.nwords max other.nwords
+ val words = new Array[Long](len)
+ for (idx <- 0 until len)
+ words(idx) = coll.word(idx) ^ other.word(idx)
+ coll.fromBitMaskNoCopy(words)
+ }
+
+ @`inline` final def ^ (other: BitSet): C = xor(other)
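+  // Example (sketch, not in the original source):
+  //   BitSet(1, 2, 3) ^ BitSet(2, 3, 4) == BitSet(1, 4)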
+
+ /**
+ * Builds a new bitset by applying a function to all elements of this bitset
+ * @param f the function to apply to each element.
+ * @return a new bitset resulting from applying the given function ''f'' to
+ * each element of this bitset and collecting the results
+ */
+ def map(f: Int => Int): C = fromSpecific(new View.Map(this, f))
+
+ def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f))
+
+ def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf))
+
+ override def partition(p: Int => Boolean): (C, C) = {
+ val left = filter(p)
+ (left, this &~ left)
+ }
+}
+
+object BitSetOps {
+
+ /* Final vals can sometimes be inlined as constants (faster) */
+ private[collection] final val LogWL = 6
+ private[collection] final val WordLength = 64
+ private[collection] final val MaxSize = (Int.MaxValue >> LogWL) + 1
+
+  /** Returns a copy of `elems` with the word at `idx` set to `w`, growing the
+   *  array if needed and trimming trailing zero words. */
+  private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = {
+ var len = elems.length
+ while (len > 0 && (elems(len - 1) == 0L || w == 0L && idx == len - 1)) len -= 1
+ var newlen = len
+ if (idx >= newlen && w != 0L) newlen = idx + 1
+ val newelems = new Array[Long](newlen)
+ Array.copy(elems, 0, newelems, 0, len)
+ if (idx < newlen) newelems(idx) = w
+ else assert(w == 0L)
+ newelems
+ }
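+
+  // Examples of the contract (sketch, not in the original source):
+  //   updateArray(Array(5L), 2, 3L)     returns Array(5L, 0L, 3L)  (grows)
+  //   updateArray(Array(5L, 8L), 1, 0L) returns Array(5L)          (trims trailing zeros)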
+
+ private[collection] def computeWordForFilter(pred: Int => Boolean, isFlipped: Boolean, oldWord: Long, wordIndex: Int): Long =
+ if (oldWord == 0L) 0L else {
+ var w = oldWord
+ val trailingZeroes = java.lang.Long.numberOfTrailingZeros(w)
+ var jmask = 1L << trailingZeroes
+ var j = wordIndex * BitSetOps.WordLength + trailingZeroes
+ val maxJ = (wordIndex + 1) * BitSetOps.WordLength - java.lang.Long.numberOfLeadingZeros(w)
+ while (j != maxJ) {
+ if ((w & jmask) != 0L) {
+ if (pred(j) == isFlipped) {
+ // j did not pass the filter here
+ w = w & ~jmask
+ }
+ }
+ jmask = jmask << 1
+ j += 1
+ }
+ w
+ }
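+
+  // Worked example (sketch, not in the original source): with
+  // pred = (i: Int) => i % 2 == 0, isFlipped = false, oldWord = 0xEL
+  // (bits 1, 2, 3 set) and wordIndex = 0, bits 1 and 3 fail the predicate
+  // and are cleared, leaving 0x4L (only bit 2 set).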
+}
diff --git a/tests/pos-special/stdlib/collection/BufferedIterator.scala b/tests/pos-special/stdlib/collection/BufferedIterator.scala
new file mode 100644
index 000000000000..cca40dd31d40
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/BufferedIterator.scala
@@ -0,0 +1,32 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+/** Buffered iterators are iterators which provide a method `head`
+ * that inspects the next element without discarding it.
+ */
+trait BufferedIterator[+A] extends Iterator[A] {
+
+ /** Returns next element of iterator without advancing beyond it.
+ */
+ def head: A
+
+  /** Returns the next element of this iterator as an option, without advancing beyond it.
+    * @return  `Some` of the next element if this iterator has one,
+    *          `None` if it does not
+    */
+  def headOption: Option[A] = if (hasNext) Some(head) else None
+
+ override def buffered: this.type = this
+}
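+
+// Illustrative usage (sketch, not part of the original source):
+//   val it = Iterator(1, 2, 3).buffered
+//   it.head       // 1, does not advance the iterator
+//   it.headOption // Some(1)
+//   it.next()     // 1, now advances past the buffered element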
diff --git a/tests/pos-special/stdlib/collection/BuildFrom.scala b/tests/pos-special/stdlib/collection/BuildFrom.scala
new file mode 100644
index 000000000000..0a3cc199d4dc
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/BuildFrom.scala
@@ -0,0 +1,128 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import scala.annotation.implicitNotFound
+import scala.collection.mutable.Builder
+import scala.collection.immutable.WrappedString
+import scala.reflect.ClassTag
+import language.experimental.captureChecking
+import caps.unsafe.unsafeAssumePure
+
+/** Builds a collection of type `C` from elements of type `A` when a source collection of type `From` is available.
+ * Implicit instances of `BuildFrom` are available for all collection types.
+ *
+ * @tparam From Type of source collection
+ * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.)
+ * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.)
+ */
+@implicitNotFound(msg = "Cannot construct a collection of type ${C} with elements of type ${A} based on a collection of type ${From}.")
+trait BuildFrom[-From, -A, +C] extends Any { self =>
+ def fromSpecific(from: From)(it: IterableOnce[A]^): C
+ // !!! this is wrong, we need two versions of fromSpecific; one mapping
+ // to C^{it} when C is an Iterable, and one mapping to C when C is a Seq, Map, or Set.
+ // But that requires a large scale refactoring of BuildFrom. The unsafeAssumePure
+ // calls in this file are needed to sweep that problem under the carpet.
+
+ /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer.
+ * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */
+ def newBuilder(from: From): Builder[A, C]
+
+ @deprecated("Use newBuilder() instead of apply()", "2.13.0")
+ @`inline` def apply(from: From): Builder[A, C] = newBuilder(from)
+
+ /** Partially apply a BuildFrom to a Factory */
+ def toFactory(from: From): Factory[A, C] = new Factory[A, C] {
+ def fromSpecific(it: IterableOnce[A]^): C = self.fromSpecific(from)(it)
+ def newBuilder: Builder[A, C] = self.newBuilder(from)
+ }
+}
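+
+// Illustrative usage (sketch, not part of the original source): `BuildFrom`
+// lets generic code rebuild the source collection's type with new elements.
+// `rebuild` below is a hypothetical helper:
+//   def rebuild[From, A, C](from: From)(it: IterableOnce[A])(
+//       implicit bf: BuildFrom[From, A, C]): C = bf.fromSpecific(from)(it)
+//   rebuild(List(0))(Iterator("a", "b")) // List("a", "b"): List[String]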
+
+object BuildFrom extends BuildFromLowPriority1 {
+
+ /** Build the source collection type from a MapOps */
+ implicit def buildFromMapOps[CC[X, Y] <: Map[X, Y] with MapOps[X, Y, CC, _], K0, V0, K, V]: BuildFrom[CC[K0, V0] with Map[K0, V0], (K, V), CC[K, V] with Map[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] {
+ //TODO: Reuse a prototype instance
+ def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: MapOps[K0, V0, CC, _]).mapFactory.newBuilder[K, V]
+ def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: MapOps[K0, V0, CC, _]).mapFactory.from(it)
+ }
+
+ /** Build the source collection type from a SortedMapOps */
+ implicit def buildFromSortedMapOps[CC[X, Y] <: SortedMap[X, Y] with SortedMapOps[X, Y, CC, _], K0, V0, K : Ordering, V]: BuildFrom[CC[K0, V0] with SortedMap[K0, V0], (K, V), CC[K, V] with SortedMap[K, V]] = new BuildFrom[CC[K0, V0], (K, V), CC[K, V]] {
+ def newBuilder(from: CC[K0, V0]): Builder[(K, V), CC[K, V]] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.newBuilder[K, V]
+ def fromSpecific(from: CC[K0, V0])(it: IterableOnce[(K, V)]^): CC[K, V] = (from: SortedMapOps[K0, V0, CC, _]).sortedMapFactory.from(it)
+ }
+
+ implicit def buildFromBitSet[C <: BitSet with BitSetOps[C]]: BuildFrom[C, Int, C] =
+ new BuildFrom[C, Int, C] {
+ def fromSpecific(from: C)(it: IterableOnce[Int]^): C = from.bitSetFactory.fromSpecific(it)
+ def newBuilder(from: C): Builder[Int, C] = from.bitSetFactory.newBuilder
+ }
+
+ implicit val buildFromString: BuildFrom[String, Char, String] =
+ new BuildFrom[String, Char, String] {
+ def fromSpecific(from: String)(it: IterableOnce[Char]^): String = Factory.stringFactory.fromSpecific(it)
+ def newBuilder(from: String): Builder[Char, String] = Factory.stringFactory.newBuilder
+ }
+
+ implicit val buildFromWrappedString: BuildFrom[WrappedString, Char, WrappedString] =
+ new BuildFrom[WrappedString, Char, WrappedString] {
+ def fromSpecific(from: WrappedString)(it: IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(it)
+ def newBuilder(from: WrappedString): mutable.Builder[Char, WrappedString] = WrappedString.newBuilder
+ }
+
+ implicit def buildFromArray[sealed A : ClassTag]: BuildFrom[Array[_], A, Array[A]] =
+ new BuildFrom[Array[_], A, Array[A]] {
+ def fromSpecific(from: Array[_])(it: IterableOnce[A]^): Array[A] = Factory.arrayFactory[A].fromSpecific(it)
+ def newBuilder(from: Array[_]): Builder[A, Array[A]] = Factory.arrayFactory[A].newBuilder
+ }
+
+ implicit def buildFromView[A, sealed B]: BuildFrom[View[A], B, View[B]] =
+ new BuildFrom[View[A], B, View[B]] {
+ def fromSpecific(from: View[A])(it: IterableOnce[B]^): View[B] = View.from(it).unsafeAssumePure
+ def newBuilder(from: View[A]): Builder[B, View[B]] = View.newBuilder
+ }
+
+}
+
+trait BuildFromLowPriority1 extends BuildFromLowPriority2 {
+
+ /** Build the source collection type from an Iterable with SortedOps */
+ // Restating the upper bound of CC in the result type seems redundant, but it serves to prune the
+  // implicit search space for faster compilation and reduced chance of divergence. See the compilation
+ // test in test/junit/scala/collection/BuildFromTest.scala and discussion in https://github.com/scala/scala/pull/10209
+ implicit def buildFromSortedSetOps[CC[X] <: SortedSet[X] with SortedSetOps[X, CC, _], A0, A : Ordering]: BuildFrom[CC[A0] with SortedSet[A0], A, CC[A] with SortedSet[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+ def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.newBuilder[A]
+ def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: SortedSetOps[A0, CC, _]).sortedIterableFactory.from(it)
+ }
+
+ implicit def fallbackStringCanBuildFrom[A]: BuildFrom[String, A, immutable.IndexedSeq[A]] =
+ new BuildFrom[String, A, immutable.IndexedSeq[A]] {
+ def fromSpecific(from: String)(it: IterableOnce[A]^): immutable.IndexedSeq[A] = immutable.IndexedSeq.from(it)
+ def newBuilder(from: String): Builder[A, immutable.IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
+ }
+}
+
+trait BuildFromLowPriority2 {
+ /** Build the source collection type from an IterableOps */
+ implicit def buildFromIterableOps[CC[X] <: Iterable[X] with IterableOps[X, CC, _], A0, A]: BuildFrom[CC[A0], A, CC[A]] = new BuildFrom[CC[A0], A, CC[A]] {
+ //TODO: Reuse a prototype instance
+ def newBuilder(from: CC[A0]): Builder[A, CC[A]] = (from: IterableOps[A0, CC, _]).iterableFactory.newBuilder[A]
+ def fromSpecific(from: CC[A0])(it: IterableOnce[A]^): CC[A] = (from: IterableOps[A0, CC, _]).iterableFactory.from(it).unsafeAssumePure
+ }
+
+ implicit def buildFromIterator[A]: BuildFrom[Iterator[_], A, Iterator[A]] = new BuildFrom[Iterator[_], A, Iterator[A]] {
+ def newBuilder(from: Iterator[_]): mutable.Builder[A, Iterator[A]] = Iterator.newBuilder
+ def fromSpecific(from: Iterator[_])(it: IterableOnce[A]^): Iterator[A] = Iterator.from(it).unsafeAssumePure
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/DefaultMap.scala b/tests/pos-special/stdlib/collection/DefaultMap.scala
new file mode 100644
index 000000000000..baa9eceadae5
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/DefaultMap.scala
@@ -0,0 +1,21 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+import language.experimental.captureChecking
+
+/** A default map which builds a default `immutable.Map` implementation for all
+ * transformations.
+ */
+@deprecated("DefaultMap is no longer necessary; extend Map directly", "2.13.0")
+trait DefaultMap[K, +V] extends Map[K, V]
diff --git a/tests/pos-special/stdlib/collection/Factory.scala b/tests/pos-special/stdlib/collection/Factory.scala
new file mode 100644
index 000000000000..c45776b62b9c
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Factory.scala
@@ -0,0 +1,798 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.collection.immutable.NumericRange
+import scala.language.implicitConversions
+import scala.collection.mutable.Builder
+import scala.annotation.unchecked.uncheckedVariance
+import scala.reflect.ClassTag
+import language.experimental.captureChecking
+import caps.unsafe.unsafeAssumePure
+
+/**
+ * A factory that builds a collection of type `C` with elements of type `A`.
+ *
+ * This is a general form of any factory ([[IterableFactory]],
+ * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose
+ * element type is fixed.
+ *
+ * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.)
+ * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.)
+ */
+trait Factory[-A, +C] extends Pure {
+
+ /**
+ * @return A collection of type `C` containing the same elements
+ * as the source collection `it`.
+ * @param it Source collection
+ */
+ def fromSpecific(it: IterableOnce[A]^): C
+
+ /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer.
+ * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. */
+ def newBuilder: Builder[A, C]
+}
+
+object Factory {
+
+ implicit val stringFactory: Factory[Char, String] = new StringFactory
+ @SerialVersionUID(3L)
+ private class StringFactory extends Factory[Char, String] with Serializable {
+ def fromSpecific(it: IterableOnce[Char]^): String = {
+ val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize))
+ b ++= it
+ b.result()
+ }
+ def newBuilder: Builder[Char, String] = new mutable.StringBuilder()
+ }
+
+ implicit def arrayFactory[sealed A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A]
+ @SerialVersionUID(3L)
+ private class ArrayFactory[sealed A: ClassTag] extends Factory[A, Array[A]] with Serializable {
+ def fromSpecific(it: IterableOnce[A]^): Array[A] = {
+ val b = newBuilder
+ b.sizeHint(scala.math.max(0, it.knownSize))
+ b ++= it
+ b.result()
+ }
+ def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A]
+ }
+
+}
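+
+// Illustrative usage (sketch, not part of the original source):
+//   implicitly[Factory[Char, String]].fromSpecific(List('a', 'b')) // "ab"
+//   implicitly[Factory[Int, Array[Int]]].fromSpecific(1 to 3)      // Array(1, 2, 3)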
+
+/** Base trait for companion objects of unconstrained collection types that may require
+ * multiple traversals of a source collection to build a target collection `CC`.
+ *
+ * @tparam CC Collection type constructor (e.g. `List`)
+ * @define factoryInfo
+ * This object provides a set of operations to create $Coll values.
+ *
+ * @define coll collection
+ * @define Coll `Iterable`
+ */
+trait IterableFactory[+CC[_]] extends Serializable, Pure {
+
+ /** Creates a target $coll from an existing source collection
+ *
+ * @param source Source collection
+   * @tparam A   the type of the collection's elements
+ * @return a new $coll with the elements of `source`
+ */
+ def from[A](source: IterableOnce[A]^): CC[A]^{source}
+
+ /** An empty collection
+ * @tparam A the type of the ${coll}'s elements
+ */
+ def empty[A]: CC[A]
+
+ /** Creates a $coll with the specified elements.
+ * @tparam A the type of the ${coll}'s elements
+ * @param elems the elements of the created $coll
+ * @return a new $coll with elements `elems`
+ */
+ def apply[A](elems: A*): CC[A] = from(elems)
+
+ /** Produces a $coll containing repeated applications of a function to a start value.
+ *
+ * @param start the start value of the $coll
+ * @param len the number of elements contained in the $coll
+ * @param f the function that's repeatedly applied
+ * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...`
+ */
+ def iterate[A](start: A, len: Int)(f: A => A): CC[A]^{f} = from(new View.Iterate(start, len)(f))
+
+ /** Produces a $coll that uses a function `f` to produce elements of type `A`
+ * and update an internal state of type `S`.
+ *
+ * @param init State initial value
+ * @param f Computes the next element (or returns `None` to signal
+ * the end of the collection)
+ * @tparam A Type of the elements
+ * @tparam S Type of the internal state
+ * @return a $coll that produces elements using `f` until `f` returns `None`
+ */
+ def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A]^{f} = from(new View.Unfold(init)(f))
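+
+  // Examples on a concrete factory such as `List` (sketch, not in the
+  // original source):
+  //   List.unfold(3)(s => if (s > 0) Some((s, s - 1)) else None) // List(3, 2, 1)
+  //   List.iterate(1, 4)(_ * 2)                                  // List(1, 2, 4, 8)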
+
+  /** Produces a $coll containing a sequence of increasing integers.
+ *
+ * @param start the first element of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
+ * @return a $coll with values `start, start + 1, ..., end - 1`
+ */
+ def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one))
+
+ /** Produces a $coll containing equally spaced values in some integer interval.
+ * @param start the start value of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
+ * @param step the difference between successive elements of the $coll (must be positive or negative)
+ * @return a $coll with values `start, start + step, ...` up to, but excluding `end`
+ */
+ def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step))
+
+ /**
+ * @return A builder for $Coll objects.
+   * @tparam A  the type of the ${coll}'s elements
+ */
+ def newBuilder[A]: Builder[A, CC[A]]
+
+ /** Produces a $coll containing the results of some element computation a number of times.
+ * @param n the number of elements contained in the $coll.
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n` evaluations of `elem`.
+ */
+ def fill[A](n: Int)(elem: => A): CC[A]^{elem} = from(new View.Fill(n)(elem))
+
+ /** Produces a two-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`.
+ */
+ def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc
+ ??? // fill(n1)(fill(n2)(elem))
+
+ /** Produces a three-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`.
+ */
+ def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc
+ ??? // fill(n1)(fill(n2, n3)(elem)).unsafeAssumePure
+
+ /** Produces a four-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`.
+ */
+ def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc
+ ??? // fill(n1)(fill(n2, n3, n4)(elem))
+
+ /** Produces a five-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param n5 the number of elements in the 5th dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`.
+ */
+ def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{elem} = // !!! problem with checking rhs under cc
+ ??? // fill(n1)(fill(n2, n3, n4, n5)(elem))
+
+ /** Produces a $coll containing values of a given function over a range of integer values starting from 0.
+ * @param n The number of elements in the $coll
+ * @param f The function computing element values
+   * @return   A $coll consisting of elements `f(0), ..., f(n - 1)`
+ */
+ def tabulate[A](n: Int)(f: Int => A): CC[A]^{f} = from(new View.Tabulate(n)(f))
+
+ /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2)`
+ * for `0 <= i1 < n1` and `0 <= i2 < n2`.
+ */
+ def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc
+ ??? // tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
+
+ /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`.
+ */
+ def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc
+ ??? // tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
+
+ /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3, i4)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`.
+ */
+ def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc
+ ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
+
+ /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param n5 the number of elements in the 5th dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`.
+ */
+ def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance]^{f} = // !!! problem with checking rhs under cc
+ ??? // tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
+
+ /** Concatenates all argument collections into a single $coll.
+ *
+ * @param xss the collections that are to be concatenated.
+ * @return the concatenation of all the collections.
+ */
+ def concat[A](xss: Iterable[A]*): CC[A] = {
+ from(xss.foldLeft(View.empty[A])(_ ++ _))
+ }
+
+ implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this)
+}
+
+object IterableFactory {
+
+ /**
+ * Fixes the element type of `factory` to `A`
+ * @param factory The factory to fix the element type
+ * @tparam A Type of elements
+ * @tparam CC Collection type constructor of the factory (e.g. `Seq`, `List`)
+ * @return A [[Factory]] that uses the given `factory` to build a collection of elements
+ * of type `A`
+ */
+ implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory)
+
+ @SerialVersionUID(3L)
+ private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable {
+ def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it).unsafeAssumePure
+ // unsafeAssumePure needed but is unsound, since we confuse Seq and Iterable fromSpecific
+ def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A]
+ }
+
+ implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] =
+ new BuildFrom[Any, A, CC[A]] {
+ def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] =
+ factory.from(it).unsafeAssumePure // !!! see remark in BuildFrom why this is necessary
+ def newBuilder(from: Any) = factory.newBuilder
+ }
+
+ @SerialVersionUID(3L)
+ class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] {
+ override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*)
+ def empty[A]: CC[A] = delegate.empty
+ def from[E](it: IterableOnce[E]^): CC[E]^{it} = delegate.from(it)
+ def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A]
+ }
+}
+
+// !!! Needed to add this separate trait
+trait FreeSeqFactory[+CC[A]] extends IterableFactory[CC]:
+ def from[A](source: IterableOnce[A]^): CC[A]
+ override def apply[A](elems: A*): CC[A] = from(elems)
+
+/**
+ * @tparam CC Collection type constructor (e.g. `List`)
+ */
+trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends FreeSeqFactory[CC] {
+ import SeqFactory.UnapplySeqWrapper
+ final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here?
+}
+
+object SeqFactory {
+ @SerialVersionUID(3L)
+ class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] {
+ override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*)
+ def empty[A]: CC[A] = delegate.empty
+ def from[E](it: IterableOnce[E]^): CC[E] = delegate.from(it)
+ def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A]
+ }
+
+ final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal {
+ def isEmpty: false = false
+ def get: UnapplySeqWrapper[A] = this
+ def lengthCompare(len: Int): Int = c.lengthCompare(len)
+ def apply(i: Int): A = c(i)
+ def drop(n: Int): scala.Seq[A] = c match {
+ case seq: scala.Seq[A] => seq.drop(n)
+ case _ => c.view.drop(n).toSeq
+ }
+ def toSeq: scala.Seq[A] = c.toSeq
+ }
+}
+
+trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] {
+
+ override def fill[A](n: Int)(elem: => A): CC[A] = {
+ val b = newBuilder[A]
+ b.sizeHint(n)
+ var i = 0
+ while (i < n) {
+ b += elem
+ i += 1
+ }
+ b.result()
+ }
+
+ override def tabulate[A](n: Int)(f: Int => A): CC[A] = {
+ val b = newBuilder[A]
+ b.sizeHint(n)
+ var i = 0
+ while (i < n) {
+ b += f(i)
+ i += 1
+ }
+ b.result()
+ }
+
+ override def concat[A](xss: Iterable[A]*): CC[A] = {
+ val b = newBuilder[A]
+ val knownSizes = xss.view.map(_.knownSize)
+ if (knownSizes forall (_ >= 0)) {
+ b.sizeHint(knownSizes.sum)
+ }
+ for (xs <- xss) b ++= xs
+ b.result()
+ }
+
+}
+
+/**
+ * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.)
+ * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.)
+ * @define factoryInfo
+ * This object provides a set of operations to create $Coll values.
+ *
+ * @define coll collection
+ * @define Coll `Iterable`
+ */
+trait SpecificIterableFactory[-A, +C] extends Factory[A, C] {
+ this: SpecificIterableFactory[A, C] =>
+
+ def empty: C
+ def apply(xs: A*): C = fromSpecific(xs)
+ def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem))
+ def newBuilder: Builder[A, C]
+
+ implicit def specificIterableFactory: Factory[A, C] = this
+}
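+
+// Illustrative usage (sketch, not part of the original source): `BitSet`
+// implements `SpecificIterableFactory[Int, BitSet]`, so the helpers above
+// apply directly:
+//   BitSet(1, 2, 3)   // via apply
+//   BitSet.fill(3)(7) // BitSet(7): a set collapses repeated elements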
+
+/**
+ * @define factoryInfo
+ * This object provides a set of operations to create $Coll values.
+ *
+ * @define coll collection
+ * @define Coll `Iterable`
+ */
+trait MapFactory[+CC[_, _]] extends Serializable, Pure {
+
+ /**
+ * An empty Map
+ */
+ def empty[K, V]: CC[K, V]
+
+ /**
+   * A collection of type Map generated from a given iterable object.
+ */
+ def from[K, V](it: IterableOnce[(K, V)]^): CC[K, V]
+
+ /**
+   * A collection of type Map that contains the given key/value bindings.
+ */
+ def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems)
+
+ /**
+ * The default builder for Map objects.
+ */
+ def newBuilder[K, V]: Builder[(K, V), CC[K, V]]
+
+ /**
+ * The default Factory instance for maps.
+ */
+ implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this)
+}
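+
+// Illustrative usage (sketch, not part of the original source): concrete
+// companions such as `Map` implement `MapFactory`, so
+//   Map.from(List(1 -> "a", 2 -> "b")) // Map(1 -> "a", 2 -> "b")
+//   Map(1 -> "a")                      // via apply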
+
+object MapFactory {
+
+ /**
+ * Fixes the key and value types of `factory` to `K` and `V`, respectively
+ * @param factory The factory to fix the key and value types
+ * @tparam K Type of keys
+ * @tparam V Type of values
+ * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.)
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K`
+ * and values of type `V`
+ */
+ implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory)
+
+ @SerialVersionUID(3L)
+ private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable {
+ def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it)
+ def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V]
+ }
+
+ implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] =
+ new BuildFrom[Any, (K, V), CC[K, V]] {
+ def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it)
+ def newBuilder(from: Any) = factory.newBuilder[K, V]
+ }
+
+ @SerialVersionUID(3L)
+ class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] {
+ override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*)
+ def from[K, V](it: IterableOnce[(K, V)]^): C[K, V] = delegate.from(it)
+ def empty[K, V]: C[K, V] = delegate.empty
+ def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder
+ }
+}
+
+/** Base trait for companion objects of collections that require an implicit evidence.
+ * @tparam CC Collection type constructor (e.g. `ArraySeq`)
+ * @tparam Ev Unary type constructor for the implicit evidence required for an element type
+ * (typically `Ordering` or `ClassTag`)
+ *
+ * @define factoryInfo
+ * This object provides a set of operations to create $Coll values.
+ *
+ * @define coll collection
+ * @define Coll `Iterable`
+ */
+trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable, Pure {
+
+ def from[E : Ev](it: IterableOnce[E]^): CC[E]
+
+ def empty[A : Ev]: CC[A]
+
+ def apply[A : Ev](xs: A*): CC[A] = from(xs)
+
+ /** Produces a $coll containing the results of some element computation a number of times.
+ * @param n the number of elements contained in the $coll.
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n` evaluations of `elem`.
+ */
+ def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem))
+
+ /** Produces a $coll containing values of a given function over a range of integer values starting from 0.
+ * @param n The number of elements in the $coll
+ * @param f The function computing element values
+   * @return  A $coll consisting of elements `f(0), ..., f(n - 1)`
+ */
+ def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f))
+
+ /** Produces a $coll containing repeated applications of a function to a start value.
+ *
+ * @param start the start value of the $coll
+ * @param len the number of elements contained in the $coll
+ * @param f the function that's repeatedly applied
+ * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...`
+ */
+ def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f))
+
+ /** Produces a $coll that uses a function `f` to produce elements of type `A`
+ * and update an internal state of type `S`.
+ *
+ * @param init State initial value
+ * @param f Computes the next element (or returns `None` to signal
+ * the end of the collection)
+ * @tparam A Type of the elements
+ * @tparam S Type of the internal state
+ * @return a $coll that produces elements using `f` until `f` returns `None`
+ */
+ def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f))
+
+ def newBuilder[A : Ev]: Builder[A, CC[A]]
+
+ implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this)
+}
+
+object EvidenceIterableFactory {
+
+ /**
+ * Fixes the element type of `factory` to `A`
+ * @param factory The factory to fix the element type
+ * @tparam A Type of elements
+ * @tparam CC Collection type constructor of the factory (e.g. `TreeSet`)
+ * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`)
+ * @return A [[Factory]] that uses the given `factory` to build a collection of elements
+ * of type `A`
+ */
+ implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory)
+
+ @SerialVersionUID(3L)
+ private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable {
+ def fromSpecific(it: IterableOnce[A]^): CC[A] = factory.from[A](it)
+ def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A]
+ }
+
+ implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory)
+ private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] {
+ def fromSpecific(from: Any)(it: IterableOnce[A]^): CC[A] = factory.from[A](it)
+ def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A]
+ }
+
+ @SerialVersionUID(3L)
+ class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] {
+ override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*)
+ def empty[A : Ev]: CC[A] = delegate.empty
+ def from[E : Ev](it: IterableOnce[E]^): CC[E] = delegate.from(it)
+ def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A]
+ }
+}
+
+/** Base trait for companion objects of collections that require an implicit `Ordering`.
+ * @tparam CC Collection type constructor (e.g. `SortedSet`)
+ */
+trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering]
+
+object SortedIterableFactory {
+ @SerialVersionUID(3L)
+ class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering])
+ extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC]
+}
+
+/** Base trait for companion objects of collections that require an implicit `ClassTag`.
+ * @tparam CC Collection type constructor (e.g. `ArraySeq`)
+ */
+trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] {
+
+ @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] =
+ ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays
+
+  /** Produces a $coll containing a sequence of increasing integers.
+ *
+ * @param start the first element of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
+ * @return a $coll with values `start, start + 1, ..., end - 1`
+ */
+ def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one))
+
+ /** Produces a $coll containing equally spaced values in some integer interval.
+ * @param start the start value of the $coll
+ * @param end the end value of the $coll (the first value NOT contained)
+ * @param step the difference between successive elements of the $coll (must be positive or negative)
+ * @return a $coll with values `start, start + step, ...` up to, but excluding `end`
+ */
+ def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step))
+
+ /** Produces a two-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`.
+ */
+ def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem))
+
+ /** Produces a three-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`.
+ */
+ def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem))
+
+ /** Produces a four-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`.
+ */
+ def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] =
+ fill(n1)(fill(n2, n3, n4)(elem))
+
+ /** Produces a five-dimensional $coll containing the results of some element computation a number of times.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param n5 the number of elements in the 5th dimension
+ * @param elem the element computation
+ * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`.
+ */
+ def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] =
+ fill(n1)(fill(n2, n3, n4, n5)(elem))
+
+ /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2)`
+ * for `0 <= i1 < n1` and `0 <= i2 < n2`.
+ */
+ def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] =
+ tabulate(n1)(i1 => tabulate(n2)(f(i1, _)))
+
+ /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`.
+ */
+ def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] =
+ tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _)))
+
+ /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3, i4)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`.
+ */
+ def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] =
+ tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _)))
+
+ /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
+ * @param n1 the number of elements in the 1st dimension
+ * @param n2 the number of elements in the 2nd dimension
+ * @param n3 the number of elements in the 3rd dimension
+ * @param n4 the number of elements in the 4th dimension
+ * @param n5 the number of elements in the 5th dimension
+ * @param f The function computing element values
+ * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)`
+ * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`.
+ */
+ def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] =
+ tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _)))
+}
+
+object ClassTagIterableFactory {
+ @SerialVersionUID(3L)
+ class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag])
+ extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC]
+
+ /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be
+ * sound depending on the use of the `ClassTag` by the collection implementation. */
+ @SerialVersionUID(3L)
+ class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] {
+ def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]]
+ def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]]
+ def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]]
+ override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]]
+ override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]])
+ override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]])
+ override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]])
+ override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]])
+ override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]]
+ override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]]
+ }
+}
+
+/**
+ * @tparam CC Collection type constructor (e.g. `ArraySeq`)
+ */
+trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] {
+ import SeqFactory.UnapplySeqWrapper
+ final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here?
+}
+
+object ClassTagSeqFactory {
+ @SerialVersionUID(3L)
+ class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC])
+ extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC]
+
+ /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be
+ * sound depending on the use of the `ClassTag` by the collection implementation. */
+ @SerialVersionUID(3L)
+ class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC])
+ extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC]
+}
+
+trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] {
+
+ override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = {
+ val b = newBuilder[A]
+ b.sizeHint(n)
+ var i = 0
+ while (i < n) {
+ b += elem
+ i += 1
+ }
+ b.result()
+ }
+
+ override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = {
+ val b = newBuilder[A]
+ b.sizeHint(n)
+ var i = 0
+ while (i < n) {
+ b += f(i)
+ i += 1
+ }
+ b.result()
+ }
+
+}
+
+/**
+ * @define factoryInfo
+ * This object provides a set of operations to create $Coll values.
+ *
+ * @define coll collection
+ * @define Coll `Iterable`
+ */
+trait SortedMapFactory[+CC[_, _]] extends Serializable {
+ this: SortedMapFactory[CC] =>
+
+ def empty[K : Ordering, V]: CC[K, V]
+
+ def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V]
+
+ def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems)
+
+ def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]]
+
+ implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this)
+
+}
+
+object SortedMapFactory {
+
+ /**
+ * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`,
+ * respectively.
+ *
+ * @param factory The factory to fix the key and value types
+ * @tparam K Type of keys
+ * @tparam V Type of values
+ * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`)
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of
+ * type `K` and values of type `V`
+ */
+ implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory)
+
+ @SerialVersionUID(3L)
+ private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable {
+ def fromSpecific(it: IterableOnce[(K, V)]^): CC[K, V] = factory.from[K, V](it)
+ def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V]
+ }
+
+ implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory)
+ private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] {
+ def fromSpecific(from: Any)(it: IterableOnce[(K, V)]^) = factory.from(it)
+ def newBuilder(from: Any) = factory.newBuilder[K, V]
+ }
+
+ @SerialVersionUID(3L)
+ class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] {
+ override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*)
+ def from[K : Ordering, V](it: IterableOnce[(K, V)]^): CC[K, V] = delegate.from(it)
+ def empty[K : Ordering, V]: CC[K, V] = delegate.empty
+ def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/Hashing.scala b/tests/pos-special/stdlib/collection/Hashing.scala
new file mode 100644
index 000000000000..772dcf5c65da
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Hashing.scala
@@ -0,0 +1,63 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+import language.experimental.captureChecking
+
+
+protected[collection] object Hashing {
+
+ def elemHashCode(key: Any): Int = key.##
+
+ def improve(hcode: Int): Int = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
+ }
+
+ def computeHash(key: Any): Int =
+ improve(elemHashCode(key))
+
+ /**
+ * Utility method to keep a subset of all bits in a given bitmap
+ *
+ * Example
+ * bitmap (binary): 00000001000000010000000100000001
+ * keep (binary): 1010
+ * result (binary): 00000001000000000000000100000000
+ *
+ * @param bitmap the bitmap
+ * @param keep a bitmask containing which bits to keep
+ * @return the original bitmap with all bits where keep is not 1 set to 0
+ */
+ def keepBits(bitmap: Int, keep: Int): Int = {
+ var result = 0
+ var current = bitmap
+ var kept = keep
+ while (kept != 0) {
+ // lowest remaining bit in current
+ val lsb = current ^ (current & (current - 1))
+ if ((kept & 1) != 0) {
+ // mark bit in result bitmap
+ result |= lsb
+ }
+      // clear lowest remaining one bit in current
+ current &= ~lsb
+ // look at the next kept bit
+ kept >>>= 1
+ }
+ result
+ }
+
+}
diff --git a/tests/pos-special/stdlib/collection/IndexedSeq.scala b/tests/pos-special/stdlib/collection/IndexedSeq.scala
index 6e8e2bd0dc66..a2d4cc942231 100644
--- a/tests/pos-special/stdlib/collection/IndexedSeq.scala
+++ b/tests/pos-special/stdlib/collection/IndexedSeq.scala
@@ -18,6 +18,8 @@ import scala.collection.Searching.{Found, InsertionPoint, SearchResult}
import scala.collection.Stepper.EfficientSplit
import scala.math.Ordering
import language.experimental.captureChecking
+import caps.unsafe.unsafeAssumePure
+
/** Base trait for indexed sequences that have efficient `apply` and `length` */
trait IndexedSeq[+A] extends Seq[A]
@@ -33,7 +35,7 @@ trait IndexedSeq[+A] extends Seq[A]
object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](immutable.IndexedSeq)
/** Base trait for indexed Seq operations */
-trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self =>
+trait IndexedSeqOps[+A, +CC[_], +C] extends Any with IndexedSeqViewOps[A, CC, C] with SeqOps[A, CC, C] { self =>
def iterator: Iterator[A] = view.iterator
@@ -86,7 +88,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self =>
override def dropRight(n: Int): C = fromSpecific(new IndexedSeqView.DropRight(this, n))
- override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f))
+ override def map[B](f: A => B): CC[B] = iterableFactory.from(new IndexedSeqView.Map(this, f)).unsafeAssumePure
override def reverse: C = fromSpecific(new IndexedSeqView.Reverse(this))
diff --git a/tests/pos-special/stdlib/collection/IndexedSeqView.scala b/tests/pos-special/stdlib/collection/IndexedSeqView.scala
new file mode 100644
index 000000000000..a16e06fa707d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/IndexedSeqView.scala
@@ -0,0 +1,187 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.annotation.nowarn
+import language.experimental.captureChecking
+
+trait IndexedSeqViewOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] {
+ self: IndexedSeqViewOps[A, CC, C]^ =>
+}
+
+/** View defined in terms of indexing a range */
+trait IndexedSeqView[+A] extends IndexedSeqViewOps[A, View, View[A]] with SeqView[A] {
+ self: IndexedSeqView[A]^ =>
+
+ override def view: IndexedSeqView[A]^{this} = this
+
+ @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0")
+ override def view(from: Int, until: Int): IndexedSeqView[A]^{this} = view.slice(from, until)
+
+ override def iterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewIterator(this)
+ override def reverseIterator: Iterator[A]^{this} = new IndexedSeqView.IndexedSeqViewReverseIterator(this)
+
+ override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Appended(this, elem)
+ override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new IndexedSeqView.Prepended(elem, this)
+ override def take(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Take(this, n)
+ override def takeRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.TakeRight(this, n)
+ override def drop(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Drop(this, n)
+ override def dropRight(n: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.DropRight(this, n)
+ override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new IndexedSeqView.Map(this, f)
+ override def reverse: IndexedSeqView[A]^{this} = new IndexedSeqView.Reverse(this)
+ override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new IndexedSeqView.Slice(this, from, until)
+ override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new IndexedSeqView.Map(this, { (a: A) => f(a); a})
+
+ def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix)
+ def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(this, suffix)
+ def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new IndexedSeqView.Concat(prefix, this)
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "IndexedSeqView"
+}
+
+object IndexedSeqView {
+
+ @SerialVersionUID(3L)
+ private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable {
+ this: IndexedSeqViewIterator[A]^ =>
+ private[this] var current = 0
+ private[this] var remainder = self.length
+ override def knownSize: Int = remainder
+ @inline private[this] def _hasNext: Boolean = remainder > 0
+ def hasNext: Boolean = _hasNext
+ def next(): A =
+ if (_hasNext) {
+ val r = self(current)
+ current += 1
+ remainder -= 1
+ r
+ } else Iterator.empty.next()
+
+ override def drop(n: Int): Iterator[A]^{this} = {
+ if (n > 0) {
+ current += n
+ remainder = Math.max(0, remainder - n)
+ }
+ this
+ }
+
+ override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = {
+
+ def formatRange(value : Int) : Int = if (value < 0) 0 else if (value > remainder) remainder else value
+
+ val formatFrom = formatRange(from)
+ val formatUntil = formatRange(until)
+ remainder = Math.max(0, formatUntil - formatFrom)
+ current = current + formatFrom
+ this
+ }
+ }
+ @SerialVersionUID(3L)
+ private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]^) extends AbstractIterator[A] with Serializable {
+ this: IndexedSeqViewReverseIterator[A]^ =>
+ private[this] var remainder = self.length
+ private[this] var pos = remainder - 1
+ @inline private[this] def _hasNext: Boolean = remainder > 0
+ def hasNext: Boolean = _hasNext
+ def next(): A =
+ if (_hasNext) {
+ val r = self(pos)
+ pos -= 1
+ remainder -= 1
+ r
+ } else Iterator.empty.next()
+
+ // from < 0 means don't move pos, until < 0 means don't limit remainder
+ //
+ override protected def sliceIterator(from: Int, until: Int): Iterator[A]^{this} = {
+ if (_hasNext) {
+ if (remainder <= from) remainder = 0 // exhausted by big skip
+ else if (from <= 0) { // no skip, pos is same
+ if (until >= 0 && until < remainder) remainder = until // ...limited by until
+ }
+ else {
+ pos -= from // skip ahead
+ if (until >= 0 && until < remainder) { // ...limited by until
+ if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip
+ else remainder = until - from // ...limited by until, less the skip
+ }
+ else remainder -= from // ...otherwise just less the skip
+ }
+ }
+ this
+ }
+ }
+
+ /** An `IndexedSeqViewOps` whose collection type and collection type constructor are unknown */
+ type SomeIndexedSeqOps[A] = IndexedSeqViewOps[A, AnyConstr, _]
+
+ @SerialVersionUID(3L)
+ class Id[+A](underlying: SomeIndexedSeqOps[A]^)
+ extends SeqView.Id(underlying) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)
+ extends SeqView.Appended(underlying, elem) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^)
+ extends SeqView.Prepended(elem, underlying) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)
+ extends SeqView.Concat[A](prefix, suffix) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int)
+ extends SeqView.Take(underlying, n) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)
+ extends SeqView.TakeRight(underlying, n) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)
+ extends SeqView.Drop[A](underlying, n) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)
+ extends SeqView.DropRight[A](underlying, n) with IndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)
+ extends SeqView.Map(underlying, f) with IndexedSeqView[B]
+
+ @SerialVersionUID(3L)
+ class Reverse[A](underlying: SomeIndexedSeqOps[A]^) extends SeqView.Reverse[A](underlying) with IndexedSeqView[A] {
+ override def reverse: IndexedSeqView[A] = underlying match {
+ case x: IndexedSeqView[A] => x
+ case _ => super.reverse
+ }
+ }
+
+ @SerialVersionUID(3L)
+ class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int) extends AbstractIndexedSeqView[A] {
+ protected val lo = from max 0
+ protected val hi = (until max 0) min underlying.length
+ protected val len = (hi - lo) max 0
+ @throws[IndexOutOfBoundsException]
+ def apply(i: Int): A = underlying(lo + i)
+ def length: Int = len
+ }
+}
+
+/** Explicit instantiation of the `IndexedSeqView` trait to reduce class file size in subclasses. */
+@SerialVersionUID(3L)
+abstract class AbstractIndexedSeqView[+A] extends AbstractSeqView[A] with IndexedSeqView[A]
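`IndexedSeqViewIterator.sliceIterator` above implements slicing purely by index arithmetic, clamping `from`/`until` into `[0, remainder]`, so no elements are traversed until the iterator is consumed. A small sketch of the expected behaviour:

    val it = Vector.range(0, 100).view.iterator.slice(10, 15)
    // slice only adjusted `current` and `remainder`; nothing was read yet
    it.toList   // List(10, 11, 12, 13, 14)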
diff --git a/tests/pos-special/stdlib/collection/Iterable.scala b/tests/pos-special/stdlib/collection/Iterable.scala
index 85c0debc6685..bca80d7be108 100644
--- a/tests/pos-special/stdlib/collection/Iterable.scala
+++ b/tests/pos-special/stdlib/collection/Iterable.scala
@@ -96,7 +96,7 @@ trait Iterable[+A] extends IterableOnce[A]
* @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs
* or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported.
*/
- def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that)
+ def lazyZip[B](that: Iterable[B]^): LazyZip2[A, B, this.type]^{this, that} = new LazyZip2(this, this, that)
}
/** Base trait for Iterable operations
@@ -400,7 +400,7 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable
if (i != headSize)
fail
}
- iterableFactory.from(bs.map(_.result()))
+ iterableFactory.from(bs.map(_.result())).asInstanceOf // !!! needed for cc
}
def filter(pred: A => Boolean): C^{this, pred} = fromSpecific(new View.Filter(this, pred, isFlipped = false))
@@ -902,10 +902,10 @@ object IterableOps {
protected def filtered: Iterable[A]^{this} =
new View.Filter(self, p, isFlipped = false)
- def map[B](f: A => B): CC[B]^{this} =
+ def map[B](f: A => B): CC[B]^{this, f} =
self.iterableFactory.from(new View.Map(filtered, f))
- def flatMap[B](f: A => IterableOnce[B]): CC[B]^{this} =
+ def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f} =
self.iterableFactory.from(new View.FlatMap(filtered, f))
def foreach[U](f: A => U): Unit = filtered.foreach(f)
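The `WithFilter.map`/`flatMap` results now also capture `f`, matching how `for`-comprehensions with guards desugar into `withFilter` followed by `map`. A sketch of that desugaring:

    val xs = List(1, 2, 3, 4)
    for (x <- xs if x % 2 == 0) yield x * 10   // List(20, 40)
    xs.withFilter(_ % 2 == 0).map(_ * 10)      // explicit desugared form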
diff --git a/tests/pos-special/stdlib/collection/IterableOnce.scala b/tests/pos-special/stdlib/collection/IterableOnce.scala
index 6836a3bac39a..a88be4943c58 100644
--- a/tests/pos-special/stdlib/collection/IterableOnce.scala
+++ b/tests/pos-special/stdlib/collection/IterableOnce.scala
@@ -162,10 +162,10 @@ final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) ext
def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it)
@deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0")
- def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it)
+ def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it)
@deprecated("Use .iterator.toArray", "2.13.0")
- def toArray[B >: A: ClassTag]: Array[B] = it match {
+ def toArray[sealed B >: A: ClassTag]: Array[B] = it match {
case it: Iterable[B] => it.toArray[B]
case _ => it.iterator.toArray[B]
}
@@ -272,10 +272,11 @@ object IterableOnce {
math.max(math.min(math.min(len, srcLen), destLen - start), 0)
/** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. */
- @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A],
- xs: Array[B],
- start: Int = 0,
- len: Int = Int.MaxValue): Int =
+ @inline private[collection] def copyElemsToArray[A, sealed B >: A](
+ elems: IterableOnce[A]^,
+ xs: Array[B],
+ start: Int = 0,
+ len: Int = Int.MaxValue): Int =
elems match {
case src: Iterable[A] => src.copyToArray[B](xs, start, len)
case src => src.iterator.copyToArray[B](xs, start, len)
@@ -889,7 +890,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ =>
* @note Reuse: $consumesIterator
*/
@deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4")
- def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue)
+ def copyToArray[sealed B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue)
/** Copy elements to an array, returning the number of elements written.
*
@@ -906,7 +907,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ =>
* @note Reuse: $consumesIterator
*/
@deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4")
- def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue)
+ def copyToArray[sealed B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue)
/** Copy elements to an array, returning the number of elements written.
*
@@ -923,7 +924,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ =>
*
* @note Reuse: $consumesIterator
*/
- def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = {
+ def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = {
val it = iterator
var i = start
val end = start + math.min(len, xs.length - start)
@@ -1312,13 +1313,13 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A]^ =>
@deprecated("Use .to(LazyList) instead of .toStream", "2.13.0")
@`inline` final def toStream: immutable.Stream[A] = to(immutable.Stream)
- @`inline` final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this)
+ @`inline` final def toBuffer[sealed B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this)
/** Convert collection to array.
*
* Implementation note: DO NOT call [[Array.from]] from this method.
*/
- def toArray[B >: A: ClassTag]: Array[B] =
+ def toArray[sealed B >: A: ClassTag]: Array[B] =
if (knownSize >= 0) {
val destination = new Array[B](knownSize)
copyToArray(destination, 0)
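All `copyToArray` overloads now take a `sealed` element type parameter (a capture-checking restriction on what may flow into the destination array); their runtime behaviour is unchanged. A sketch of the 3-arg form, whose return value is the number of elements written:

    val arr = new Array[Int](5)
    val written = List(1, 2, 3).copyToArray(arr, 1, 2)
    // written == 2; arr == Array(0, 1, 2, 0, 0)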
diff --git a/tests/pos-special/stdlib/collection/Iterator.scala b/tests/pos-special/stdlib/collection/Iterator.scala
index ecd8d985bbf0..90fd387069b0 100644
--- a/tests/pos-special/stdlib/collection/Iterator.scala
+++ b/tests/pos-special/stdlib/collection/Iterator.scala
@@ -17,7 +17,7 @@ import scala.annotation.tailrec
import scala.annotation.unchecked.{uncheckedVariance, uncheckedCaptures}
import scala.runtime.Statics
import language.experimental.captureChecking
-import caps.unsafe.unsafeAssumePure
+import annotation.unchecked.uncheckedCaptures
/** Iterators are data structures that allow iterating over a sequence
@@ -258,7 +258,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
}
// segment must have data, and must be complete unless they allow partial
val ok = index > 0 && (partial || index == size)
- if (ok) buffer = builder.result().asInstanceOf[Array[B]]
+ if (ok) buffer = builder.result().asInstanceOf[Array[B @uncheckedCaptures]]
else prev = null
ok
}
@@ -416,7 +416,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
}
@deprecated("Call scanRight on an Iterable instead.", "2.13.0")
- def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} = ArrayBuffer.from(this).scanRight(z)(op).iterator
+ def scanRight[B](z: B)(op: (A, B) => B): Iterator[B]^{this} =
+ ArrayBuffer.from[A @uncheckedCaptures](this).scanRight(z)(op).iterator
+ // @uncheckedCaptures is safe since the ArrayBuffer is local temporary storage
def indexWhere(p: A => Boolean, from: Int = 0): Int = {
var i = math.max(from, 0)
@@ -559,7 +561,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
*/
def distinctBy[B](f: A -> B): Iterator[A]^{this} = new AbstractIterator[A] {
- private[this] val traversedValues = mutable.HashSet.empty[B]
+ private[this] val traversedValues = mutable.HashSet.empty[B @uncheckedCaptures]
private[this] var nextElementDefined: Boolean = false
private[this] var nextElement: A = _
@@ -702,7 +704,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
*/
private[this] var status = 0
private def store(a: A): Unit = {
- if (lookahead == null) lookahead = new mutable.Queue[A]
+ if (lookahead == null) lookahead = new mutable.Queue[A @uncheckedCaptures]
lookahead += a
}
def hasNext = {
@@ -865,8 +867,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite
* @note Reuse: $consumesOneAndProducesTwoIterators
*/
def duplicate: (Iterator[A]^{this}, Iterator[A]^{this}) = {
- val gap = new scala.collection.mutable.Queue[A]
- var ahead: Iterator[A] = null
+ val gap = new scala.collection.mutable.Queue[A @uncheckedCaptures]
+ var ahead: Iterator[A @uncheckedCaptures] = null // ahead is captured by Partner, so A is not recognized as parametric
class Partner extends AbstractIterator[A] {
override def knownSize: Int = self.synchronized {
val thisSize = self.knownSize
@@ -1143,9 +1145,7 @@ object Iterator extends IterableFactory[Iterator] {
* Nested ConcatIterators are merged to avoid blowing the stack.
*/
private final class ConcatIterator[+A](val from: Iterator[A]^) extends AbstractIterator[A] {
- private var current: Iterator[A @uncheckedCaptures] = from.unsafeAssumePure
- // This should be Iterator[A]^, but fails since mutable variables can't capture cap.
- // To do better we'd need to track nesting levels for universal capabilities.
+ private var current: Iterator[A @uncheckedCaptures]^{cap[ConcatIterator]} = from
private var tail: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null
private var last: ConcatIteratorCell[A @uncheckedVariance @uncheckedCaptures] = null
private var currentHasNextChecked = false
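`duplicate` shares one `gap` queue between the two returned iterators, which is why both the queue and the `ahead` variable above need `@uncheckedCaptures`. For reference, the expected behaviour (standard 2.13 semantics):

    val (a, b) = Iterator(1, 2, 3).duplicate
    a.next()               // 1 — pulled from the source and buffered in `gap` for b
    b.next()               // 1 — served from the gap queue, not the source
    (a.toList, b.toList)   // (List(2, 3), List(2, 3))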
diff --git a/tests/pos-special/stdlib/collection/JavaConverters.scala b/tests/pos-special/stdlib/collection/JavaConverters.scala
new file mode 100644
index 000000000000..69130eae1829
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/JavaConverters.scala
@@ -0,0 +1,336 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import java.util.{concurrent => juc}
+import java.{lang => jl, util => ju}
+
+import scala.collection.convert._
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+
+/** A variety of decorators that enable converting between
+ * Scala and Java collections using extension methods, `asScala` and `asJava`.
+ *
+ * The extension methods return adapters for the corresponding API.
+ *
+ * The following conversions are supported via `asScala` and `asJava`:
+ *{{{
+ * scala.collection.Iterable <=> java.lang.Iterable
+ * scala.collection.Iterator <=> java.util.Iterator
+ * scala.collection.mutable.Buffer <=> java.util.List
+ * scala.collection.mutable.Set <=> java.util.Set
+ * scala.collection.mutable.Map <=> java.util.Map
+ * scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
+ *}}}
+ * The following conversions are supported via `asScala` and through
+ * specially-named extension methods to convert to Java collections, as shown:
+ *{{{
+ * scala.collection.Iterable <=> java.util.Collection (via asJavaCollection)
+ * scala.collection.Iterator <=> java.util.Enumeration (via asJavaEnumeration)
+ * scala.collection.mutable.Map <=> java.util.Dictionary (via asJavaDictionary)
+ *}}}
+ * In addition, the following one-way conversions are provided via `asJava`:
+ *{{{
+ * scala.collection.Seq => java.util.List
+ * scala.collection.mutable.Seq => java.util.List
+ * scala.collection.Set => java.util.Set
+ * scala.collection.Map => java.util.Map
+ *}}}
+ * The following one-way conversion is provided via `asScala`:
+ *{{{
+ * java.util.Properties => scala.collection.mutable.Map
+ *}}}
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object. For example:
+ * {{{
+ * import scala.collection.JavaConverters._
+ *
+ * val source = new scala.collection.mutable.ListBuffer[Int]
+ * val target: java.util.List[Int] = source.asJava
+ * val other: scala.collection.mutable.Buffer[Int] = target.asScala
+ * assert(source eq other)
+ * }}}
+ * Alternatively, the conversion methods have descriptive names and can be invoked explicitly.
+ * {{{
+ * scala> val vs = java.util.Arrays.asList("hi", "bye")
+ * vs: java.util.List[String] = [hi, bye]
+ *
+ * scala> val ss = asScalaIterator(vs.iterator)
+ * ss: Iterator[String] = <iterator>
+ *
+ * scala> .toList
+ * res0: List[String] = List(hi, bye)
+ *
+ * scala> val ss = asScalaBuffer(vs)
+ * ss: scala.collection.mutable.Buffer[String] = Buffer(hi, bye)
+ * }}}
+ */
+@deprecated("Use `scala.jdk.CollectionConverters` instead", "2.13.0")
+object JavaConverters extends AsJavaConverters with AsScalaConverters {
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = asJava(i)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = asJava(i)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = asJava(b)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def seqAsJavaList[A](s: Seq[A]): ju.List[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def setAsJavaSet[A](s: Set[A]): ju.Set[A] = asJava(s)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mutableMapAsJavaMap[K, V](m: mutable.Map[K, V]): ju.Map[K, V] = asJava(m)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mapAsJavaMap[K, V](m: Map[K, V]): ju.Map[K, V] = asJava(m)
+
+ @deprecated("Use `asJava` instead", "2.13.0")
+ def mapAsJavaConcurrentMap[K, V](m: concurrent.Map[K, V]): juc.ConcurrentMap[K, V] = asJava(m)
+
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def asScalaIterator[A](i: ju.Iterator[A]): Iterator[A] = asScala(i)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = asScala(i)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = asScala(i)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = asScala(i)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = asScala(l)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = asScala(s)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = asScala(m)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = asScala(m)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = asScala(p)
+
+ @deprecated("Use `asScala` instead", "2.13.0")
+ def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = asScala(p)
+
+ // Deprecated implicit conversions for code that directly imports them
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a Java `Iterator`.
+ * @see [[asJavaIterator]]
+ */
+ implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
+ new AsJava(asJavaIterator(i))
+
+ /**
+ * Adds an `asJavaEnumeration` method that implicitly converts a Scala `Iterator` to a Java `Enumeration`.
+ * @see [[asJavaEnumeration]]
+ */
+ implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
+ new AsJavaEnumeration(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterable` to a Java `Iterable`.
+ * @see [[asJavaIterable]]
+ */
+ implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
+ new AsJava(asJavaIterable(i))
+
+ /**
+ * Adds an `asJavaCollection` method that implicitly converts a Scala `Iterable` to an immutable Java `Collection`.
+ * @see [[asJavaCollection]]
+ */
+ implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
+ new AsJavaCollection(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` to a Java `List`.
+ * @see [[bufferAsJavaList]]
+ */
+ implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
+ new AsJava(bufferAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` to a Java `List`.
+ * @see [[mutableSeqAsJavaList]]
+ */
+ implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(mutableSeqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Seq` to a Java `List`.
+ * @see [[seqAsJavaList]]
+ */
+ implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(seqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Set` to a Java `Set`.
+ * @see [[mutableSetAsJavaSet]]
+ */
+ implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(mutableSetAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Set` to a Java `Set`.
+ * @see [[setAsJavaSet]]
+ */
+ implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(setAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Map` to a Java `Map`.
+ * @see [[mutableMapAsJavaMap]]
+ */
+ implicit def mutableMapAsJavaMapConverter[K, V](m : mutable.Map[K, V]): AsJava[ju.Map[K, V]] =
+ new AsJava(mutableMapAsJavaMap(m))
+
+ /**
+ * Adds an `asJavaDictionary` method that implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
+ * @see [[asJavaDictionary]]
+ */
+ implicit def asJavaDictionaryConverter[K, V](m : mutable.Map[K, V]): AsJavaDictionary[K, V] =
+ new AsJavaDictionary(m)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Map` to a Java `Map`.
+ * @see [[mapAsJavaMap]]
+ */
+ implicit def mapAsJavaMapConverter[K, V](m : Map[K, V]): AsJava[ju.Map[K, V]] =
+ new AsJava(mapAsJavaMap(m))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`.
+ * @see [[mapAsJavaConcurrentMap]].
+ */
+ implicit def mapAsJavaConcurrentMapConverter[K, V](m: concurrent.Map[K, V]): AsJava[juc.ConcurrentMap[K, V]] =
+ new AsJava(mapAsJavaConcurrentMap(m))
+
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterator` to a Scala `Iterator`.
+ * @see [[asScalaIterator]]
+ */
+ implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
+ new AsScala(asScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Enumeration` to a Scala `Iterator`.
+ * @see [[enumerationAsScalaIterator]]
+ */
+ implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
+ new AsScala(enumerationAsScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterable` to a Scala `Iterable`.
+ * @see [[iterableAsScalaIterable]]
+ */
+ implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
+ new AsScala(iterableAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Collection` to a Scala `Iterable`.
+ * @see [[collectionAsScalaIterable]]
+ */
+ implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
+ new AsScala(collectionAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `List` to a Scala mutable `Buffer`.
+ * @see [[asScalaBuffer]]
+ */
+ implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
+ new AsScala(asScalaBuffer(l))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Set` to a Scala mutable `Set`.
+ * @see [[asScalaSet]]
+ */
+ implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
+ new AsScala(asScalaSet(s))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala mutable `Map`.
+ * @see [[mapAsScalaMap]]
+ */
+ implicit def mapAsScalaMapConverter[K, V](m : ju.Map[K, V]): AsScala[mutable.Map[K, V]] =
+ new AsScala(mapAsScalaMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` to a Scala mutable `concurrent.Map`.
+ * @see [[mapAsScalaConcurrentMap]]
+ */
+ implicit def mapAsScalaConcurrentMapConverter[K, V](m: juc.ConcurrentMap[K, V]): AsScala[concurrent.Map[K, V]] =
+ new AsScala(mapAsScalaConcurrentMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Dictionary` to a Scala mutable `Map`.
+ * @see [[dictionaryAsScalaMap]]
+ */
+ implicit def dictionaryAsScalaMapConverter[K, V](p: ju.Dictionary[K, V]): AsScala[mutable.Map[K, V]] =
+ new AsScala(dictionaryAsScalaMap(p))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+ * @see [[propertiesAsScalaMap]]
+ */
+ implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
+ new AsScala(propertiesAsScalaMap(p))
+
+
+ /** Generic class containing the `asJava` converter method */
+ class AsJava[A](op: => A) {
+ /** Converts a Scala collection to the corresponding Java collection */
+ def asJava: A = op
+ }
+
+ /** Generic class containing the `asScala` converter method */
+ class AsScala[A](op: => A) {
+ /** Converts a Java collection to the corresponding Scala collection */
+ def asScala: A = op
+ }
+
+ /** Generic class containing the `asJavaCollection` converter method */
+ class AsJavaCollection[A](i: Iterable[A]) {
+ /** Converts a Scala `Iterable` to a Java `Collection` */
+ def asJavaCollection: ju.Collection[A] = JavaConverters.asJavaCollection(i)
+ }
+
+ /** Generic class containing the `asJavaEnumeration` converter method */
+ class AsJavaEnumeration[A](i: Iterator[A]) {
+ /** Converts a Scala `Iterator` to a Java `Enumeration` */
+ def asJavaEnumeration: ju.Enumeration[A] = JavaConverters.asJavaEnumeration(i)
+ }
+
+ /** Generic class containing the `asJavaDictionary` converter method */
+ class AsJavaDictionary[K, V](m : mutable.Map[K, V]) {
+ /** Converts a Scala `Map` to a Java `Dictionary` */
+ def asJavaDictionary: ju.Dictionary[K, V] = JavaConverters.asJavaDictionary(m)
+ }
+}
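Since the whole object is deprecated in favour of `scala.jdk.CollectionConverters`, the non-deprecated spelling of the round-trip example from the scaladoc above looks like this (same adapter semantics):

    import scala.jdk.CollectionConverters._

    val source = new scala.collection.mutable.ListBuffer[Int]
    val target: java.util.List[Int] = source.asJava
    val other: scala.collection.mutable.Buffer[Int] = target.asScala
    assert(source eq other)   // two-way conversions unwrap back to the original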
diff --git a/tests/pos-special/stdlib/collection/LazyZipOps.scala b/tests/pos-special/stdlib/collection/LazyZipOps.scala
new file mode 100644
index 000000000000..1bb4173d219f
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/LazyZipOps.scala
@@ -0,0 +1,423 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+
+/** Decorator representing lazily zipped pairs.
+ *
+ * @define coll pair
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ */
+final class LazyZip2[+El1, +El2, C1] private[collection](src: C1, coll1: Iterable[El1]^, coll2: Iterable[El2]^) {
+
+ /** Zips `that` iterable collection with an existing `LazyZip2`. The elements in each collection are
+ * not consumed until a strict operation is invoked on the returned `LazyZip3` decorator.
+ *
+ * @param that the iterable providing the third element of each eventual triple
+ * @tparam B the type of the third element in each eventual triple
+ * @return a decorator `LazyZip3` that allows strict operations to be performed on the lazily evaluated tuples or
+ * chained calls to `lazyZip`. Implicit conversion to `Iterable[(El1, El2, B)]` is also supported.
+ */
+ def lazyZip[B](that: Iterable[B]^): LazyZip3[El1, El2, B, C1]^{this, that} = new LazyZip3(src, coll1, coll2, that)
+
+ def map[B, C](f: (El1, El2) => B)(implicit bf: BuildFrom[C1, B, C]): C = {
+ bf.fromSpecific(src)(new AbstractView[B] {
+ def iterator = new AbstractIterator[B] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ def hasNext = elems1.hasNext && elems2.hasNext
+ def next() = f(elems1.next(), elems2.next())
+ }
+ override def knownSize: Int = zipKnownSize
+ override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
+ })
+ }
+
+ def flatMap[B, C](f: (El1, El2) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = {
+ bf.fromSpecific(src)(new AbstractView[B] {
+ def iterator = new AbstractIterator[B] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] var _current: Iterator[B] = Iterator.empty
+ private def current = {
+ while (!_current.hasNext && elems1.hasNext && elems2.hasNext)
+ _current = f(elems1.next(), elems2.next()).iterator
+ _current
+ }
+ def hasNext = current.hasNext
+ def next() = current.next()
+ }
+ override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
+ })
+ }
+
+ def filter[C](p: (El1, El2) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2), C]): C = {
+ bf.fromSpecific(src)(new AbstractView[(El1, El2)] {
+ def iterator = new AbstractIterator[(El1, El2)] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] var _current: (El1, El2) = _
+ private def current = {
+ while ((_current eq null) && elems1.hasNext && elems2.hasNext) {
+ val e1 = elems1.next()
+ val e2 = elems2.next()
+ if (p(e1, e2)) _current = (e1, e2)
+ }
+ _current
+ }
+ def hasNext = current ne null
+ def next() = {
+ val c = current
+ if (c ne null) {
+ _current = null
+ c
+ } else Iterator.empty.next()
+ }
+ }
+ override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = iterator.isEmpty
+ })
+ }
+
+ def exists(p: (El1, El2) => Boolean): Boolean = {
+ val elems1 = coll1.iterator
+ val elems2 = coll2.iterator
+ var res = false
+
+ while (!res && elems1.hasNext && elems2.hasNext) res = p(elems1.next(), elems2.next())
+
+ res
+ }
+
+ def forall(p: (El1, El2) => Boolean): Boolean = !exists((el1, el2) => !p(el1, el2))
+
+ def foreach[U](f: (El1, El2) => U): Unit = {
+ val elems1 = coll1.iterator
+ val elems2 = coll2.iterator
+
+ while (elems1.hasNext && elems2.hasNext) f(elems1.next(), elems2.next())
+ }
+
+ private def toIterable: View[(El1, El2)] = new AbstractView[(El1, El2)] {
+ def iterator = new AbstractIterator[(El1, El2)] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ def hasNext = elems1.hasNext && elems2.hasNext
+ def next() = (elems1.next(), elems2.next())
+ }
+ override def knownSize: Int = zipKnownSize
+ override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty
+ }
+
+ private def zipKnownSize: Int = {
+ val s1 = coll1.knownSize
+ if (s1 == 0) 0 else {
+ val s2 = coll2.knownSize
+ if (s2 == 0) 0 else s1 min s2
+ }
+ }
+
+ override def toString = s"$coll1.lazyZip($coll2)"
+}
+
+object LazyZip2 {
+ implicit def lazyZip2ToIterable[El1, El2](zipped2: LazyZip2[El1, El2, _]): View[(El1, El2)] = zipped2.toIterable
+}
+
+
+/** Decorator representing lazily zipped triples.
+ *
+ * @define coll triple
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ */
+final class LazyZip3[+El1, +El2, +El3, C1] private[collection](src: C1,
+ coll1: Iterable[El1]^,
+ coll2: Iterable[El2]^,
+ coll3: Iterable[El3]^) {
+
+ /** Zips `that` iterable collection with an existing `LazyZip3`. The elements in each collection are
+ * not consumed until a strict operation is invoked on the returned `LazyZip4` decorator.
+ *
+ * @param that the iterable providing the fourth element of each eventual 4-tuple
+ * @tparam B the type of the fourth element in each eventual 4-tuple
+ * @return a decorator `LazyZip4` that allows strict operations to be performed on the lazily evaluated tuples.
+ * Implicit conversion to `Iterable[(El1, El2, El3, B)]` is also supported.
+ */
+ def lazyZip[B](that: Iterable[B]^): LazyZip4[El1, El2, El3, B, C1]^{this, that} = new LazyZip4(src, coll1, coll2, coll3, that)
+
+ def map[B, C](f: (El1, El2, El3) => B)(implicit bf: BuildFrom[C1, B, C]): C = {
+ bf.fromSpecific(src)(new AbstractView[B] {
+ def iterator = new AbstractIterator[B] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext
+ def next() = f(elems1.next(), elems2.next(), elems3.next())
+ }
+ override def knownSize: Int = zipKnownSize
+ override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty
+ })
+ }
+
+ def flatMap[B, C](f: (El1, El2, El3) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = {
+ bf.fromSpecific(src)(new AbstractView[B] {
+ def iterator = new AbstractIterator[B] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ private[this] var _current: Iterator[B] = Iterator.empty
+ private def current = {
+ while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext)
+ _current = f(elems1.next(), elems2.next(), elems3.next()).iterator
+ _current
+ }
+ def hasNext = current.hasNext
+ def next() = current.next()
+ }
+ override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = iterator.isEmpty
+ })
+ }
+
+ def filter[C](p: (El1, El2, El3) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3), C]): C = {
+ bf.fromSpecific(src)(new AbstractView[(El1, El2, El3)] {
+ def iterator = new AbstractIterator[(El1, El2, El3)] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ private[this] var _current: (El1, El2, El3) = _
+ private def current = {
+ while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext) {
+ val e1 = elems1.next()
+ val e2 = elems2.next()
+ val e3 = elems3.next()
+ if (p(e1, e2, e3)) _current = (e1, e2, e3)
+ }
+ _current
+ }
+ def hasNext = current ne null
+ def next() = {
+ val c = current
+ if (c ne null) {
+ _current = null
+ c
+ } else Iterator.empty.next()
+ }
+ }
+ override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = iterator.isEmpty
+ })
+ }
+
+ def exists(p: (El1, El2, El3) => Boolean): Boolean = {
+ val elems1 = coll1.iterator
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+ var res = false
+
+ while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext)
+ res = p(elems1.next(), elems2.next(), elems3.next())
+
+ res
+ }
+
+ def forall(p: (El1, El2, El3) => Boolean): Boolean = !exists((el1, el2, el3) => !p(el1, el2, el3))
+
+ def foreach[U](f: (El1, El2, El3) => U): Unit = {
+ val elems1 = coll1.iterator
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+
+ while (elems1.hasNext && elems2.hasNext && elems3.hasNext)
+ f(elems1.next(), elems2.next(), elems3.next())
+ }
+
+ private def toIterable: View[(El1, El2, El3)] = new AbstractView[(El1, El2, El3)] {
+ def iterator = new AbstractIterator[(El1, El2, El3)] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext
+ def next() = (elems1.next(), elems2.next(), elems3.next())
+ }
+ override def knownSize: Int = zipKnownSize
+ override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty
+ }
+
+ private def zipKnownSize: Int = {
+ val s1 = coll1.knownSize
+ if (s1 == 0) 0 else {
+ val s2 = coll2.knownSize
+ if (s2 == 0) 0 else {
+ val s3 = coll3.knownSize
+ if (s3 == 0) 0 else s1 min s2 min s3
+ }
+ }
+ }
+
+ override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3)"
+}
+
+object LazyZip3 {
+ implicit def lazyZip3ToIterable[El1, El2, El3](zipped3: LazyZip3[El1, El2, El3, _]): View[(El1, El2, El3)] = zipped3.toIterable
+}
+
+
+
+/** Decorator representing lazily zipped 4-tuples.
+ *
+ * @define coll tuple
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ */
+final class LazyZip4[+El1, +El2, +El3, +El4, C1] private[collection](src: C1,
+ coll1: Iterable[El1]^,
+ coll2: Iterable[El2]^,
+ coll3: Iterable[El3]^,
+ coll4: Iterable[El4]^) {
+
+ def map[B, C](f: (El1, El2, El3, El4) => B)(implicit bf: BuildFrom[C1, B, C]): C = {
+ bf.fromSpecific(src)(new AbstractView[B] {
+ def iterator = new AbstractIterator[B] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ private[this] val elems4 = coll4.iterator
+ def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext
+ def next() = f(elems1.next(), elems2.next(), elems3.next(), elems4.next())
+ }
+ override def knownSize: Int = zipKnownSize
+ override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty
+ })
+ }
+
+ def flatMap[B, C](f: (El1, El2, El3, El4) => Iterable[B])(implicit bf: BuildFrom[C1, B, C]): C = {
+ bf.fromSpecific(src)(new AbstractView[B] {
+ def iterator = new AbstractIterator[B] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ private[this] val elems4 = coll4.iterator
+ private[this] var _current: Iterator[B] = Iterator.empty
+ private def current = {
+ while (!_current.hasNext && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext)
+ _current = f(elems1.next(), elems2.next(), elems3.next(), elems4.next()).iterator
+ _current
+ }
+ def hasNext = current.hasNext
+ def next() = current.next()
+ }
+ override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = iterator.isEmpty
+ })
+ }
+
+ def filter[C](p: (El1, El2, El3, El4) => Boolean)(implicit bf: BuildFrom[C1, (El1, El2, El3, El4), C]): C = {
+ bf.fromSpecific(src)(new AbstractView[(El1, El2, El3, El4)] {
+ def iterator = new AbstractIterator[(El1, El2, El3, El4)] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ private[this] val elems4 = coll4.iterator
+ private[this] var _current: (El1, El2, El3, El4) = _
+ private def current = {
+ while ((_current eq null) && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext) {
+ val e1 = elems1.next()
+ val e2 = elems2.next()
+ val e3 = elems3.next()
+ val e4 = elems4.next()
+ if (p(e1, e2, e3, e4)) _current = (e1, e2, e3, e4)
+ }
+ _current
+ }
+ def hasNext = current ne null
+ def next() = {
+ val c = current
+ if (c ne null) {
+ _current = null
+ c
+ } else Iterator.empty.next()
+ }
+ }
+ override def knownSize: Int = if (coll1.knownSize == 0 || coll2.knownSize == 0 || coll3.knownSize == 0 || coll4.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = iterator.isEmpty
+ })
+ }
+
+ def exists(p: (El1, El2, El3, El4) => Boolean): Boolean = {
+ val elems1 = coll1.iterator
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+ val elems4 = coll4.iterator
+ var res = false
+
+ while (!res && elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext)
+ res = p(elems1.next(), elems2.next(), elems3.next(), elems4.next())
+
+ res
+ }
+
+ def forall(p: (El1, El2, El3, El4) => Boolean): Boolean = !exists((el1, el2, el3, el4) => !p(el1, el2, el3, el4))
+
+ def foreach[U](f: (El1, El2, El3, El4) => U): Unit = {
+ val elems1 = coll1.iterator
+ val elems2 = coll2.iterator
+ val elems3 = coll3.iterator
+ val elems4 = coll4.iterator
+
+ while (elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext)
+ f(elems1.next(), elems2.next(), elems3.next(), elems4.next())
+ }
+
+ private def toIterable: View[(El1, El2, El3, El4)] = new AbstractView[(El1, El2, El3, El4)] {
+ def iterator = new AbstractIterator[(El1, El2, El3, El4)] {
+ private[this] val elems1 = coll1.iterator
+ private[this] val elems2 = coll2.iterator
+ private[this] val elems3 = coll3.iterator
+ private[this] val elems4 = coll4.iterator
+ def hasNext = elems1.hasNext && elems2.hasNext && elems3.hasNext && elems4.hasNext
+ def next() = (elems1.next(), elems2.next(), elems3.next(), elems4.next())
+ }
+ override def knownSize: Int = zipKnownSize
+ override def isEmpty: Boolean = coll1.isEmpty || coll2.isEmpty || coll3.isEmpty || coll4.isEmpty
+ }
+
+ private def zipKnownSize: Int = {
+ val s1 = coll1.knownSize
+ if (s1 == 0) 0 else {
+ val s2 = coll2.knownSize
+ if (s2 == 0) 0 else {
+ val s3 = coll3.knownSize
+ if (s3 == 0) 0 else {
+ val s4 = coll4.knownSize
+ if (s4 == 0) 0 else s1 min s2 min s3 min s4
+ }
+ }
+ }
+ }
+
+ override def toString = s"$coll1.lazyZip($coll2).lazyZip($coll3).lazyZip($coll4)"
+}
+
+object LazyZip4 {
+ implicit def lazyZip4ToIterable[El1, El2, El3, El4](zipped4: LazyZip4[El1, El2, El3, El4, _]): View[(El1, El2, El3, El4)] =
+ zipped4.toIterable
+}
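Because the decorators above never consume their iterators until a strict operation runs, chained `lazyZip` calls cost nothing by themselves. A usage sketch (expected results in comments):

    val xs = List(1, 2, 3)
    val ys = List("a", "b", "c")
    xs.lazyZip(ys).map((n, s) => s * n)                 // List("a", "bb", "ccc")
    xs.lazyZip(ys).lazyZip(xs).map((n, s, m) => n + s.length + m)  // List(3, 5, 7)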
diff --git a/tests/pos-special/stdlib/collection/Map.scala b/tests/pos-special/stdlib/collection/Map.scala
index ef4f915ea573..8ab25a3c13e0 100644
--- a/tests/pos-special/stdlib/collection/Map.scala
+++ b/tests/pos-special/stdlib/collection/Map.scala
@@ -18,13 +18,15 @@ import scala.collection.generic.DefaultSerializable
import scala.collection.mutable.StringBuilder
import scala.util.hashing.MurmurHash3
import language.experimental.captureChecking
+import caps.unsafe.unsafeAssumePure
/** Base Map type */
trait Map[K, +V]
extends Iterable[(K, V)]
with MapOps[K, V, Map, Map[K, V]]
with MapFactoryDefaults[K, V, Map, Iterable]
- with Equals {
+ with Equals
+ with Pure {
def mapFactory: scala.collection.MapFactory[Map] = Map
@@ -102,8 +104,9 @@ trait Map[K, +V]
trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C]
extends IterableOps[(K, V), Iterable, C]
with PartialFunction[K, V] {
+ this: MapOps[K, V, CC, C]^ =>
- override def view: MapView[K, V] = new MapView.Id(this)
+ override def view: MapView[K, V]^{this} = new MapView.Id(this)
/** Returns a [[Stepper]] for the keys of this map. See method [[stepper]]. */
def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = {
@@ -252,7 +255,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C]
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
@deprecated("Use .view.filterKeys(f). A future version will include a strict version of this method (for now, .view.filterKeys(p).toMap).", "2.13.0")
- def filterKeys(p: K => Boolean): MapView[K, V] = new MapView.FilterKeys(this, p)
+ def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p)
/** Transforms this map by applying a function to every retrieved value.
* @param f the function used to transform values of this map.
@@ -260,7 +263,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C]
* to `f(this(key))`. The resulting map wraps the original map without copying any elements.
*/
@deprecated("Use .view.mapValues(f). A future version will include a strict version of this method (for now, .view.mapValues(f).toMap).", "2.13.0")
- def mapValues[W](f: V => W): MapView[K, W] = new MapView.MapValues(this, f)
+ def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f)
/** Defines the default value computation for the map,
* returned when a key is not found
@@ -353,7 +356,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C]
@deprecated("Consider requiring an immutable Map.", "2.13.0")
@`inline` def -- (keys: IterableOnce[K]^): C = {
lazy val keysSet = keys.iterator.to(immutable.Set)
- fromSpecific(this.view.filterKeys(k => !keysSet.contains(k)))
+ fromSpecific(this.view.filterKeys(k => !keysSet.contains(k))).unsafeAssumePure
}
@deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0")
@@ -374,17 +377,17 @@ object MapOps {
*/
@SerialVersionUID(3L)
class WithFilter[K, +V, +IterableCC[_], +CC[_, _] <: IterableOps[_, AnyConstr, _]](
- self: MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _],
+ self: (MapOps[K, V, CC, _] with IterableOps[(K, V), IterableCC, _])^,
p: ((K, V)) => Boolean
) extends IterableOps.WithFilter[(K, V), IterableCC](self, p) with Serializable {
- def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] =
+ def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2]^{this, f} =
self.mapFactory.from(new View.Map(filtered, f))
- def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] =
+ def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2]^{this, f} =
self.mapFactory.from(new View.FlatMap(filtered, f))
- override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{p, q} =
+ override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC]^{this, q} =
new WithFilter[K, V, IterableCC, CC](self, (kv: (K, V)) => p(kv) && q(kv))
}
diff --git a/tests/pos-special/stdlib/collection/MapView.scala b/tests/pos-special/stdlib/collection/MapView.scala
new file mode 100644
index 000000000000..ac9e88466052
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/MapView.scala
@@ -0,0 +1,196 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import scala.annotation.nowarn
+import scala.collection.MapView.SomeMapOps
+import scala.collection.mutable.Builder
+import language.experimental.captureChecking
+import caps.unsafe.unsafeAssumePure
+
+trait MapView[K, +V]
+ extends MapOps[K, V, ({ type l[X, Y] = View[(X, Y)] })#l, View[(K, V)]]
+ with View[(K, V)] {
+ this: MapView[K, V]^ =>
+
+ override def view: MapView[K, V]^{this} = this
+
+ // Ideally this returns a `View`, but bincompat
+ /** Creates a view over all keys of this map.
+ *
+ * @return the keys of this map as a view.
+ */
+ override def keys: Iterable[K]^{this} = new MapView.Keys(this)
+
+ // Ideally this returns a `View`, but bincompat
+ /** Creates a view over all values of this map.
+ *
+ * @return the values of this map as a view.
+ */
+ override def values: Iterable[V]^{this} = new MapView.Values(this)
+
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
+ */
+ override def filterKeys(p: K => Boolean): MapView[K, V]^{this, p} = new MapView.FilterKeys(this, p)
+
+ /** Transforms this map by applying a function to every retrieved value.
+ * @param f the function used to transform values of this map.
+ * @return a map view which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ override def mapValues[W](f: V => W): MapView[K, W]^{this, f} = new MapView.MapValues(this, f)
+
+ override def filter(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, false, pred)
+
+ override def filterNot(pred: ((K, V)) => Boolean): MapView[K, V]^{this, pred} = new MapView.Filter(this, true, pred)
+
+ override def partition(p: ((K, V)) => Boolean): (MapView[K, V]^{this, p}, MapView[K, V]^{this, p}) = (filter(p), filterNot(p))
+
+ override def tapEach[U](f: ((K, V)) => U): MapView[K, V]^{this, f} = new MapView.TapEach(this, f)
+
+ def mapFactory: MapViewFactory = MapView
+
+ override def empty: MapView[K, V] = mapFactory.empty
+
+ override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, View, ({ type l[X, Y] = View[(X, Y)] })#l]^{this, p} = new MapOps.WithFilter(this, p)
+
+ override def toString: String = super[View].toString
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "MapView"
+}
+
+object MapView extends MapViewFactory {
+
+ /** An `IterableOps` whose collection type and collection type constructor are unknown */
+ type SomeIterableConstr[X, Y] = IterableOps[_, AnyConstr, _]
+ /** A `MapOps` whose collection type and collection type constructor are (mostly) unknown */
+ type SomeMapOps[K, +V] = MapOps[K, V, SomeIterableConstr, _]
+
+ @SerialVersionUID(3L)
+ object EmptyMapView extends AbstractMapView[Any, Nothing] {
+ // !!! cc problem: crash when we replace the line with
+ // private val EmptyMapView: MapView[Any, Nothing] = new AbstractMapView[Any, Nothing] {
+ override def get(key: Any): Option[Nothing] = None
+ override def iterator: Iterator[Nothing] = Iterator.empty[Nothing]
+ override def knownSize: Int = 0
+ override def isEmpty: Boolean = true
+ override def filterKeys(p: Any => Boolean): MapView[Any, Nothing] = this
+ override def mapValues[W](f: Nothing => W): MapView[Any, Nothing] = this
+ override def filter(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this
+ override def filterNot(pred: ((Any, Nothing)) => Boolean): MapView[Any, Nothing] = this
+ override def partition(p: ((Any, Nothing)) => Boolean): (MapView[Any, Nothing], MapView[Any, Nothing]) = (this, this)
+ }
+
+ @SerialVersionUID(3L)
+ class Id[K, +V](underlying: SomeMapOps[K, V]^) extends AbstractMapView[K, V] {
+ def get(key: K): Option[V] = underlying.get(key)
+ def iterator: Iterator[(K, V)]^{this} = underlying.iterator
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ // Ideally this is public, but bincompat
+ @SerialVersionUID(3L)
+ private class Keys[K](underlying: SomeMapOps[K, _]^) extends AbstractView[K] {
+ def iterator: Iterator[K]^{this} = underlying.keysIterator
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ // Ideally this is public, but bincompat
+ @SerialVersionUID(3L)
+ private class Values[+V](underlying: SomeMapOps[_, V]^) extends AbstractView[V] {
+ def iterator: Iterator[V]^{this} = underlying.valuesIterator
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ @SerialVersionUID(3L)
+ class MapValues[K, +V, +W](underlying: SomeMapOps[K, V]^, f: V => W) extends AbstractMapView[K, W] {
+ def iterator: Iterator[(K, W)]^{this} = underlying.iterator.map(kv => (kv._1, f(kv._2)))
+ def get(key: K): Option[W] = underlying.get(key).map(f)
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ @SerialVersionUID(3L)
+ class FilterKeys[K, +V](underlying: SomeMapOps[K, V]^, p: K => Boolean) extends AbstractMapView[K, V] {
+ def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filter { case (k, _) => p(k) }
+ def get(key: K): Option[V] = if (p(key)) underlying.get(key) else None
+ override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = iterator.isEmpty
+ }
+
+ @SerialVersionUID(3L)
+ class Filter[K, +V](underlying: SomeMapOps[K, V]^, isFlipped: Boolean, p: ((K, V)) => Boolean) extends AbstractMapView[K, V] {
+ def iterator: Iterator[(K, V)]^{this} = underlying.iterator.filterImpl(p, isFlipped)
+ def get(key: K): Option[V] = underlying.get(key) match {
+ case s @ Some(v) if p((key, v)) != isFlipped => s
+ case _ => None
+ }
+ override def knownSize: Int = if (underlying.knownSize == 0) 0 else super.knownSize
+ override def isEmpty: Boolean = iterator.isEmpty
+ }
+
+ @SerialVersionUID(3L)
+ class TapEach[K, +V, +U](underlying: SomeMapOps[K, V]^, f: ((K, V)) => U) extends AbstractMapView[K, V] {
+ override def get(key: K): Option[V] = {
+ underlying.get(key) match {
+ case s @ Some(v) =>
+ f((key, v))
+ s
+ case None => None
+ }
+ }
+ override def iterator: Iterator[(K, V)]^{this} = underlying.iterator.tapEach(f)
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ override def newBuilder[sealed X, sealed Y]: Builder[(X, Y), MapView[X, Y]] = mutable.HashMap.newBuilder[X, Y].mapResult(_.view)
+
+ override def empty[K, V]: MapView[K, V] = EmptyMapView.asInstanceOf[MapView[K, V]]
+
+ override def from[K, V](it: IterableOnce[(K, V)]^): View[(K, V)] =
+ View.from(it).unsafeAssumePure
+ // unsafeAssumePure needed here since MapViewFactory inherits from MapFactory,
+ // and the latter assumes maps are strict, so from's result captures nothing.
+
+ override def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it} = it match {
+ case mv: MapView[K, V] => mv
+ case other => new MapView.Id(other)
+ }
+
+ override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap)
+}
+
+trait MapViewFactory extends collection.MapFactory[({ type l[X, Y] = View[(X, Y)]})#l] {
+
+ def newBuilder[X, Y]: Builder[(X, Y), MapView[X, Y]]
+
+ def empty[X, Y]: MapView[X, Y]
+
+ def from[K, V](it: SomeMapOps[K, V]^): MapView[K, V]^{it}
+
+ override def apply[K, V](elems: (K, V)*): MapView[K, V] = from(elems.toMap)
+}
+
+/** Explicit instantiation of the `MapView` trait to reduce class file size in subclasses. */
+@SerialVersionUID(3L)
+abstract class AbstractMapView[K, +V] extends AbstractView[(K, V)] with MapView[K, V]:
+ this: AbstractMapView[K, V]^ =>
+
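All `MapView` wrappers above apply their function per access rather than up front; a sketch of `MapValues`:

    val mv = Map(1 -> "a", 2 -> "bb").view.mapValues(_.length)
    mv.get(2)   // Some(2), computed on this lookup
    mv.toMap    // Map(1 -> 1, 2 -> 2), forced here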
diff --git a/tests/pos-special/stdlib/collection/Searching.scala b/tests/pos-special/stdlib/collection/Searching.scala
new file mode 100644
index 000000000000..f5139422e24c
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Searching.scala
@@ -0,0 +1,58 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import scala.language.implicitConversions
+import scala.collection.generic.IsSeq
+import language.experimental.captureChecking
+
+object Searching {
+
+ /** The result of performing a search on a sorted sequence
+ *
+ * Example usage:
+ *
+ * {{{
+ * val list = List(1, 3, 4, 5) // list must be sorted before searching
+ * list.search(4) // Found(2)
+ * list.search(2) // InsertionPoint(1)
+ * }}}
+ *
+ * */
+ sealed abstract class SearchResult {
+ /** The index corresponding to the element searched for in the sequence, if it was found,
+ * or the index where the element would be inserted in the sequence, if it was not in the sequence */
+ def insertionPoint: Int
+ }
+
+ /** The result of performing a search on a sorted sequence, where the element was found.
+ *
+ * @param foundIndex the index corresponding to the element searched for in the sequence
+ */
+ case class Found(foundIndex: Int) extends SearchResult {
+ override def insertionPoint: Int = foundIndex
+ }
+
+ /** The result of performing a search on a sorted sequence, where the element was not found
+ *
+ * @param insertionPoint the index where the element would be inserted in the sequence
+ */
+ case class InsertionPoint(insertionPoint: Int) extends SearchResult
+
+ @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0")
+ class SearchImpl[Repr, A](private val coll: SeqOps[A, AnyConstr, _]) extends AnyVal
+
+ @deprecated("Search methods are defined directly on SeqOps and do not require scala.collection.Searching any more", "2.13.0")
+ implicit def search[Repr, A](coll: Repr)(implicit fr: IsSeq[Repr]): SearchImpl[Repr, fr.A] =
+ new SearchImpl(fr.conversion(coll))
+}
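+
+// Illustrative sketch (hypothetical usage): both `SearchResult` cases expose
+// `insertionPoint`, so callers can insert uniformly whether or not the
+// element was found.
+//   val xs = Vector(1, 3, 4, 5)
+//   xs.search(2)                                        // InsertionPoint(1)
+//   xs.patch(xs.search(2).insertionPoint, Vector(2), 0) // Vector(1, 2, 3, 4, 5)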
diff --git a/tests/pos-special/stdlib/collection/Seq.scala b/tests/pos-special/stdlib/collection/Seq.scala
index caabf6fa6436..365a1db1b849 100644
--- a/tests/pos-special/stdlib/collection/Seq.scala
+++ b/tests/pos-special/stdlib/collection/Seq.scala
@@ -18,6 +18,7 @@ import Searching.{Found, InsertionPoint, SearchResult}
import scala.annotation.nowarn
import language.experimental.captureChecking
import caps.unsafe.unsafeAssumePure
+import scala.annotation.unchecked.uncheckedCaptures
/** Base trait for sequence collections
*
@@ -77,10 +78,12 @@ object Seq extends SeqFactory.Delegate[Seq](immutable.Seq)
* @define coll sequence
* @define Coll `Seq`
*/
-trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self =>
+trait SeqOps[+A, +CC[_], +C] extends Any with SeqViewOps[A, CC, C] { self =>
override def view: SeqView[A] = new SeqView.Id[A](this)
+ def iterableFactory: FreeSeqFactory[CC]
+
/** Get the element at the specified index. This operation is provided for convenience in `Seq`. It should
* not be assumed to be efficient unless you have an `IndexedSeq`. */
@throws[IndexOutOfBoundsException]
@@ -234,7 +237,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self =>
*
* @return an iterator yielding the elements of this $coll in reversed order
*/
- def reverseIterator: Iterator[A] = reversed.iterator
+ override def reverseIterator: Iterator[A] = reversed.iterator
/** Tests whether this $coll contains the given sequence at a given index.
*
@@ -598,7 +601,8 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self =>
if (!hasNext)
Iterator.empty.next()
- val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
+ val forcedElms = new mutable.ArrayBuffer[A @uncheckedCaptures](elms.size) ++= elms
+ // uncheckedCaptures OK since used only locally
val result = (newSpecificBuilder ++= forcedElms).result()
var i = idxs.length - 2
while(i >= 0 && idxs(i) >= idxs(i+1))
@@ -889,7 +893,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self =>
* part of the result, but any following occurrences will.
*/
def diff[B >: A](that: Seq[B]): C = {
- val occ = occCounts(that)
+ val occ = occCounts[B @uncheckedCaptures](that)
fromSpecific(iterator.filter { x =>
var include = false
occ.updateWith(x) {
@@ -914,7 +918,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self =>
* in the result, but any following occurrences will be omitted.
*/
def intersect[B >: A](that: Seq[B]): C = {
- val occ = occCounts(that)
+ val occ = occCounts[B @uncheckedCaptures](that)
fromSpecific(iterator.filter { x =>
var include = true
occ.updateWith(x) {
@@ -962,7 +966,7 @@ trait SeqOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] { self =>
iterableFactory.from(new View.Updated(this, index, elem))
}
- protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = {
+ protected[collection] def occCounts[sealed B](sq: Seq[B]): mutable.Map[B, Int] = {
val occ = new mutable.HashMap[B, Int]()
for (y <- sq) occ.updateWith(y) {
case None => Some(1)
diff --git a/tests/pos-special/stdlib/collection/SeqMap.scala b/tests/pos-special/stdlib/collection/SeqMap.scala
new file mode 100644
index 000000000000..a7f2c629b61d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/SeqMap.scala
@@ -0,0 +1,41 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+import scala.annotation.nowarn
+
+/**
+ * A generic trait for ordered maps. Concrete classes have to provide
+ * functionality for the abstract methods in `SeqMap`.
+ *
+ * Note that when checking for equality, [[SeqMap]] does not take ordering
+ * into account.
+ *
+ * @tparam K the type of the keys contained in this linked map.
+ * @tparam V the type of the values associated with the keys in this linked map.
+ * @define coll immutable seq map
+ * @define Coll `immutable.SeqMap`
+ */
+
+trait SeqMap[K, +V] extends Map[K, V]
+ with MapOps[K, V, SeqMap, SeqMap[K, V]]
+ with MapFactoryDefaults[K, V, SeqMap, Iterable] {
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "SeqMap"
+
+ override def mapFactory: MapFactory[SeqMap] = SeqMap
+}
+
+object SeqMap extends MapFactory.Delegate[immutable.SeqMap](immutable.SeqMap)
+
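+// Illustrative sketch (hypothetical usage): a `SeqMap` iterates in insertion
+// order, but, as noted above, equality ignores that order.
+//   val a = SeqMap(1 -> "a", 2 -> "b")
+//   val b = SeqMap(2 -> "b", 1 -> "a")
+//   (a.keys.toList, b.keys.toList)   // (List(1, 2), List(2, 1))
+//   a == b                           // true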
diff --git a/tests/pos-special/stdlib/collection/SeqView.scala b/tests/pos-special/stdlib/collection/SeqView.scala
new file mode 100644
index 000000000000..a4ca1143f8b4
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/SeqView.scala
@@ -0,0 +1,232 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.annotation.nowarn
+import language.experimental.captureChecking
+import caps.unsafe.unsafeAssumePure
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** !!! Scala 2 difference: Need intermediate trait SeqViewOps to collect the
+ * necessary functionality over which SeqViews are defined, and at the same
+ * time allowing impure operations. Scala 2 uses SeqOps here, but SeqOps is
+ * pure, whereas SeqViews are Iterables which can be impure (for instance,
+ * mapping a SeqView with an impure function gives an impure view).
+ */
+trait SeqViewOps[+A, +CC[_], +C] extends Any with IterableOps[A, CC, C] {
+ self: SeqViewOps[A, CC, C]^ =>
+
+ def length: Int
+ def apply(x: Int): A
+ def appended[B >: A](elem: B): CC[B]^{this}
+ def prepended[B >: A](elem: B): CC[B]^{this}
+ def reverse: C^{this}
+ def sorted[B >: A](implicit ord: Ordering[B]): C^{this}
+
+ def reverseIterator: Iterator[A]^{this} = reversed.iterator
+}
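+
+// Illustrative sketch of the point above (hypothetical, under this file's
+// capture-checking rules): mapping a view with a capability-capturing
+// function yields an impure view, which a pure `SeqOps` could not express.
+//   def tag(log: String => Unit): SeqView[Int]^{log} =
+//     List(1, 2, 3).view.map { x => log(x.toString); x }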
+
+trait SeqView[+A] extends SeqViewOps[A, View, View[A]] with View[A] {
+ self: SeqView[A]^ =>
+
+ override def view: SeqView[A]^{this} = this
+
+ override def map[B](f: A => B): SeqView[B]^{this, f} = new SeqView.Map(this, f)
+ override def appended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Appended(this, elem)
+ override def prepended[B >: A](elem: B): SeqView[B]^{this} = new SeqView.Prepended(elem, this)
+ override def reverse: SeqView[A]^{this} = new SeqView.Reverse(this)
+ override def take(n: Int): SeqView[A]^{this} = new SeqView.Take(this, n)
+ override def drop(n: Int): SeqView[A]^{this} = new SeqView.Drop(this, n)
+ override def takeRight(n: Int): SeqView[A]^{this} = new SeqView.TakeRight(this, n)
+ override def dropRight(n: Int): SeqView[A]^{this} = new SeqView.DropRight(this, n)
+ override def tapEach[U](f: A => U): SeqView[A]^{this, f} = new SeqView.Map(this, { (a: A) => f(a); a })
+
+ def concat[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix)
+ def appendedAll[B >: A](suffix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(this, suffix)
+ def prependedAll[B >: A](prefix: SeqView.SomeSeqOps[B]): SeqView[B]^{this} = new SeqView.Concat(prefix, this)
+
+ override def sorted[B >: A](implicit ord: Ordering[B]): SeqView[A]^{this} = new SeqView.Sorted(this, ord)
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "SeqView"
+}
+
+object SeqView {
+
+  /** A `SeqViewOps` whose collection type and collection type constructor are unknown */
+ private type SomeSeqOps[+A] = SeqViewOps[A, AnyConstr, _]
+
+ /** A view that doesn’t apply any transformation to an underlying sequence */
+ @SerialVersionUID(3L)
+ class Id[+A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] {
+ def apply(idx: Int): A = underlying.apply(idx)
+ def length: Int = underlying.length
+ def iterator: Iterator[A]^{this} = underlying.iterator
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ @SerialVersionUID(3L)
+ class Map[+A, +B](underlying: SomeSeqOps[A]^, f: A => B) extends View.Map[A, B](underlying, f) with SeqView[B] {
+ def apply(idx: Int): B = f(underlying(idx))
+ def length: Int = underlying.length
+ }
+
+ @SerialVersionUID(3L)
+ class Appended[+A](underlying: SomeSeqOps[A]^, elem: A) extends View.Appended(underlying, elem) with SeqView[A] {
+ def apply(idx: Int): A = if (idx == underlying.length) elem else underlying(idx)
+ def length: Int = underlying.length + 1
+ }
+
+ @SerialVersionUID(3L)
+ class Prepended[+A](elem: A, underlying: SomeSeqOps[A]^) extends View.Prepended(elem, underlying) with SeqView[A] {
+ def apply(idx: Int): A = if (idx == 0) elem else underlying(idx - 1)
+ def length: Int = underlying.length + 1
+ }
+
+ @SerialVersionUID(3L)
+ class Concat[A](prefix: SomeSeqOps[A]^, suffix: SomeSeqOps[A]^) extends View.Concat[A](prefix, suffix) with SeqView[A] {
+ def apply(idx: Int): A = {
+ val l = prefix.length
+ if (idx < l) prefix(idx) else suffix(idx - l)
+ }
+ def length: Int = prefix.length + suffix.length
+ }
+
+ @SerialVersionUID(3L)
+ class Reverse[A](underlying: SomeSeqOps[A]^) extends AbstractSeqView[A] {
+ def apply(i: Int) = underlying.apply(size - 1 - i)
+ def length = underlying.size
+ def iterator: Iterator[A]^{this} = underlying.reverseIterator
+ override def knownSize: Int = underlying.knownSize
+ override def isEmpty: Boolean = underlying.isEmpty
+ }
+
+ @SerialVersionUID(3L)
+ class Take[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.Take(underlying, n) with SeqView[A] {
+ def apply(idx: Int): A = if (idx < n) {
+ underlying(idx)
+ } else {
+ throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${if (underlying.knownSize >= 0) knownSize - 1 else "unknown"})")
+ }
+ def length: Int = underlying.length min normN
+ }
+
+ @SerialVersionUID(3L)
+ class TakeRight[+A](underlying: SomeSeqOps[A]^, n: Int) extends View.TakeRight(underlying, n) with SeqView[A] {
+ private[this] val delta = (underlying.size - (n max 0)) max 0
+ def length = underlying.size - delta
+ @throws[IndexOutOfBoundsException]
+ def apply(i: Int) = underlying.apply(i + delta)
+ }
+
+ @SerialVersionUID(3L)
+ class Drop[A](underlying: SomeSeqOps[A]^, n: Int) extends View.Drop[A](underlying, n) with SeqView[A] {
+ def length = (underlying.size - normN) max 0
+ @throws[IndexOutOfBoundsException]
+ def apply(i: Int) = underlying.apply(i + normN)
+ override def drop(n: Int): SeqView[A]^{this} = new Drop(underlying, this.n + n)
+ }
+
+ @SerialVersionUID(3L)
+ class DropRight[A](underlying: SomeSeqOps[A]^, n: Int) extends View.DropRight[A](underlying, n) with SeqView[A] {
+ private[this] val len = (underlying.size - (n max 0)) max 0
+ def length = len
+ @throws[IndexOutOfBoundsException]
+ def apply(i: Int) = underlying.apply(i)
+ }
+
+ @SerialVersionUID(3L)
+ class Sorted[A, B >: A] private (private[this] var underlying: SomeSeqOps[A]^,
+ private[this] val len: Int,
+ ord: Ordering[B])
+ extends SeqView[A] {
+ outer: Sorted[A, B]^ =>
+
+ // force evaluation immediately by calling `length` so infinite collections
+ // hang on `sorted`/`sortWith`/`sortBy` rather than on arbitrary method calls
+ def this(underlying: SomeSeqOps[A]^, ord: Ordering[B]) = this(underlying, underlying.length, ord)
+
+ @SerialVersionUID(3L)
+ private[this] class ReverseSorted extends SeqView[A] {
+ private[this] lazy val _reversed = new SeqView.Reverse(_sorted)
+
+ def apply(i: Int): A = _reversed.apply(i)
+ def length: Int = len
+ def iterator: Iterator[A]^{this} = Iterator.empty ++ _reversed.iterator // very lazy
+ override def knownSize: Int = len
+ override def isEmpty: Boolean = len == 0
+ override def to[C1](factory: Factory[A, C1]): C1 = _reversed.to(factory)
+ override def reverse: SeqView[A]^{outer} = outer
+ override protected def reversed: Iterable[A] = outer.unsafeAssumePure
+
+ override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} =
+ if (ord1 == Sorted.this.ord) outer.unsafeAssumePure
+ else if (ord1.isReverseOf(Sorted.this.ord)) this
+ else new Sorted(elems, len, ord1)
+ }
+
+ @volatile private[this] var evaluated = false
+
+ private[this] lazy val _sorted: Seq[A] = {
+ val res = {
+ val len = this.len
+ if (len == 0) Nil
+ else if (len == 1) List(underlying.head)
+ else {
+ val arr = new Array[Any](len) // Array[Any] =:= Array[AnyRef]
+ underlying.copyToArray(arr)
+ java.util.Arrays.sort(arr.asInstanceOf[Array[AnyRef]], ord.asInstanceOf[Ordering[AnyRef]])
+ // casting the Array[AnyRef] to Array[A] and creating an ArraySeq from it
+ // is safe because:
+ // - the ArraySeq is immutable, and items that are not of type A
+ // cannot be added to it
+ // - we know it only contains items of type A (and if this collection
+ // contains items of another type, we'd get a CCE anyway)
+ // - the cast doesn't actually do anything in the runtime because the
+ // type of A is not known and Array[_] is Array[AnyRef]
+ immutable.ArraySeq.unsafeWrapArray(arr.asInstanceOf[Array[A @uncheckedCaptures]])
+ }
+ }
+ evaluated = true
+ underlying = null
+ res
+ }
+
+ private[this] def elems: SomeSeqOps[A]^{this} = {
+ val orig = underlying
+ if (evaluated) _sorted else orig
+ }
+
+ def apply(i: Int): A = _sorted.apply(i)
+ def length: Int = len
+ def iterator: Iterator[A]^{this} = Iterator.empty ++ _sorted.iterator // very lazy
+ override def knownSize: Int = len
+ override def isEmpty: Boolean = len == 0
+ override def to[C1](factory: Factory[A, C1]): C1 = _sorted.to(factory)
+ override def reverse: SeqView[A] = new ReverseSorted
+ // we know `_sorted` is either tiny or has efficient random access,
+ // so this is acceptable for `reversed`
+ override protected def reversed: Iterable[A] = new ReverseSorted
+
+ override def sorted[B1 >: A](implicit ord1: Ordering[B1]): SeqView[A]^{this} =
+ if (ord1 == this.ord) this
+ else if (ord1.isReverseOf(this.ord)) reverse
+ else new Sorted(elems, len, ord1)
+ }
+}
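+
+// Illustrative sketch (hypothetical usage): `sorted` on a view forces the
+// length eagerly (so infinite sequences fail fast) but sorts lazily.
+//   val v = List(3, 1, 2).view.sorted   // length computed, no sorting yet
+//   v(0)                                // forces the sort; returns 1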
+
+/** Explicit instantiation of the `SeqView` trait to reduce class file size in subclasses. */
+@SerialVersionUID(3L)
+abstract class AbstractSeqView[+A] extends AbstractView[A] with SeqView[A]
diff --git a/tests/pos-special/stdlib/collection/Set.scala b/tests/pos-special/stdlib/collection/Set.scala
new file mode 100644
index 000000000000..a9c279b82a49
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Set.scala
@@ -0,0 +1,271 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.util.hashing.MurmurHash3
+import java.lang.String
+
+import scala.annotation.nowarn
+import language.experimental.captureChecking
+
+/** Base trait for set collections.
+ */
+trait Set[A]
+ extends Iterable[A]
+ with SetOps[A, Set, Set[A]]
+ with Equals
+ with IterableFactoryDefaults[A, Set]
+ with Pure {
+ self: Set[A] =>
+
+ def canEqual(that: Any) = true
+
+ /**
+ * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if
+ * - the argument `that` is a `Set`,
+ * - the two sets have the same [[size]], and
+   *   - for every `element` in this set, `that.contains(element) == true`.
+ *
+ * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality
+   * to specific set types. The `Set` implementations in the standard library can all be compared with each
+   * other; their `canEqual` methods return `true`.
+ *
+ * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same
+ * element equivalence function in their lookup operation. For example, the element equivalence operation in a
+ * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads
+ * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2`
+ * (used for lookup in `HashSet`).
+ *
+ * {{{
+ * scala> import scala.collection.immutable._
+ * scala> val ord: Ordering[String] = _ compareToIgnoreCase _
+ *
+ * scala> TreeSet("A")(ord) == HashSet("a")
+ * val res0: Boolean = false
+ *
+ * scala> HashSet("a") == TreeSet("A")(ord)
+ * val res1: Boolean = true
+ * }}}
+ *
+ *
+ * @param that The set to which this set is compared
+ * @return `true` if the two sets are equal according to the description
+ */
+ override def equals(that: Any): Boolean =
+ (this eq that.asInstanceOf[AnyRef]) || (that match {
+ case set: Set[A @unchecked] if set.canEqual(this) =>
+ (this.size == set.size) && {
+ try this.subsetOf(set)
+ catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228
+ }
+ case _ =>
+ false
+ })
+
+ override def hashCode(): Int = MurmurHash3.setHash(this)
+
+ override def iterableFactory: IterableFactory[Set] = Set
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "Set"
+
+ override def toString(): String = super[Iterable].toString() // Because `Function1` overrides `toString` too
+}
+
+/** Base trait for set operations
+ *
+ * @define coll set
+ * @define Coll `Set`
+ */
+trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
+ extends IterableOps[A, CC, C], (A -> Boolean) { self =>
+
+ def contains(elem: A): Boolean
+
+ /** Tests if some element is contained in this set.
+ *
+ * This method is equivalent to `contains`. It allows sets to be interpreted as predicates.
+ * @param elem the element to test for membership.
+ * @return `true` if `elem` is contained in this set, `false` otherwise.
+ */
+ @`inline` final def apply(elem: A): Boolean = this.contains(elem)
+
+ /** Tests whether this set is a subset of another set.
+ *
+ * @param that the set to test.
+ * @return `true` if this set is a subset of `that`, i.e. if
+ * every element of this set is also an element of `that`.
+ */
+ def subsetOf(that: Set[A]): Boolean = this.forall(that)
+
+ /** An iterator over all subsets of this set of the given size.
+   * If the requested size is negative or greater than the size of this set, an empty iterator is returned.
+ *
+ * @param len the size of the subsets.
+ * @return the iterator.
+ */
+ def subsets(len: Int): Iterator[C] = {
+ if (len < 0 || len > size) Iterator.empty
+ else new SubsetsItr(this.to(IndexedSeq), len)
+ }
+
+ /** An iterator over all subsets of this set.
+ *
+ * @return the iterator.
+ */
+ def subsets(): Iterator[C] = new AbstractIterator[C] {
+ private[this] val elms = SetOps.this.to(IndexedSeq)
+ private[this] var len = 0
+ private[this] var itr: Iterator[C] = Iterator.empty
+
+ def hasNext = len <= elms.size || itr.hasNext
+ def next() = {
+ if (!itr.hasNext) {
+ if (len > elms.size) Iterator.empty.next()
+ else {
+ itr = new SubsetsItr(elms, len)
+ len += 1
+ }
+ }
+
+ itr.next()
+ }
+ }
+
+  /** An Iterator over all subsets of this set containing exactly `len` elements.
+    * If the underlying collection type is ordered, the subsets preserve that order.
+    * ListSet(1,2,3).subsets => {{1},{2},{3},{1,2},{1,3},{2,3},{1,2,3}}
+ *
+ * $willForceEvaluation
+ *
+ */
+ private class SubsetsItr(elms: IndexedSeq[A], len: Int) extends AbstractIterator[C] {
+ private[this] val idxs = Array.range(0, len+1)
+ private[this] var _hasNext = true
+ idxs(len) = elms.size
+
+ def hasNext = _hasNext
+ @throws[NoSuchElementException]
+ def next(): C = {
+ if (!hasNext) Iterator.empty.next()
+
+ val buf = newSpecificBuilder
+ idxs.slice(0, len) foreach (idx => buf += elms(idx))
+ val result = buf.result()
+
+ var i = len - 1
+ while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1
+
+ if (i < 0) _hasNext = false
+ else {
+ idxs(i) += 1
+ for (j <- (i+1) until len)
+ idxs(j) = idxs(j-1) + 1
+ }
+
+ result
+ }
+ }
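+
+  // Illustrative sketch (hypothetical usage) of the two `subsets` overloads:
+  //   Set(1, 2, 3).subsets(2).toList // List(Set(1, 2), Set(1, 3), Set(2, 3))
+  //   Set(1, 2).subsets().size       // 4, from the empty set up to Set(1, 2)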
+
+ /** Computes the intersection between this set and another set.
+ *
+ * @param that the set to intersect with.
+ * @return a new set consisting of all elements that are both in this
+ * set and in the given set `that`.
+ */
+ def intersect(that: Set[A]): C = this.filter(that)
+
+ /** Alias for `intersect` */
+ @`inline` final def & (that: Set[A]): C = intersect(that)
+
+ /** Computes the difference of this set and another set.
+ *
+ * @param that the set of elements to exclude.
+ * @return a set containing those elements of this
+ * set that are not also contained in the given set `that`.
+ */
+ def diff(that: Set[A]): C
+
+ /** Alias for `diff` */
+ @`inline` final def &~ (that: Set[A]): C = this diff that
+
+ @deprecated("Consider requiring an immutable Set", "2.13.0")
+ def -- (that: IterableOnce[A]): C = {
+ val toRemove = that.iterator.to(immutable.Set)
+ fromSpecific(view.filterNot(toRemove))
+ }
+
+ @deprecated("Consider requiring an immutable Set or fall back to Set.diff", "2.13.0")
+ def - (elem: A): C = diff(Set(elem))
+
+ @deprecated("Use &- with an explicit collection argument instead of - with varargs", "2.13.0")
+ def - (elem1: A, elem2: A, elems: A*): C = diff(elems.toSet + elem1 + elem2)
+
+ /** Creates a new $coll by adding all elements contained in another collection to this $coll, omitting duplicates.
+ *
+ * This method takes a collection of elements and adds all elements, omitting duplicates, into $coll.
+ *
+ * Example:
+ * {{{
+ * scala> val a = Set(1, 2) concat Set(2, 3)
+ * a: scala.collection.immutable.Set[Int] = Set(1, 2, 3)
+ * }}}
+ *
+ * @param that the collection containing the elements to add.
+ * @return a new $coll with the given elements added, omitting duplicates.
+ */
+ def concat(that: collection.IterableOnce[A]): C = this match {
+ case optimizedSet @ (_ : scala.collection.immutable.Set.Set1[A] | _: scala.collection.immutable.Set.Set2[A] | _: scala.collection.immutable.Set.Set3[A] | _: scala.collection.immutable.Set.Set4[A]) =>
+ // StrictOptimizedSetOps optimization of concat (these Sets cannot extend StrictOptimizedSetOps because of binary-incompatible return type; cf. PR #10036)
+ var result = optimizedSet.asInstanceOf[scala.collection.immutable.SetOps[A, scala.collection.immutable.Set, scala.collection.immutable.Set[A]]]
+ val it = that.iterator
+ while (it.hasNext) result = result + it.next()
+ result.asInstanceOf[C]
+ case _ => fromSpecific(that match {
+ case that: collection.Iterable[A] => new View.Concat(this, that)
+ case _ => iterator.concat(that.iterator)
+ })
+ }
+
+ @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0")
+ def + (elem: A): C = fromSpecific(new View.Appended(this, elem))
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))
+
+ /** Alias for `concat` */
+ @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that)
+
+  /** Computes the union of this set and another set.
+ *
+ * @param that the set to form the union with.
+ * @return a new set consisting of all elements that are in this
+ * set or in the given set `that`.
+ */
+ @`inline` final def union(that: Set[A]): C = concat(that)
+
+ /** Alias for `union` */
+ @`inline` final def | (that: Set[A]): C = concat(that)
+}
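+
+// Illustrative sketch (hypothetical usage) of the operator aliases above:
+//   Set(1, 2) | Set(2, 3)    // Set(1, 2, 3)   (union / concat)
+//   Set(1, 2) & Set(2, 3)    // Set(2)         (intersect)
+//   Set(1, 2) &~ Set(2, 3)   // Set(1)         (diff)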
+
+/**
+ * $factoryInfo
+ * @define coll set
+ * @define Coll `Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory.Delegate[Set](immutable.Set)
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+abstract class AbstractSet[A] extends AbstractIterable[A] with Set[A]
diff --git a/tests/pos-special/stdlib/collection/SortedMap.scala b/tests/pos-special/stdlib/collection/SortedMap.scala
new file mode 100644
index 000000000000..7b9381ebb078
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/SortedMap.scala
@@ -0,0 +1,222 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.annotation.{implicitNotFound, nowarn}
+import language.experimental.captureChecking
+
+/** A Map whose keys are sorted according to a [[scala.math.Ordering]]. */
+trait SortedMap[K, +V]
+ extends Map[K, V]
+ with SortedMapOps[K, V, SortedMap, SortedMap[K, V]]
+ with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{
+
+ def unsorted: Map[K, V] = this
+
+ def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "SortedMap"
+
+ override def equals(that: Any): Boolean = that match {
+ case _ if this eq that.asInstanceOf[AnyRef] => true
+ case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering =>
+ (sm canEqual this) &&
+ (this.size == sm.size) && {
+ val i1 = this.iterator
+ val i2 = sm.iterator
+ var allEqual = true
+ while (allEqual && i1.hasNext) {
+ val kv1 = i1.next()
+ val kv2 = i2.next()
+ allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2
+ }
+ allEqual
+ }
+ case _ => super.equals(that)
+ }
+}
+
+trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
+ extends MapOps[K, V, Map, C]
+ with SortedOps[K, C]
+ with Pure {
+
+ /** The companion object of this sorted map, providing various factory methods.
+ *
+ * @note When implementing a custom collection type and refining `CC` to the new type, this
+ * method needs to be overridden to return a factory for the new type (the compiler will
+ * issue an error otherwise).
+ */
+ def sortedMapFactory: SortedMapFactory[CC]
+
+ /** Similar to `mapFromIterable`, but returns a SortedMap collection type.
+ * Note that the return type is now `CC[K2, V2]`.
+ */
+ @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it)
+
+ def unsorted: Map[K, V]
+
+ /**
+ * Creates an iterator over all the key/value pairs
+ * contained in this map having a key greater than or
+ * equal to `start` according to the ordering of
+ * this map. x.iteratorFrom(y) is equivalent
+ * to but often more efficient than x.from(y).iterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def iteratorFrom(start: K): Iterator[(K, V)]
+
+ /**
+   * Creates an iterator over all the keys (or elements) contained in this
+ * collection greater than or equal to `start`
+ * according to the ordering of this collection. x.keysIteratorFrom(y)
+ * is equivalent to but often more efficient than
+ * x.from(y).keysIterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def keysIteratorFrom(start: K): Iterator[K]
+
+ /**
+ * Creates an iterator over all the values contained in this
+ * map that are associated with a key greater than or equal to `start`
+ * according to the ordering of this map. x.valuesIteratorFrom(y) is
+ * equivalent to but often more efficient than
+ * x.from(y).valuesIterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2)
+
+ def firstKey: K = head._1
+ def lastKey: K = last._1
+
+ /** Find the element with smallest key larger than or equal to a given key.
+ * @param key The given key.
+ * @return `None` if there is no such node.
+ */
+ def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption
+
+ /** Find the element with largest key less than a given key.
+ * @param key The given key.
+ * @return `None` if there is no such node.
+ */
+ def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption
+
+ def rangeTo(to: K): C = {
+ val i = keySet.rangeFrom(to).iterator
+ if (i.isEmpty) return coll
+ val next = i.next()
+ if (ordering.compare(next, to) == 0)
+ if (i.isEmpty) coll
+ else rangeUntil(i.next())
+ else
+ rangeUntil(next)
+ }
+
+ override def keySet: SortedSet[K] = new KeySortedSet
+
+ /** The implementation class of the set returned by `keySet` */
+ protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet {
+ def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that))
+ def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = {
+ val map = SortedMapOps.this.rangeImpl(from, until)
+ new map.KeySortedSet
+ }
+ }
+
+ /** A generic trait that is reused by sorted keyset implementations */
+ protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] =>
+ implicit def ordering: Ordering[K] = SortedMapOps.this.ordering
+ def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start)
+ }
+
+ // And finally, we add new overloads taking an ordering
+ /** Builds a new sorted map by applying a function to all elements of this $coll.
+ *
+ * @param f the function to apply to each element.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
+ */
+ def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] =
+ sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f))
+
+ /** Builds a new sorted map by applying a function to all elements of this $coll
+ * and using the elements of the resulting collections.
+ *
+ * @param f the function to apply to each element.
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ */
+ def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] =
+ sortedMapFactory.from(new View.FlatMap(this, f))
+
+ /** Builds a new sorted map by applying a partial function to all elements of this $coll
+ * on which the function is defined.
+ *
+ * @param pf the partial function which filters and maps the $coll.
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ */
+ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] =
+ sortedMapFactory.from(new View.Collect(this, pf))
+
+ override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match {
+ case it: Iterable[(K, V2)] => new View.Concat(this, it)
+ case _ => iterator.concat(suffix.iterator)
+ })(ordering)
+
+ /** Alias for `concat` */
+ @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs)
+
+ @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0")
+ override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering)
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering)
+}
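+
+// Illustrative sketch (hypothetical usage) of the navigation methods above:
+//   val m = SortedMap(1 -> "a", 3 -> "c")
+//   m.minAfter(2)    // Some((3,c)): smallest entry with key >= 2
+//   m.maxBefore(3)   // Some((1,a)): largest entry with key < 3
+//   m.rangeTo(3)     // keys <= 3: TreeMap(1 -> a, 3 -> c)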
+
+object SortedMapOps {
+ private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`."
+
+ /** Specializes `MapWithFilter` for sorted Map collections
+ *
+ * @define coll sorted map collection
+ */
+ class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]](
+ self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _],
+ p: ((K, V)) => Boolean
+ ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) {
+
+ def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] =
+ self.sortedMapFactory.from(new View.Map(filtered, f))
+
+ def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] =
+ self.sortedMapFactory.from(new View.FlatMap(filtered, f))
+
+ override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC]^{this, q} =
+ new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv))
+
+ }
+
+}
+
+@SerialVersionUID(3L)
+object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap)
diff --git a/tests/pos-special/stdlib/collection/SortedOps.scala b/tests/pos-special/stdlib/collection/SortedOps.scala
new file mode 100644
index 000000000000..16751d86d9d5
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/SortedOps.scala
@@ -0,0 +1,91 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import language.experimental.captureChecking
+
+/** Base trait for sorted collections */
+trait SortedOps[A, +C] {
+
+ def ordering: Ordering[A]
+
+ /** Returns the first key of the collection. */
+ def firstKey: A
+
+ /** Returns the last key of the collection. */
+ def lastKey: A
+
+ /** Comparison function that orders keys. */
+ @deprecated("Use ordering.compare instead", "2.13.0")
+ @deprecatedOverriding("Use ordering.compare instead", "2.13.0")
+ @inline def compare(k0: A, k1: A): Int = ordering.compare(k0, k1)
+
+ /** Creates a ranged projection of this collection. Any mutations in the
+ * ranged projection will update this collection and vice versa.
+ *
+ * Note: keys are not guaranteed to be consistent between this collection
+ * and the projection. This is the case for buffers where indexing is
+ * relative to the projection.
+ *
+ * @param from The lower-bound (inclusive) of the ranged projection.
+ * `None` if there is no lower bound.
+ * @param until The upper-bound (exclusive) of the ranged projection.
+ * `None` if there is no upper bound.
+ */
+ def rangeImpl(from: Option[A], until: Option[A]): C
+
+ /** Creates a ranged projection of this collection with both a lower-bound
+ * and an upper-bound.
+ *
+ * @param from The lower-bound (inclusive) of the ranged projection.
+ * @param until The upper-bound (exclusive) of the ranged projection.
+ */
+ def range(from: A, until: A): C = rangeImpl(Some(from), Some(until))
+
+ /** Creates a ranged projection of this collection with no upper-bound.
+ *
+ * @param from The lower-bound (inclusive) of the ranged projection.
+ */
+ @deprecated("Use rangeFrom", "2.13.0")
+ final def from(from: A): C = rangeFrom(from)
+
+ /** Creates a ranged projection of this collection with no upper-bound.
+ *
+ * @param from The lower-bound (inclusive) of the ranged projection.
+ */
+ def rangeFrom(from: A): C = rangeImpl(Some(from), None)
+
+ /** Creates a ranged projection of this collection with no lower-bound.
+ *
+ * @param until The upper-bound (exclusive) of the ranged projection.
+ */
+ @deprecated("Use rangeUntil", "2.13.0")
+ final def until(until: A): C = rangeUntil(until)
+
+ /** Creates a ranged projection of this collection with no lower-bound.
+ *
+ * @param until The upper-bound (exclusive) of the ranged projection.
+ */
+ def rangeUntil(until: A): C = rangeImpl(None, Some(until))
+
+ /** Create a range projection of this collection with no lower-bound.
+ * @param to The upper-bound (inclusive) of the ranged projection.
+ */
+ @deprecated("Use rangeTo", "2.13.0")
+ final def to(to: A): C = rangeTo(to)
+
+ /** Create a range projection of this collection with no lower-bound.
+ * @param to The upper-bound (inclusive) of the ranged projection.
+ */
+ def rangeTo(to: A): C
+}
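+
+// Illustrative sketch (hypothetical usage) of the ranged projections:
+//   val s = immutable.SortedSet(1, 2, 3, 4)
+//   s.rangeFrom(2)    // TreeSet(2, 3, 4): lower bound inclusive
+//   s.rangeUntil(3)   // TreeSet(1, 2):    upper bound exclusive
+//   s.rangeTo(3)      // TreeSet(1, 2, 3): upper bound inclusive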
diff --git a/tests/pos-special/stdlib/collection/SortedSet.scala b/tests/pos-special/stdlib/collection/SortedSet.scala
new file mode 100644
index 000000000000..fb2f879edcd2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/SortedSet.scala
@@ -0,0 +1,190 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import scala.annotation.{implicitNotFound, nowarn}
+import scala.annotation.unchecked.uncheckedVariance
+import language.experimental.captureChecking
+
+/** Base type of sorted sets */
+trait SortedSet[A] extends Set[A]
+ with SortedSetOps[A, SortedSet, SortedSet[A]]
+ with SortedSetFactoryDefaults[A, SortedSet, Set] {
+
+ def unsorted: Set[A] = this
+
+ def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix: String = "SortedSet"
+
+ override def equals(that: Any): Boolean = that match {
+ case _ if this eq that.asInstanceOf[AnyRef] => true
+ case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering =>
+ (ss canEqual this) &&
+ (this.size == ss.size) && {
+ val i1 = this.iterator
+ val i2 = ss.iterator
+ var allEqual = true
+ while (allEqual && i1.hasNext)
+ allEqual = ordering.equiv(i1.next(), i2.next())
+ allEqual
+ }
+ case _ =>
+ super.equals(that)
+ }
+
+}
+
+trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]]
+ extends SetOps[A, Set, C]
+ with SortedOps[A, C] {
+
+ /** The companion object of this sorted set, providing various factory methods.
+ *
+ * @note When implementing a custom collection type and refining `CC` to the new type, this
+ * method needs to be overridden to return a factory for the new type (the compiler will
+ * issue an error otherwise).
+ */
+ def sortedIterableFactory: SortedIterableFactory[CC]
+
+ def unsorted: Set[A]
+
+ /**
+ * Creates an iterator that contains all values from this collection
+ * greater than or equal to `start` according to the ordering of
+ * this collection. x.iteratorFrom(y) is equivalent to but will usually
+ * be more efficient than x.from(y).iterator
+ *
+ * @param start The lower-bound (inclusive) of the iterator
+ */
+ def iteratorFrom(start: A): Iterator[A]
+
+ @deprecated("Use `iteratorFrom` instead.", "2.13.0")
+ @`inline` def keysIteratorFrom(start: A): Iterator[A] = iteratorFrom(start)
+
+ def firstKey: A = head
+ def lastKey: A = last
+
+ /** Find the smallest element larger than or equal to a given key.
+ * @param key The given key.
+ * @return `None` if there is no such node.
+ */
+ def minAfter(key: A): Option[A] = rangeFrom(key).headOption
+
+ /** Find the largest element less than a given key.
+ * @param key The given key.
+ * @return `None` if there is no such node.
+ */
+ def maxBefore(key: A): Option[A] = rangeUntil(key).lastOption
+
+ override def min[B >: A](implicit ord: Ordering[B]): A =
+ if (isEmpty) throw new UnsupportedOperationException("empty.min")
+ else if (ord == ordering) head
+ else if (ord isReverseOf ordering) last
+    else super.min[B] // need the explicit type argument for it to infer the correct implicit
+
+ override def max[B >: A](implicit ord: Ordering[B]): A =
+ if (isEmpty) throw new UnsupportedOperationException("empty.max")
+ else if (ord == ordering) last
+ else if (ord isReverseOf ordering) head
+    else super.max[B] // need the explicit type argument for it to infer the correct implicit
+
+ def rangeTo(to: A): C = {
+ val i = rangeFrom(to).iterator
+ if (i.isEmpty) return coll
+ val next = i.next()
+ if (ordering.compare(next, to) == 0)
+ if (i.isEmpty) coll
+ else rangeUntil(i.next())
+ else
+ rangeUntil(next)
+ }
+
+ /** Builds a new sorted collection by applying a function to all elements of this $coll.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
+ */
+ def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] =
+ sortedIterableFactory.from(new View.Map(this, f))
+
+ /** Builds a new sorted collection by applying a function to all elements of this $coll
+ * and using the elements of the resulting collections.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ */
+ def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] =
+ sortedIterableFactory.from(new View.FlatMap(this, f))
+
+ /** Returns a $coll formed from this $coll and another iterable collection
+ * by combining corresponding elements in pairs.
+ * If one of the two collections is longer than the other, its remaining elements are ignored.
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`.
+ * The length of the returned collection is the minimum of the lengths of this $coll and `that`.
+ */
+  def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound because of VarianceNote
+ sortedIterableFactory.from(that match {
+ case that: Iterable[B] => new View.Zip(this, that)
+ case _ => iterator.zip(that)
+ })
+
+ /** Builds a new sorted collection by applying a partial function to all elements of this $coll
+ * on which the function is defined.
+ *
+ * @param pf the partial function which filters and maps the $coll.
+ * @tparam B the element type of the returned collection.
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ */
+ def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] =
+ sortedIterableFactory.from(new View.Collect(this, pf))
+}
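+
+// Illustrative sketch (hypothetical usage): transforming a sorted set needs
+// an Ordering for the result type; upcast with `unsorted` when there is none.
+//   SortedSet(1, 2, 3).map(_ * 2)             // TreeSet(2, 4, 6)
+//   SortedSet(1, 2, 3).unsorted.map(_ :: Nil) // Set(List(1), List(2), List(3))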
+
+object SortedSetOps {
+ private[collection] final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[${A}] first by calling `unsorted`."
+ private[collection] final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(${A}, ${B})]. You may want to upcast to a Set[${A}] first by calling `unsorted`."
+
+ /** Specialize `WithFilter` for sorted collections
+ *
+ * @define coll sorted collection
+ */
+ class WithFilter[+A, +IterableCC[_], +CC[X] <: SortedSet[X]](
+ self: SortedSetOps[A, CC, _] with IterableOps[A, IterableCC, _],
+ p: A => Boolean
+ ) extends IterableOps.WithFilter[A, IterableCC](self, p) {
+
+ def map[B : Ordering](f: A => B): CC[B] =
+ self.sortedIterableFactory.from(new View.Map(filtered, f))
+
+ def flatMap[B : Ordering](f: A => IterableOnce[B]): CC[B] =
+ self.sortedIterableFactory.from(new View.FlatMap(filtered, f))
+
+ override def withFilter(q: A => Boolean): WithFilter[A, IterableCC, CC]^{this, q} =
+ new WithFilter[A, IterableCC, CC](self, (a: A) => p(a) && q(a))
+ }
+
+}
+
+@SerialVersionUID(3L)
+object SortedSet extends SortedIterableFactory.Delegate[SortedSet](immutable.SortedSet)
+
diff --git a/tests/pos-special/stdlib/collection/Stepper.scala b/tests/pos-special/stdlib/collection/Stepper.scala
new file mode 100644
index 000000000000..0a0ac0075990
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/Stepper.scala
@@ -0,0 +1,378 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import java.util.function.{Consumer, DoubleConsumer, IntConsumer, LongConsumer}
+import java.util.{PrimitiveIterator, Spliterator, Iterator => JIterator}
+import java.{lang => jl}
+import language.experimental.captureChecking
+
+import scala.collection.Stepper.EfficientSplit
+
+/** Steppers exist to enable creating Java streams over Scala collections, see
+ * [[scala.jdk.StreamConverters]]. Besides that use case, they allow iterating over collections
+ * holding unboxed primitives (e.g., `Array[Int]`) without boxing the elements.
+ *
+ * Steppers have an iterator-like interface with methods `hasStep` and `nextStep()`. The difference
+ * to iterators - and the reason `Stepper` is not a subtype of `Iterator` - is that there are
+ * hand-specialized variants of `Stepper` for `Int`, `Long` and `Double` ([[IntStepper]], etc.).
+ * These enable iterating over collections holding unboxed primitives (e.g., Arrays,
+ * [[scala.jdk.Accumulator]]s) without boxing the elements.
+ *
+ * The selection of primitive types (`Int`, `Long` and `Double`) matches the hand-specialized
+ * variants of Java Streams ([[java.util.stream.Stream]], [[java.util.stream.IntStream]], etc.)
+ * and the corresponding Java Spliterators ([[java.util.Spliterator]], [[java.util.Spliterator.OfInt]], etc.).
+ *
+ * Steppers can be converted to Scala Iterators, Java Iterators and Java Spliterators. Primitive
+ * Steppers are converted to the corresponding primitive Java Iterators and Spliterators.
+ *
+ * @tparam A the element type of the Stepper
+ */
+trait Stepper[@specialized(Double, Int, Long) +A] {
+ this: Stepper[A]^ =>
+
+ /** Check if there's an element available. */
+ def hasStep: Boolean
+
+ /** Return the next element and advance the stepper */
+ def nextStep(): A
+
+  /** Split this stepper, if applicable. The elements of the current Stepper are split up between
+   * the resulting Stepper and the current one.
+ *
+ * May return `null`, in which case the current Stepper yields the same elements as before.
+ *
+ * See method `trySplit` in [[java.util.Spliterator]].
+ */
+ def trySplit(): Stepper[A]
+
+ /** Returns an estimate of the number of elements of this Stepper, or [[Long.MaxValue]]. See
+ * method `estimateSize` in [[java.util.Spliterator]].
+ */
+ def estimateSize: Long
+
+ /** Returns a set of characteristics of this Stepper and its elements. See method
+ * `characteristics` in [[java.util.Spliterator]].
+ */
+ def characteristics: Int
+
+ /** Returns a [[java.util.Spliterator]] corresponding to this Stepper.
+ *
+ * Note that the return type is `Spliterator[_]` instead of `Spliterator[A]` to allow returning
+ * a [[java.util.Spliterator.OfInt]] (which is a `Spliterator[Integer]`) in the subclass [[IntStepper]]
+ * (which is a `Stepper[Int]`).
+ */
+ def spliterator[B >: A]: Spliterator[_]
+
+ /** Returns a Java [[java.util.Iterator]] corresponding to this Stepper.
+ *
+ * Note that the return type is `Iterator[_]` instead of `Iterator[A]` to allow returning
+   * a [[java.util.PrimitiveIterator.OfInt]] (which is an `Iterator[Integer]`) in the subclass
+ * [[IntStepper]] (which is a `Stepper[Int]`).
+ */
+ def javaIterator[B >: A]: JIterator[_]
+
+ /** Returns an [[Iterator]] corresponding to this Stepper. Note that Iterators corresponding to
+ * primitive Steppers box the elements.
+ */
+ def iterator: Iterator[A] = new AbstractIterator[A] {
+ def hasNext: Boolean = hasStep
+ def next(): A = nextStep()
+ }
+}
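+
+// Illustrative sketch (hypothetical usage): for primitive element types,
+// `stepper` returns a specialized stepper, so this loop runs without boxing.
+//   val st = List(1, 2, 3).stepper   // an IntStepper
+//   var sum = 0
+//   while (st.hasStep) sum += st.nextStep()
+//   sum   // 6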
+
+object Stepper {
+ /** A marker trait that indicates that a `Stepper` can call `trySplit` with at worst O(log N) time
+ * and space complexity, and that the division is likely to be reasonably even. Steppers marked
+ * with `EfficientSplit` can be converted to parallel streams with the `asJavaParStream` method
+ * defined in [[scala.jdk.StreamConverters]].
+ */
+ trait EfficientSplit
+
+ private[collection] final def throwNSEE(): Nothing = throw new NoSuchElementException("Empty Stepper")
+
+ /* These adapter classes can wrap an AnyStepper of a numeric type into a possibly widened primitive Stepper type.
+ * This provides a basis for more efficient stream processing on unboxed values provided that the original source
+ * of the data is boxed. In other cases native implementations of the primitive stepper types should be provided
+ * (see for example IntArrayStepper and WidenedByteArrayStepper). */
+
+ private[collection] class UnboxingDoubleStepper(st: AnyStepper[Double]) extends DoubleStepper {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Double = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): DoubleStepper = {
+ val s = st.trySplit()
+ if (s == null) null else new UnboxingDoubleStepper(s)
+ }
+ }
+
+ private[collection] class UnboxingIntStepper(st: AnyStepper[Int]) extends IntStepper {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Int = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): IntStepper = {
+ val s = st.trySplit()
+ if (s == null) null else new UnboxingIntStepper(s)
+ }
+ }
+
+ private[collection] class UnboxingLongStepper(st: AnyStepper[Long]) extends LongStepper {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Long = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): LongStepper = {
+ val s = st.trySplit()
+ if (s == null) null else new UnboxingLongStepper(s)
+ }
+ }
+
+ private[collection] class UnboxingByteStepper(st: AnyStepper[Byte]) extends IntStepper {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Int = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): IntStepper = {
+ val s = st.trySplit()
+ if (s == null) null else new UnboxingByteStepper(s)
+ }
+ }
+
+ private[collection] class UnboxingCharStepper(st: AnyStepper[Char]) extends IntStepper {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Int = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): IntStepper = {
+ val s = st.trySplit()
+ if (s == null) null else new UnboxingCharStepper(s)
+ }
+ }
+
+ private[collection] class UnboxingShortStepper(st: AnyStepper[Short]) extends IntStepper {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Int = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): IntStepper = {
+ val s = st.trySplit()
+ if (s == null) null else new UnboxingShortStepper(s)
+ }
+ }
+
+ private[collection] class UnboxingFloatStepper(st: AnyStepper[Float]) extends DoubleStepper {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Double = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): DoubleStepper = {
+ val s = st.trySplit()
+ if (s == null) null else new UnboxingFloatStepper(s)
+ }
+ }
+}
+
+/** A Stepper for arbitrary element types. See [[Stepper]]. */
+trait AnyStepper[+A] extends Stepper[A] {
+ this: AnyStepper[A]^ =>
+
+ def trySplit(): AnyStepper[A]
+
+ def spliterator[B >: A]: Spliterator[B]^{this} = new AnyStepper.AnyStepperSpliterator(this)
+
+ def javaIterator[B >: A]: JIterator[B] = new JIterator[B] {
+ def hasNext: Boolean = hasStep
+ def next(): B = nextStep()
+ }
+}
+
+object AnyStepper {
+ class AnyStepperSpliterator[A](s: AnyStepper[A]^) extends Spliterator[A] {
+ def tryAdvance(c: Consumer[_ >: A]): Boolean =
+ if (s.hasStep) { c.accept(s.nextStep()); true } else false
+ def trySplit(): Spliterator[A]^{this} = {
+ val sp = s.trySplit()
+ if (sp == null) null else sp.spliterator
+ }
+ def estimateSize(): Long = s.estimateSize
+ def characteristics(): Int = s.characteristics
+ // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance
+ override def forEachRemaining(c: Consumer[_ >: A]): Unit =
+ while (s.hasStep) { c.accept(s.nextStep()) }
+ }
+
+ def ofSeqDoubleStepper(st: DoubleStepper): AnyStepper[Double] = new BoxedDoubleStepper(st)
+ def ofParDoubleStepper(st: DoubleStepper with EfficientSplit): AnyStepper[Double] with EfficientSplit = new BoxedDoubleStepper(st) with EfficientSplit
+
+ def ofSeqIntStepper(st: IntStepper): AnyStepper[Int] = new BoxedIntStepper(st)
+ def ofParIntStepper(st: IntStepper with EfficientSplit): AnyStepper[Int] with EfficientSplit = new BoxedIntStepper(st) with EfficientSplit
+
+ def ofSeqLongStepper(st: LongStepper): AnyStepper[Long] = new BoxedLongStepper(st)
+ def ofParLongStepper(st: LongStepper with EfficientSplit): AnyStepper[Long] with EfficientSplit = new BoxedLongStepper(st) with EfficientSplit
+
+ private[collection] class BoxedDoubleStepper(st: DoubleStepper) extends AnyStepper[Double] {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Double = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): AnyStepper[Double] = {
+ val s = st.trySplit()
+ if (s == null) null else new BoxedDoubleStepper(s)
+ }
+ }
+
+ private[collection] class BoxedIntStepper(st: IntStepper) extends AnyStepper[Int] {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Int = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): AnyStepper[Int] = {
+ val s = st.trySplit()
+ if (s == null) null else new BoxedIntStepper(s)
+ }
+ }
+
+ private[collection] class BoxedLongStepper(st: LongStepper) extends AnyStepper[Long] {
+ def hasStep: Boolean = st.hasStep
+ def nextStep(): Long = st.nextStep()
+ def estimateSize: Long = st.estimateSize
+ def characteristics: Int = st.characteristics
+ def trySplit(): AnyStepper[Long] = {
+ val s = st.trySplit()
+ if (s == null) null else new BoxedLongStepper(s)
+ }
+ }
+}
+
+/** A Stepper for Ints. See [[Stepper]]. */
+trait IntStepper extends Stepper[Int] {
+ this: IntStepper^ =>
+
+ def trySplit(): IntStepper
+
+ def spliterator[B >: Int]: Spliterator.OfInt^{this} = new IntStepper.IntStepperSpliterator(this)
+
+ def javaIterator[B >: Int]: PrimitiveIterator.OfInt = new PrimitiveIterator.OfInt {
+ def hasNext: Boolean = hasStep
+ def nextInt(): Int = nextStep()
+ }
+}
+object IntStepper {
+ class IntStepperSpliterator(s: IntStepper^) extends Spliterator.OfInt {
+ def tryAdvance(c: IntConsumer): Boolean =
+ if (s.hasStep) { c.accept(s.nextStep()); true } else false
+ // Override for efficiency: don't wrap the function and call the `tryAdvance` overload
+ override def tryAdvance(c: Consumer[_ >: jl.Integer]): Boolean = (c: AnyRef) match {
+ case ic: IntConsumer => tryAdvance(ic)
+ case _ => if (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())); true } else false
+ }
+ // override required for dotty#6152
+ override def trySplit(): Spliterator.OfInt^{this} = {
+ val sp = s.trySplit()
+ if (sp == null) null else sp.spliterator
+ }
+ def estimateSize(): Long = s.estimateSize
+ def characteristics(): Int = s.characteristics
+ // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance
+ override def forEachRemaining(c: IntConsumer): Unit =
+ while (s.hasStep) { c.accept(s.nextStep()) }
+ // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance
+ override def forEachRemaining(c: Consumer[_ >: jl.Integer]): Unit = (c: AnyRef) match {
+ case ic: IntConsumer => forEachRemaining(ic)
+ case _ => while (s.hasStep) { c.accept(jl.Integer.valueOf(s.nextStep())) }
+ }
+ }
+}
+
+/** A Stepper for Doubles. See [[Stepper]]. */
+trait DoubleStepper extends Stepper[Double] {
+ this: DoubleStepper^ =>
+ def trySplit(): DoubleStepper
+
+ def spliterator[B >: Double]: Spliterator.OfDouble^{this} = new DoubleStepper.DoubleStepperSpliterator(this)
+
+ def javaIterator[B >: Double]: PrimitiveIterator.OfDouble^{this} = new PrimitiveIterator.OfDouble {
+ def hasNext: Boolean = hasStep
+ def nextDouble(): Double = nextStep()
+ }
+}
+
+object DoubleStepper {
+ class DoubleStepperSpliterator(s: DoubleStepper^) extends Spliterator.OfDouble {
+ def tryAdvance(c: DoubleConsumer): Boolean =
+ if (s.hasStep) { c.accept(s.nextStep()); true } else false
+ // Override for efficiency: don't wrap the function and call the `tryAdvance` overload
+ override def tryAdvance(c: Consumer[_ >: jl.Double]): Boolean = (c: AnyRef) match {
+ case ic: DoubleConsumer => tryAdvance(ic)
+      case _ => if (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())); true } else false
+ }
+ // override required for dotty#6152
+ override def trySplit(): Spliterator.OfDouble^{this} = {
+ val sp = s.trySplit()
+ if (sp == null) null else sp.spliterator
+ }
+ def estimateSize(): Long = s.estimateSize
+ def characteristics(): Int = s.characteristics
+ // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance
+ override def forEachRemaining(c: DoubleConsumer): Unit =
+ while (s.hasStep) { c.accept(s.nextStep()) }
+ // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance
+ override def forEachRemaining(c: Consumer[_ >: jl.Double]): Unit = (c: AnyRef) match {
+ case ic: DoubleConsumer => forEachRemaining(ic)
+ case _ => while (s.hasStep) { c.accept(jl.Double.valueOf(s.nextStep())) }
+ }
+ }
+}
+
+/** A Stepper for Longs. See [[Stepper]]. */
+trait LongStepper extends Stepper[Long] {
+ this: LongStepper^ =>
+
+ def trySplit(): LongStepper^{this}
+
+ def spliterator[B >: Long]: Spliterator.OfLong^{this} = new LongStepper.LongStepperSpliterator(this)
+
+ def javaIterator[B >: Long]: PrimitiveIterator.OfLong^{this} = new PrimitiveIterator.OfLong {
+ def hasNext: Boolean = hasStep
+ def nextLong(): Long = nextStep()
+ }
+}
+
+object LongStepper {
+ class LongStepperSpliterator(s: LongStepper^) extends Spliterator.OfLong {
+ def tryAdvance(c: LongConsumer): Boolean =
+ if (s.hasStep) { c.accept(s.nextStep()); true } else false
+ // Override for efficiency: don't wrap the function and call the `tryAdvance` overload
+ override def tryAdvance(c: Consumer[_ >: jl.Long]): Boolean = (c: AnyRef) match {
+ case ic: LongConsumer => tryAdvance(ic)
+      case _ => if (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())); true } else false
+ }
+ // override required for dotty#6152
+ override def trySplit(): Spliterator.OfLong^{this} = {
+ val sp = s.trySplit()
+ if (sp == null) null else sp.spliterator
+ }
+ def estimateSize(): Long = s.estimateSize
+ def characteristics(): Int = s.characteristics
+ // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance
+ override def forEachRemaining(c: LongConsumer): Unit =
+ while (s.hasStep) { c.accept(s.nextStep()) }
+ // Override for efficiency: implement with hasStep / nextStep instead of tryAdvance
+ override def forEachRemaining(c: Consumer[_ >: jl.Long]): Unit = (c: AnyRef) match {
+ case ic: LongConsumer => forEachRemaining(ic)
+ case _ => while (s.hasStep) { c.accept(jl.Long.valueOf(s.nextStep())) }
+ }
+ }
+}
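
Aside (illustrative sketch, not part of the patch): these primitive steppers are normally obtained through `IterableOnce.stepper`, with the shape picked by the implicit machinery in `StepperShape.scala` below. A minimal consumer, assuming that entry point:

    import scala.collection.IntStepper

    // Sums without boxing: nextStep() returns a primitive Int.
    def sum(xs: Vector[Int]): Int = {
      val st: IntStepper = xs.stepper
      var acc = 0
      while (st.hasStep) acc += st.nextStep()
      acc                                  // sum(Vector(1, 2, 3)) == 6
    }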
diff --git a/tests/pos-special/stdlib/collection/StepperShape.scala b/tests/pos-special/stdlib/collection/StepperShape.scala
new file mode 100644
index 000000000000..c6b520400d89
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/StepperShape.scala
@@ -0,0 +1,115 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import java.{lang => jl}
+
+import language.experimental.captureChecking
+import scala.collection.Stepper.EfficientSplit
+
+/** An implicit `StepperShape` instance is used in the [[IterableOnce.stepper]] method to return a
+ * possibly specialized Stepper `S` according to the element type `T`.
+ */
+sealed trait StepperShape[-T, S <: Stepper[_]^] extends Pure {
+ /** Return the Int constant (as defined in the `StepperShape` companion object) for this `StepperShape`. */
+ def shape: StepperShape.Shape
+
+ /** Create an unboxing primitive sequential Stepper from a boxed `AnyStepper`.
+ * This is an identity operation for reference shapes. */
+ def seqUnbox(st: AnyStepper[T]): S
+
+ /** Create an unboxing primitive parallel (i.e. `with EfficientSplit`) Stepper from a boxed `AnyStepper`.
+ * This is an identity operation for reference shapes. */
+ def parUnbox(st: AnyStepper[T] with EfficientSplit): S with EfficientSplit
+}
+
+object StepperShape extends StepperShapeLowPriority1 {
+ class Shape private[StepperShape] (private val s: Int) extends AnyVal
+
+ // reference
+ val ReferenceShape = new Shape(0)
+
+ // primitive
+ val IntShape = new Shape(1)
+ val LongShape = new Shape(2)
+ val DoubleShape = new Shape(3)
+
+ // widening
+ val ByteShape = new Shape(4)
+ val ShortShape = new Shape(5)
+ val CharShape = new Shape(6)
+ val FloatShape = new Shape(7)
+
+ implicit val intStepperShape: StepperShape[Int, IntStepper] = new StepperShape[Int, IntStepper] {
+ def shape = IntShape
+ def seqUnbox(st: AnyStepper[Int]): IntStepper = new Stepper.UnboxingIntStepper(st)
+ def parUnbox(st: AnyStepper[Int] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingIntStepper(st) with EfficientSplit
+ }
+ implicit val jIntegerStepperShape: StepperShape[jl.Integer, IntStepper] = intStepperShape.asInstanceOf[StepperShape[jl.Integer, IntStepper]]
+
+ implicit val longStepperShape: StepperShape[Long, LongStepper] = new StepperShape[Long, LongStepper] {
+ def shape = LongShape
+ def seqUnbox(st: AnyStepper[Long]): LongStepper = new Stepper.UnboxingLongStepper(st)
+ def parUnbox(st: AnyStepper[Long] with EfficientSplit): LongStepper with EfficientSplit = new Stepper.UnboxingLongStepper(st) with EfficientSplit
+ }
+ implicit val jLongStepperShape: StepperShape[jl.Long, LongStepper] = longStepperShape.asInstanceOf[StepperShape[jl.Long, LongStepper]]
+
+ implicit val doubleStepperShape: StepperShape[Double, DoubleStepper] = new StepperShape[Double, DoubleStepper] {
+ def shape = DoubleShape
+ def seqUnbox(st: AnyStepper[Double]): DoubleStepper = new Stepper.UnboxingDoubleStepper(st)
+ def parUnbox(st: AnyStepper[Double] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingDoubleStepper(st) with EfficientSplit
+ }
+ implicit val jDoubleStepperShape: StepperShape[jl.Double, DoubleStepper] = doubleStepperShape.asInstanceOf[StepperShape[jl.Double, DoubleStepper]]
+
+ implicit val byteStepperShape: StepperShape[Byte, IntStepper] = new StepperShape[Byte, IntStepper] {
+ def shape = ByteShape
+ def seqUnbox(st: AnyStepper[Byte]): IntStepper = new Stepper.UnboxingByteStepper(st)
+ def parUnbox(st: AnyStepper[Byte] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingByteStepper(st) with EfficientSplit
+ }
+ implicit val jByteStepperShape: StepperShape[jl.Byte, IntStepper] = byteStepperShape.asInstanceOf[StepperShape[jl.Byte, IntStepper]]
+
+ implicit val shortStepperShape: StepperShape[Short, IntStepper] = new StepperShape[Short, IntStepper] {
+ def shape = ShortShape
+ def seqUnbox(st: AnyStepper[Short]): IntStepper = new Stepper.UnboxingShortStepper(st)
+ def parUnbox(st: AnyStepper[Short] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingShortStepper(st) with EfficientSplit
+ }
+ implicit val jShortStepperShape: StepperShape[jl.Short, IntStepper] = shortStepperShape.asInstanceOf[StepperShape[jl.Short, IntStepper]]
+
+ implicit val charStepperShape: StepperShape[Char, IntStepper] = new StepperShape[Char, IntStepper] {
+ def shape = CharShape
+ def seqUnbox(st: AnyStepper[Char]): IntStepper = new Stepper.UnboxingCharStepper(st)
+ def parUnbox(st: AnyStepper[Char] with EfficientSplit): IntStepper with EfficientSplit = new Stepper.UnboxingCharStepper(st) with EfficientSplit
+ }
+ implicit val jCharacterStepperShape: StepperShape[jl.Character, IntStepper] = charStepperShape.asInstanceOf[StepperShape[jl.Character, IntStepper]]
+
+ implicit val floatStepperShape: StepperShape[Float, DoubleStepper] = new StepperShape[Float, DoubleStepper] {
+ def shape = FloatShape
+ def seqUnbox(st: AnyStepper[Float]): DoubleStepper = new Stepper.UnboxingFloatStepper(st)
+ def parUnbox(st: AnyStepper[Float] with EfficientSplit): DoubleStepper with EfficientSplit = new Stepper.UnboxingFloatStepper(st) with EfficientSplit
+ }
+ implicit val jFloatStepperShape: StepperShape[jl.Float, DoubleStepper] = floatStepperShape.asInstanceOf[StepperShape[jl.Float, DoubleStepper]]
+}
+
+trait StepperShapeLowPriority1 extends StepperShapeLowPriority2 {
+ implicit def anyStepperShape[T]: StepperShape[T, AnyStepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, AnyStepper[T]]]
+}
+
+trait StepperShapeLowPriority2 {
+ implicit def baseStepperShape[T]: StepperShape[T, Stepper[T]] = anyStepperShapePrototype.asInstanceOf[StepperShape[T, Stepper[T]]]
+
+ protected val anyStepperShapePrototype: StepperShape[AnyRef, Stepper[AnyRef]] = new StepperShape[AnyRef, Stepper[AnyRef]] {
+ def shape = StepperShape.ReferenceShape
+ def seqUnbox(st: AnyStepper[AnyRef]): Stepper[AnyRef] = st
+ def parUnbox(st: AnyStepper[AnyRef] with EfficientSplit): Stepper[AnyRef] with EfficientSplit = st
+ }
+}
\ No newline at end of file
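
Aside (illustrative sketch, not part of the patch): the implicits above are what make `stepper` return the most specific shape; the widening shapes route sub-int primitives through `IntStepper` and `Float` through `DoubleStepper`:

    import scala.collection.{AnyStepper, DoubleStepper, IntStepper}

    val is: IntStepper         = List(1, 2, 3).stepper     // intStepperShape
    val ds: DoubleStepper      = List(1.0f, 2.0f).stepper  // floatStepperShape widens to Double
    val as: AnyStepper[String] = List("a", "b").stepper    // anyStepperShape (reference shape)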
diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala
new file mode 100644
index 000000000000..a9c5e0af43b3
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/StrictOptimizedMapOps.scala
@@ -0,0 +1,50 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+/**
+ * Trait that overrides map operations to take advantage of strict builders.
+ *
+ * @tparam K Type of keys
+ * @tparam V Type of values
+ * @tparam CC Collection type constructor
+ * @tparam C Collection type
+ */
+trait StrictOptimizedMapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C]
+ extends MapOps[K, V, CC, C]
+ with StrictOptimizedIterableOps[(K, V), Iterable, C]
+ with Pure {
+
+ override def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] =
+ strictOptimizedMap(mapFactory.newBuilder, f)
+
+ override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): CC[K2, V2] =
+ strictOptimizedFlatMap(mapFactory.newBuilder, f)
+
+ override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] =
+ strictOptimizedConcat(suffix, mapFactory.newBuilder)
+
+ override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] =
+ strictOptimizedCollect(mapFactory.newBuilder, pf)
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = {
+ val b = mapFactory.newBuilder[K, V1]
+ b ++= this
+ b += elem1
+ b += elem2
+ if (elems.nonEmpty) b ++= elems
+ b.result()
+ }
+}
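
Aside (illustrative sketch, not part of the patch): the `strictOptimized*` helpers called above are defined in `StrictOptimizedIterableOps`; each boils down to an eager builder loop, roughly:

    import scala.collection.mutable.Builder

    // Not the actual definition, just the shape of it.
    def strictOptimizedMapSketch[A, B, C](it: Iterator[A], b: Builder[B, C], f: A => B): C = {
      while (it.hasNext) b += f(it.next())  // eager: no intermediate view is materialized
      b.result()
    }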
diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala
index 50ddbca30f9e..bfea9eda8bd3 100644
--- a/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala
+++ b/tests/pos-special/stdlib/collection/StrictOptimizedSeqOps.scala
@@ -12,19 +12,20 @@
package scala.collection
import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
/**
* Trait that overrides operations on sequences in order
* to take advantage of strict builders.
*/
trait StrictOptimizedSeqOps [+A, +CC[_], +C]
- extends AnyRef
+ extends Any
with SeqOps[A, CC, C]
with StrictOptimizedIterableOps[A, CC, C] {
override def distinctBy[B](f: A -> B): C = {
val builder = newSpecificBuilder
- val seen = mutable.HashSet.empty[B]
+ val seen = mutable.HashSet.empty[B @uncheckedCaptures]
val it = this.iterator
while (it.hasNext) {
val next = it.next()
@@ -79,7 +80,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C]
override def diff[B >: A](that: Seq[B]): C =
if (isEmpty || that.isEmpty) coll
else {
- val occ = occCounts(that)
+ val occ = occCounts[B @uncheckedCaptures](that)
val b = newSpecificBuilder
for (x <- this) {
occ.updateWith(x) {
@@ -97,7 +98,7 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C]
override def intersect[B >: A](that: Seq[B]): C =
if (isEmpty || that.isEmpty) empty
else {
- val occ = occCounts(that)
+ val occ = occCounts[B @uncheckedCaptures](that)
val b = newSpecificBuilder
for (x <- this) {
occ.updateWith(x) {
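
Aside (illustrative sketch, not part of the patch): the `@uncheckedCaptures` annotations above follow a pattern used throughout this port: a mutable container whose element type could be inferred as capturing is waved through, which is sound as long as the container never escapes. The idiom, with a hypothetical helper:

    import scala.annotation.unchecked.uncheckedCaptures
    import scala.collection.mutable

    def firstDuplicateBy[A, B](xs: Seq[A])(key: A => B): Option[A] = {
      val seen = mutable.HashSet.empty[B @uncheckedCaptures]  // local set, never escapes
      xs.find(x => !seen.add(key(x)))
    }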
diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala
new file mode 100644
index 000000000000..8ed337fff998
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/StrictOptimizedSetOps.scala
@@ -0,0 +1,30 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+/**
+ * Trait that overrides set operations to take advantage of strict builders.
+ *
+ * @tparam A Elements type
+ * @tparam CC Collection type constructor
+ * @tparam C Collection type
+ */
+trait StrictOptimizedSetOps[A, +CC[_], +C <: SetOps[A, CC, C]]
+ extends SetOps[A, CC, C]
+ with StrictOptimizedIterableOps[A, CC, C] {
+
+ override def concat(that: IterableOnce[A]): C =
+ strictOptimizedConcat(that, newSpecificBuilder)
+
+}
diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala
new file mode 100644
index 000000000000..9a9e6e367922
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedMapOps.scala
@@ -0,0 +1,47 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+
+import scala.annotation.implicitNotFound
+import language.experimental.captureChecking
+
+/**
+ * Trait that overrides sorted map operations to take advantage of strict builders.
+ *
+ * @tparam K Type of keys
+ * @tparam V Type of values
+ * @tparam CC Collection type constructor
+ * @tparam C Collection type
+ */
+trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
+ extends SortedMapOps[K, V, CC, C]
+ with StrictOptimizedMapOps[K, V, Map, C] {
+
+ override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] =
+ strictOptimizedMap(sortedMapFactory.newBuilder, f)
+
+ override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] =
+ strictOptimizedFlatMap(sortedMapFactory.newBuilder, f)
+
+ override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] =
+ strictOptimizedConcat(xs, sortedMapFactory.newBuilder(ordering))
+
+ override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] =
+ strictOptimizedCollect(sortedMapFactory.newBuilder, pf)
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = {
+ val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]]
+ if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]]
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala
new file mode 100644
index 000000000000..ded7deabccca
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/StrictOptimizedSortedSetOps.scala
@@ -0,0 +1,43 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.annotation.implicitNotFound
+import scala.annotation.unchecked.uncheckedVariance
+import language.experimental.captureChecking
+
+/**
+ * Trait that overrides sorted set operations to take advantage of strict builders.
+ *
+ * @tparam A Elements type
+ * @tparam CC Collection type constructor
+ * @tparam C Collection type
+ */
+trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]]
+ extends SortedSetOps[A, CC, C]
+ with StrictOptimizedSetOps[A, Set, C] {
+
+ override def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] =
+ strictOptimizedMap(sortedIterableFactory.newBuilder, f)
+
+ override def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] =
+ strictOptimizedFlatMap(sortedIterableFactory.newBuilder, f)
+
+ override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] =
+ strictOptimizedZip(that, sortedIterableFactory.newBuilder[(A, B)])
+
+ override def collect[B](pf: PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] =
+ strictOptimizedCollect(sortedIterableFactory.newBuilder, pf)
+
+}
diff --git a/tests/pos-special/stdlib/collection/StringOps.scala b/tests/pos-special/stdlib/collection/StringOps.scala
index f570531def98..3e3e2f8d872e 100644
--- a/tests/pos-special/stdlib/collection/StringOps.scala
+++ b/tests/pos-special/stdlib/collection/StringOps.scala
@@ -964,7 +964,7 @@ final class StringOps(private val s: String) extends AnyVal {
else if (s.equalsIgnoreCase("false")) false
else throw new IllegalArgumentException("For input string: \""+s+"\"")
- def toArray[B >: Char](implicit tag: ClassTag[B]): Array[B] =
+ def toArray[sealed B >: Char](implicit tag: ClassTag[B]): Array[B] =
if (tag == ClassTag.Char) s.toCharArray.asInstanceOf[Array[B]]
else new WrappedString(s).toArray[B]
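
Aside (an assumption about the capture-checking syntax of this period, since revised): `sealed` on a type parameter restricts it to non-capturing instantiations, which is what makes allocating an `Array[B]` admissible under capture checking.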
diff --git a/tests/pos-special/stdlib/collection/StringParsers.scala b/tests/pos-special/stdlib/collection/StringParsers.scala
new file mode 100644
index 000000000000..47281815da71
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/StringParsers.scala
@@ -0,0 +1,320 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+
+import scala.annotation.tailrec
+import language.experimental.captureChecking
+
+/** A module containing the implementations of parsers from strings to numeric types and booleans.
+ */
+private[scala] object StringParsers {
+
+ //compile-time constant helpers
+
+ //Int.MinValue == -2147483648
+ private final val intOverflowBoundary = -214748364
+ private final val intOverflowDigit = 9
+ //Long.MinValue == -9223372036854775808L
+ private final val longOverflowBoundary = -922337203685477580L
+ private final val longOverflowDigit = 9
+
+ @inline
+ private[this] final def decValue(ch: Char): Int = java.lang.Character.digit(ch, 10)
+
+ @inline
+ private[this] final def stepToOverflow(from: String, len: Int, agg: Int, isPositive: Boolean, min: Int): Option[Int] = {
+ @tailrec
+ def rec(i: Int, agg: Int): Option[Int] =
+ if (agg < min) None
+ else if (i == len) {
+ if (!isPositive) Some(agg)
+ else if (agg == min) None
+ else Some(-agg)
+ }
+ else {
+ val digit = decValue(from.charAt(i))
+ if (digit == -1) None
+ else rec(i + 1, agg * 10 - digit)
+ }
+ rec(1, agg)
+ }
+
+ @inline
+ private[this] final def isDigit(c: Char): Boolean = c >= '0' && c <= '9'
+
+ //bool
+ @inline
+ final def parseBool(from: String): Option[Boolean] =
+ if (from.equalsIgnoreCase("true")) Some(true)
+ else if (from.equalsIgnoreCase("false")) Some(false)
+ else None
+
+ //integral types
+ final def parseByte(from: String): Option[Byte] = {
+ val len = from.length()
+ //empty strings parse to None
+ if (len == 0) None
+ else {
+ val first = from.charAt(0)
+ val v = decValue(first)
+ if (len == 1) {
+ //"+" and "-" parse to None
+ if (v > -1) Some(v.toByte)
+ else None
+ }
+ else if (v > -1) stepToOverflow(from, len, -v, true, Byte.MinValue).map(_.toByte)
+ else if (first == '+') stepToOverflow(from, len, 0, true, Byte.MinValue).map(_.toByte)
+ else if (first == '-') stepToOverflow(from, len, 0, false, Byte.MinValue).map(_.toByte)
+ else None
+ }
+ }
+
+ final def parseShort(from: String): Option[Short] = {
+ val len = from.length()
+ //empty strings parse to None
+ if (len == 0) None
+ else {
+ val first = from.charAt(0)
+ val v = decValue(first)
+ if (len == 1) {
+ //"+" and "-" parse to None
+ if (v > -1) Some(v.toShort)
+ else None
+ }
+ else if (v > -1) stepToOverflow(from, len, -v, true, Short.MinValue).map(_.toShort)
+ else if (first == '+') stepToOverflow(from, len, 0, true, Short.MinValue).map(_.toShort)
+ else if (first == '-') stepToOverflow(from, len, 0, false, Short.MinValue).map(_.toShort)
+ else None
+ }
+ }
+
+ final def parseInt(from: String): Option[Int] = {
+ val len = from.length()
+
+ @tailrec
+ def step(i: Int, agg: Int, isPositive: Boolean): Option[Int] = {
+ if (i == len) {
+ if (!isPositive) Some(agg)
+ else if (agg == Int.MinValue) None
+ else Some(-agg)
+ }
+ else if (agg < intOverflowBoundary) None
+ else {
+ val digit = decValue(from.charAt(i))
+ if (digit == -1 || (agg == intOverflowBoundary && digit == intOverflowDigit)) None
+ else step(i + 1, (agg * 10) - digit, isPositive)
+ }
+ }
+ //empty strings parse to None
+ if (len == 0) None
+ else {
+ val first = from.charAt(0)
+ val v = decValue(first)
+ if (len == 1) {
+ //"+" and "-" parse to None
+ if (v > -1) Some(v)
+ else None
+ }
+ else if (v > -1) step(1, -v, true)
+ else if (first == '+') step(1, 0, true)
+ else if (first == '-') step(1, 0, false)
+ else None
+ }
+ }
+
+ final def parseLong(from: String): Option[Long] = {
+ //like parseInt, but Longer
+ val len = from.length()
+
+ @tailrec
+ def step(i: Int, agg: Long, isPositive: Boolean): Option[Long] = {
+ if (i == len) {
+ if (isPositive && agg == Long.MinValue) None
+ else if (isPositive) Some(-agg)
+ else Some(agg)
+ }
+ else if (agg < longOverflowBoundary) None
+ else {
+ val digit = decValue(from.charAt(i))
+ if (digit == -1 || (agg == longOverflowBoundary && digit == longOverflowDigit)) None
+ else step(i + 1, agg * 10 - digit, isPositive)
+ }
+ }
+ //empty strings parse to None
+ if (len == 0) None
+ else {
+ val first = from.charAt(0)
+ val v = decValue(first).toLong
+ if (len == 1) {
+ //"+" and "-" parse to None
+ if (v > -1) Some(v)
+ else None
+ }
+ else if (v > -1) step(1, -v, true)
+ else if (first == '+') step(1, 0, true)
+ else if (first == '-') step(1, 0, false)
+ else None
+ }
+ }
+
+ //floating point
+ final def checkFloatFormat(format: String): Boolean = {
+ //indices are tracked with a start index which points *at* the first index
+ //and an end index which points *after* the last index
+ //so that slice length === end - start
+ //thus start == end <=> empty slice
+ //and format.substring(start, end) is equivalent to the slice
+
+ //some utilities for working with index bounds into the original string
+ @inline
+ def forAllBetween(start: Int, end: Int, pred: Char => Boolean): Boolean = {
+ @tailrec
+ def rec(i: Int): Boolean = i >= end || pred(format.charAt(i)) && rec(i + 1)
+ rec(start)
+ }
+
+ //one after last index for the predicate to hold, or `from` if none hold
+ //may point after the end of the string
+ @inline
+ def skipIndexWhile(predicate: Char => Boolean, from: Int, until: Int): Int = {
+ @tailrec @inline
+ def rec(i: Int): Int = if ((i < until) && predicate(format.charAt(i))) rec(i + 1)
+ else i
+ rec(from)
+ }
+
+
+ def isHexFloatLiteral(startIndex: Int, endIndex: Int): Boolean = {
+ def isHexDigit(ch: Char) = ((ch >= '0' && ch <= '9') ||
+ (ch >= 'a' && ch <= 'f') ||
+ (ch >= 'A' && ch <= 'F'))
+
+ def prefixOK(startIndex: Int, endIndex: Int): Boolean = {
+ val len = endIndex - startIndex
+ (len > 0) && {
+ //the prefix part is
+ //hexDigits
+ //hexDigits.
+ //hexDigits.hexDigits
+ //.hexDigits
+ //but not .
+ if (format.charAt(startIndex) == '.') {
+ (len > 1) && forAllBetween(startIndex + 1, endIndex, isHexDigit)
+ } else {
+ val noLeading = skipIndexWhile(isHexDigit, startIndex, endIndex)
+ (noLeading >= endIndex) ||
+ ((format.charAt(noLeading) == '.') && forAllBetween(noLeading + 1, endIndex, isHexDigit))
+ }
+ }
+ }
+
+ def postfixOK(startIndex: Int, endIndex: Int): Boolean =
+ (startIndex < endIndex) && {
+ (forAllBetween(startIndex, endIndex, isDigit)) || {
+ val startchar = format.charAt(startIndex)
+ (startchar == '+' || startchar == '-') &&
+ (endIndex - startIndex > 1) &&
+ forAllBetween(startIndex + 1, endIndex, isDigit)
+ }
+ }
+ // prefix [pP] postfix
+ val pIndex = format.indexWhere(ch => ch == 'p' || ch == 'P', startIndex)
+ (pIndex <= endIndex) && prefixOK(startIndex, pIndex) && postfixOK(pIndex + 1, endIndex)
+ }
+
+ def isDecFloatLiteral(startIndex: Int, endIndex: Int): Boolean = {
+ //invariant: endIndex > startIndex
+
+ def isExp(c: Char): Boolean = c == 'e' || c == 'E'
+
+ def expOK(startIndex: Int, endIndex: Int): Boolean =
+ (startIndex < endIndex) && {
+ val startChar = format.charAt(startIndex)
+ if (startChar == '+' || startChar == '-')
+ (endIndex > (startIndex + 1)) &&
+ skipIndexWhile(isDigit, startIndex + 1, endIndex) == endIndex
+ else skipIndexWhile(isDigit, startIndex, endIndex) == endIndex
+ }
+
+ //significant can be one of
+ //* digits.digits
+ //* .digits
+ //* digits.
+ //but not just .
+ val startChar = format.charAt(startIndex)
+ if (startChar == '.') {
+ val noSignificant = skipIndexWhile(isDigit, startIndex + 1, endIndex)
+ // a digit is required followed by optional exp
+ (noSignificant > startIndex + 1) && (noSignificant >= endIndex ||
+ isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex)
+ )
+ }
+ else if (isDigit(startChar)) {
+ // one set of digits, then optionally a period, then optionally another set of digits, then optionally an exponent
+ val noInt = skipIndexWhile(isDigit, startIndex, endIndex)
+ // just the digits
+ (noInt == endIndex) || {
+ if (format.charAt(noInt) == '.') {
+ val noSignificant = skipIndexWhile(isDigit, noInt + 1, endIndex)
+ (noSignificant >= endIndex) || //no exponent
+ isExp(format.charAt(noSignificant)) && expOK(noSignificant + 1, endIndex)
+ } else
+ isExp(format.charAt(noInt)) && expOK(noInt + 1, endIndex)
+ }
+ }
+ else false
+ }
+
+ //count 0x00 to 0x20 as "whitespace", and nothing else
+ val unspacedStart = format.indexWhere(ch => ch.toInt > 0x20)
+ val unspacedEnd = format.lastIndexWhere(ch => ch.toInt > 0x20) + 1
+
+ if (unspacedStart == -1 || unspacedStart >= unspacedEnd || unspacedEnd <= 0) false
+ else {
+ //all formats can have a sign
+ val unsigned = {
+ val startchar = format.charAt(unspacedStart)
+ if (startchar == '-' || startchar == '+') unspacedStart + 1 else unspacedStart
+ }
+ if (unsigned >= unspacedEnd) false
+ //that's it for NaN and Infinity
+ else if (format.charAt(unsigned) == 'N') format.substring(unsigned, unspacedEnd) == "NaN"
+ else if (format.charAt(unsigned) == 'I') format.substring(unsigned, unspacedEnd) == "Infinity"
+ else {
+ //all other formats can have a format suffix
+ val desuffixed = {
+ val endchar = format.charAt(unspacedEnd - 1)
+ if (endchar == 'f' || endchar == 'F' || endchar == 'd' || endchar == 'D') unspacedEnd - 1
+ else unspacedEnd
+ }
+ val len = desuffixed - unsigned
+ if (len <= 0) false
+ else if (len >= 2 && (format.charAt(unsigned + 1) == 'x' || format.charAt(unsigned + 1) == 'X'))
+ format.charAt(unsigned) == '0' && isHexFloatLiteral(unsigned + 2, desuffixed)
+ else isDecFloatLiteral(unsigned, desuffixed)
+ }
+ }
+ }
+
+ @inline
+ def parseFloat(from: String): Option[Float] =
+ if (checkFloatFormat(from)) Some(java.lang.Float.parseFloat(from))
+ else None
+
+ @inline
+ def parseDouble(from: String): Option[Double] =
+ if (checkFloatFormat(from)) Some(java.lang.Double.parseDouble(from))
+ else None
+
+}
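
Aside (illustrative sketch, not part of the patch): expected behaviour of the parsers above, callable from within the `scala` package since the object is `private[scala]`:

    StringParsers.parseInt("123")          // Some(123)
    StringParsers.parseInt("-2147483648")  // Some(Int.MinValue): negative accumulation avoids overflow
    StringParsers.parseInt("2147483648")   // None: one past Int.MaxValue
    StringParsers.parseInt("+")            // None: a bare sign is rejected
    StringParsers.parseBool("TRUE")        // Some(true): case-insensitive
    StringParsers.parseDouble("1e3")       // Some(1000.0): checkFloatFormat accepts exponents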
diff --git a/tests/pos-special/stdlib/collection/View.scala b/tests/pos-special/stdlib/collection/View.scala
index 85910311a4c3..d91fc0c49939 100644
--- a/tests/pos-special/stdlib/collection/View.scala
+++ b/tests/pos-special/stdlib/collection/View.scala
@@ -78,7 +78,7 @@ object View extends IterableFactory[View] {
def empty[A]: View[A] = Empty
- def newBuilder[A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from)
+ def newBuilder[sealed A]: Builder[A, View[A]] = ArrayBuffer.newBuilder[A].mapResult(from)
override def apply[A](xs: A*): View[A] = new Elems(xs: _*)
diff --git a/tests/pos-special/stdlib/collection/WithFilter.scala b/tests/pos-special/stdlib/collection/WithFilter.scala
new file mode 100644
index 000000000000..0f3830e9fe25
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/WithFilter.scala
@@ -0,0 +1,72 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
+ * of trait `Iterable`.
+ *
+ * @tparam A Element type (e.g. `Int`)
+ * @tparam CC Collection type constructor (e.g. `List`)
+ *
+ * @define coll collection
+ */
+@SerialVersionUID(3L)
+abstract class WithFilter[+A, +CC[_]] extends Serializable {
+ this: WithFilter[A, CC]^ =>
+
+ /** Builds a new collection by applying a function to all elements of the
+ * `filtered` outer $coll.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @return a new $coll resulting from applying
+ * the given function `f` to each element of the filtered outer $coll
+ * and collecting the results.
+ */
+ def map[B](f: A => B): CC[B]^{this, f}
+
+ /** Builds a new collection by applying a function to all elements of the
+   * `filtered` outer $coll, i.e. the elements that satisfy the filter predicate.
+ *
+ * @param f the function to apply to each element.
+ * @tparam B the element type of the returned collection.
+ * @return a new $coll resulting from applying
+ * the given collection-valued function `f` to each element
+ * of the filtered outer $coll and
+ * concatenating the results.
+ */
+ def flatMap[B](f: A => IterableOnce[B]^): CC[B]^{this, f}
+
+ /** Applies a function `f` to all elements of the `filtered` outer $coll.
+ *
+ * @param f the function that is applied for its side-effect to every element.
+ * The result of function `f` is discarded.
+ *
+ * @tparam U the type parameter describing the result of function `f`.
+ * This result will always be ignored. Typically `U` is `Unit`,
+ * but this is not necessary.
+ */
+ def foreach[U](f: A => U): Unit
+
+ /** Further refines the filter for this `filtered` $coll.
+ *
+ * @param q the predicate used to test elements.
+ * @return an object of class `WithFilter`, which supports
+ * `map`, `flatMap`, `foreach`, and `withFilter` operations.
+ * All these operations apply to those elements of this $coll which
+ * also satisfy both `p` and `q` predicates.
+ */
+ def withFilter(q: A => Boolean): WithFilter[A, CC]^{this, q}
+
+}
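
Aside (illustrative sketch, not part of the patch): `withFilter` is what `for`-comprehension guards desugar to, so no intermediate collection is built between the guard and the `map`:

    val xs = List(1, 2, 3, 4)
    val ys = for (x <- xs if x % 2 == 0) yield x * 10
    // is (roughly) sugar for:
    val zs = xs.withFilter(_ % 2 == 0).map(_ * 10)  // List(20, 40)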
diff --git a/tests/pos-special/stdlib/collection/concurrent/Map.scala b/tests/pos-special/stdlib/collection/concurrent/Map.scala
new file mode 100644
index 000000000000..d985dad2edc5
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/concurrent/Map.scala
@@ -0,0 +1,193 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.concurrent
+
+import language.experimental.captureChecking
+import scala.annotation.tailrec
+
+/** A template trait for mutable maps that allow concurrent access.
+ *
+ * $concurrentmapinfo
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]]
+ * section on `Concurrent Maps` for more information.
+ *
+ * @tparam K the key type of the map
+ * @tparam V the value type of the map
+ *
+ * @define Coll `concurrent.Map`
+ * @define coll concurrent map
+ * @define concurrentmapinfo
+ * This is a base trait for all Scala concurrent map implementations. It
+ * provides all of the methods a `Map` does, with the difference that all the
+ * changes are atomic. It also describes methods specific to concurrent maps.
+ *
+ * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values.
+ *
+ * @define atomicop
+ * This is an atomic operation.
+ */
+trait Map[K, V] extends scala.collection.mutable.Map[K, V] {
+
+ /**
+ * Associates the given key with a given value, unless the key was already
+ * associated with some other value.
+ *
+ * $atomicop
+ *
+   * @param k key with which the specified value is to be associated
+ * @param v value to be associated with the specified key
+ * @return `Some(oldvalue)` if there was a value `oldvalue` previously
+ * associated with the specified key, or `None` if there was no
+ * mapping for the specified key
+ */
+ def putIfAbsent(k: K, v: V): Option[V]
+
+ /**
+ * Removes the entry for the specified key if it's currently mapped to the
+ * specified value.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be removed
+ * @param v value expected to be associated with the specified key if
+ * the removal is to take place
+ * @return `true` if the removal took place, `false` otherwise
+ */
+ def remove(k: K, v: V): Boolean
+
+ /**
+ * Replaces the entry for the given key only if it was previously mapped to
+ * a given value.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be replaced
+ * @param oldvalue value expected to be associated with the specified key
+ * if replacing is to happen
+ * @param newvalue value to be associated with the specified key
+ * @return `true` if the entry was replaced, `false` otherwise
+ */
+ def replace(k: K, oldvalue: V, newvalue: V): Boolean
+
+ /**
+ * Replaces the entry for the given key only if it was previously mapped
+ * to some value.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be replaced
+ * @param v value to be associated with the specified key
+ * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
+ */
+ def replace(k: K, v: V): Option[V]
+
+ override def getOrElseUpdate(key: K, op: => V): V = get(key) match {
+ case Some(v) => v
+ case None =>
+ val v = op
+ putIfAbsent(key, v) match {
+ case Some(ov) => ov
+ case None => v
+ }
+ }
+
+ /**
+ * Removes the entry for the specified key if it's currently mapped to the
+ * specified value. Comparison to the specified value is done using reference
+ * equality.
+ *
+ * Not all map implementations can support removal based on reference
+ * equality, and for those implementations, object equality is used instead.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be removed
+ * @param v value expected to be associated with the specified key if
+ * the removal is to take place
+ * @return `true` if the removal took place, `false` otherwise
+ */
+ // TODO: make part of the API in a future version
+ private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v)
+
+ /**
+ * Replaces the entry for the given key only if it was previously mapped to
+ * a given value. Comparison to the specified value is done using reference
+ * equality.
+ *
+ * Not all map implementations can support replacement based on reference
+ * equality, and for those implementations, object equality is used instead.
+ *
+ * $atomicop
+ *
+ * @param k key for which the entry should be replaced
+ * @param oldValue value expected to be associated with the specified key
+ * if replacing is to happen
+ * @param newValue value to be associated with the specified key
+ * @return `true` if the entry was replaced, `false` otherwise
+ */
+ // TODO: make part of the API in a future version
+ private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue)
+
+ /**
+ * Update a mapping for the specified key and its current optionally-mapped value
+   * (`Some` if there is a current mapping, `None` if not).
+ *
+ * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`.
+ * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent).
+ * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged.
+ *
+ * If the map is updated by another concurrent access, the remapping function will be retried until successfully updated.
+ *
+ * @param key the key value
+   * @param remappingFunction a function that receives the current optionally-mapped value and returns a new mapping
+ * @return the new value associated with the specified key
+ */
+ override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = updateWithAux(key)(remappingFunction)
+
+ @tailrec
+ private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
+ val previousValue = get(key)
+ val nextValue = remappingFunction(previousValue)
+ previousValue match {
+ case Some(prev) => nextValue match {
+ case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue
+ case _ => if (removeRefEq(key, prev)) return None
+ }
+ case _ => nextValue match {
+ case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue
+ case _ => return None
+ }
+ }
+ updateWithAux(key)(remappingFunction)
+ }
+
+ private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = {
+ val it = iterator
+ while (it.hasNext) {
+ val (k, v) = it.next()
+ if (!p(k, v)) removeRefEq(k, v)
+ }
+ this
+ }
+
+ private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = {
+ val it = iterator
+ while (it.hasNext) {
+ val (k, v) = it.next()
+ replaceRefEq(k, v, f(k, v))
+ }
+ this
+ }
+}
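
Aside (illustrative sketch, not part of the patch): `scala.collection.concurrent.TrieMap` is the standard implementation of this trait; the atomic operations compose as documented above:

    import scala.collection.concurrent.TrieMap

    val m = TrieMap.empty[String, Int]
    m.putIfAbsent("a", 1)                       // None: inserted
    m.putIfAbsent("a", 2)                       // Some(1): key already bound, value kept
    m.replace("a", 1, 3)                        // true: old value matched, now "a" -> 3
    m.updateWith("a")(prev => prev.map(_ + 1))  // Some(4): retried under contention
    m.remove("a", 99)                           // false: value does not match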
diff --git a/tests/pos-special/stdlib/collection/generic/BitOperations.scala b/tests/pos-special/stdlib/collection/generic/BitOperations.scala
new file mode 100644
index 000000000000..f76619a004fa
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/BitOperations.scala
@@ -0,0 +1,51 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package generic
+import language.experimental.captureChecking
+
+
+/** Some bit operations.
+ *
+ * See [[https://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for
+ * an explanation of unsignedCompare.
+ */
+private[collection] object BitOperations {
+ trait Int {
+ type Int = scala.Int
+ def zero(i: Int, mask: Int) = (i & mask) == 0
+ def mask(i: Int, mask: Int) = i & (complement(mask - 1) ^ mask)
+ def hasMatch(key: Int, prefix: Int, m: Int) = mask(key, m) == prefix
+ def unsignedCompare(i: Int, j: Int) = (i < j) ^ (i < 0) ^ (j < 0)
+ def shorter(m1: Int, m2: Int) = unsignedCompare(m2, m1)
+ def complement(i: Int) = (-1) ^ i
+ def bits(num: Int) = 31 to 0 by -1 map (i => (num >>> i & 1) != 0)
+ def bitString(num: Int, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep
+ def highestOneBit(j: Int) = java.lang.Integer.highestOneBit(j)
+ }
+ object Int extends Int
+
+ trait Long {
+ type Long = scala.Long
+ def zero(i: Long, mask: Long) = (i & mask) == 0L
+ def mask(i: Long, mask: Long) = i & (complement(mask - 1) ^ mask)
+ def hasMatch(key: Long, prefix: Long, m: Long) = mask(key, m) == prefix
+ def unsignedCompare(i: Long, j: Long) = (i < j) ^ (i < 0L) ^ (j < 0L)
+ def shorter(m1: Long, m2: Long) = unsignedCompare(m2, m1)
+ def complement(i: Long) = (-1L) ^ i
+ def bits(num: Long) = 63L to 0L by -1L map (i => (num >>> i & 1L) != 0L)
+ def bitString(num: Long, sep: String = "") = bits(num) map (b => if (b) "1" else "0") mkString sep
+ def highestOneBit(j: Long) = java.lang.Long.highestOneBit(j)
+ }
+ object Long extends Long
+}
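
Aside (worked example, not part of the patch): `unsignedCompare` flips the signed result exactly when one operand has its sign bit set, which is precisely when signed and unsigned orders disagree. Callable from within `scala.collection`, since the object is `private[collection]`:

    import scala.collection.generic.BitOperations.Int.unsignedCompare

    unsignedCompare(1, 2)   // true:  both non-negative, agrees with signed <
    unsignedCompare(-1, 1)  // false: -1 is 0xFFFFFFFF, the largest unsigned value
    unsignedCompare(1, -1)  // true:  1 < 0xFFFFFFFF when compared unsigned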
diff --git a/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala
new file mode 100644
index 000000000000..7eba9433b8d5
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/DefaultSerializationProxy.scala
@@ -0,0 +1,90 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.generic
+
+import java.io.{ObjectInputStream, ObjectOutputStream}
+
+import scala.collection.{Factory, Iterable}
+import scala.collection.mutable.Builder
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** The default serialization proxy for collection implementations.
+ *
+ * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder`
+ * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed
+ * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any
+ * additional state required to create the proper `Builder` needs to be captured by the `factory`.
+ */
+@SerialVersionUID(3L)
+final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable {
+
+ @transient protected var builder: Builder[A @uncheckedCaptures, Any] = _
+ // @uncheckedCaptures OK since builder is used only locally when reading objects
+
+ private[this] def writeObject(out: ObjectOutputStream): Unit = {
+ out.defaultWriteObject()
+ val k = coll.knownSize
+ out.writeInt(k)
+ var count = 0
+ coll.foreach { x =>
+ out.writeObject(x)
+ count += 1
+ }
+ if(k >= 0) {
+ if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k")
+ } else out.writeObject(SerializeEnd)
+ }
+
+ private[this] def readObject(in: ObjectInputStream): Unit = {
+ in.defaultReadObject()
+ builder = factory.newBuilder
+ val k = in.readInt()
+ if(k >= 0) {
+ builder.sizeHint(k)
+ var count = 0
+ while(count < k) {
+ builder += in.readObject().asInstanceOf[A]
+ count += 1
+ }
+ } else {
+ while (true) in.readObject match {
+ case SerializeEnd => return
+ case a => builder += a.asInstanceOf[A]
+ }
+ }
+ }
+
+ protected[this] def readResolve(): Any = builder.result()
+}
+
+@SerialVersionUID(3L)
+private[collection] case object SerializeEnd
+
+/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type
+ * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or
+ * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement
+ * it directly without using this trait if you need a non-standard factory or if you want to use a different
+ * serialization scheme.
+ */
+trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] =>
+ protected[this] def writeReplace(): AnyRef = {
+ val f: Factory[Any, Any] = this match {
+ case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]]
+ case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]]
+ case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]])
+ case it => it.iterableFactory.iterableFactory
+ }
+ new DefaultSerializationProxy(f, this)
+ }
+}
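
Aside (illustrative sketch, not part of the patch): the round trip the proxy enables, using plain Java serialization:

    import java.io._

    def roundTrip[A <: Serializable](a: A): A = {
      val bos = new ByteArrayOutputStream()
      val oos = new ObjectOutputStream(bos)
      oos.writeObject(a)                       // writeReplace swaps in the proxy
      oos.close()
      val ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
      ois.readObject().asInstanceOf[A]         // readResolve rebuilds via the factory
    }

    roundTrip(scala.collection.immutable.TreeSet(3, 1, 2))  // TreeSet(1, 2, 3): ordering survives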
diff --git a/tests/pos-special/stdlib/collection/generic/IsIterable.scala b/tests/pos-special/stdlib/collection/generic/IsIterable.scala
new file mode 100644
index 000000000000..c309299b615b
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/IsIterable.scala
@@ -0,0 +1,165 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package generic
+import language.experimental.captureChecking
+
+/** A trait which can be used to avoid code duplication when defining extension
+ * methods that should be applicable both to existing Scala collections (i.e.,
+ * types extending `Iterable`) as well as other (potentially user-defined)
+ * types that could be converted to a Scala collection type. This trait
+ * makes it possible to treat Scala collections and types that can be implicitly
+ * converted to a collection type uniformly. For example, one can provide
+ * extension methods that work both on collection types and on `String`s (`String`s
+ * do not extend `Iterable`, but can be converted to `Iterable`)
+ *
+ * `IsIterable` provides three members:
+ *
+ * 1. type member `A`, which represents the element type of the target `Iterable[A]`
+ * 1. type member `C`, which represents the type returned by transformation operations that preserve the collection’s element type
+ * 1. method `apply`, which provides a way to convert between the type we wish to add extension methods to, `Repr`, and `IterableOps[A, Iterable, C]`.
+ *
+ * ===Usage===
+ *
+ * One must provide `IsIterable` as an implicit parameter type of an implicit
+ * conversion. Its usage is shown below. Our objective in the following example
+ * is to provide a generic extension method `mapReduce` to any type that extends
+ * or can be converted to `Iterable`. In our example, this includes
+ * `String`.
+ *
+ * {{{
+ * import scala.collection.{Iterable, IterableOps}
+ * import scala.collection.generic.IsIterable
+ *
+ * class ExtensionMethods[Repr, I <: IsIterable[Repr]](coll: Repr, it: I) {
+ * def mapReduce[B](mapper: it.A => B)(reducer: (B, B) => B): B = {
+ * val iter = it(coll).iterator
+ * var res = mapper(iter.next())
+ * while (iter.hasNext)
+ * res = reducer(res, mapper(iter.next()))
+ * res
+ * }
+ * }
+ *
+ * implicit def withExtensions[Repr](coll: Repr)(implicit it: IsIterable[Repr]): ExtensionMethods[Repr, it.type] =
+ * new ExtensionMethods(coll, it)
+ *
+ * // See it in action!
+ * List(1, 2, 3).mapReduce(_ * 2)(_ + _) // res0: Int = 12
+ * "Yeah, well, you know, that's just, like, your opinion, man.".mapReduce(x => 1)(_ + _) // res1: Int = 59
+ *}}}
+ *
+ * Here, we begin by creating a class `ExtensionMethods` which contains our
+ * `mapReduce` extension method.
+ *
+ * Note that `ExtensionMethods` takes a constructor argument `coll` of type `Repr`, where
+ * `Repr` represents (typically) the collection type, and an argument `it` of a subtype of `IsIterable[Repr]`.
+ * The body of the method starts by converting the `coll` argument to an `IterableOps` in order to
+ * call the `iterator` method on it.
+ * The rest of the implementation is straightforward.
+ *
+ * The `withExtensions` implicit conversion makes the `mapReduce` operation available
+ * on any type `Repr` for which there exists an implicit `IsIterable[Repr]` instance.
+ * Note how we keep track of the precise type of the implicit `it` argument by using the
+ * `it.type` singleton type, rather than the wider `IsIterable[Repr]` type. We do that
+ * so that the information carried by the type members `A` and `C` of the `it` argument
+ * is not lost.
+ *
+ * When the `mapReduce` method is called on some type of which it is not
+ * a member, implicit search is triggered. Because implicit conversion
+ * `withExtensions` is generic, it will be applied as long as an implicit
+ * value of type `IsIterable[Repr]` can be found. Given that the
+ * `IsIterable` companion object contains implicit members that return values of type
+ * `IsIterable`, this requirement is typically satisfied, and the chain
+ * of interactions described in the previous paragraph is set into action.
+ * (See the `IsIterable` companion object, which contains a precise
+ * specification of the available implicits.)
+ *
+ * ''Note'': Currently, it's not possible to combine the implicit conversion and
+ * the class with the extension methods into an implicit class due to
+ * limitations of type inference.
+ *
+ * ===Implementing `IsIterable` for New Types===
+ *
+ * One must simply provide an implicit value of type `IsIterable`
+ * specific to the new type, or an implicit conversion which returns an
+ * instance of `IsIterable` specific to the new type.
+ *
+ * Below is an example of an implementation of the `IsIterable` trait
+ * where the `Repr` type is `Range`.
+ *
+ *{{{
+ * implicit val rangeRepr: IsIterable[Range] { type A = Int; type C = IndexedSeq[Int] } =
+ * new IsIterable[Range] {
+ * type A = Int
+ * type C = IndexedSeq[Int]
+ * def apply(coll: Range): IterableOps[Int, IndexedSeq, IndexedSeq[Int]] = coll
+ * }
+ *}}}
+ *
+ * (Note that in practice the `IsIterable[Range]` instance is already provided by
+ * the standard library, and it is defined as an `IsSeq[Range]` instance)
+ */
+trait IsIterable[Repr] extends IsIterableOnce[Repr] {
+
+ /** The type returned by transformation operations that preserve the same elements
+ * type (e.g. `filter`, `take`).
+ *
+ * In practice, this type is often `Repr` itself, excepted in the case
+ * of `SeqView[A]` (and other `View[A]` subclasses), where it is “only” `View[A]`.
+ */
+ type C
+
+ @deprecated("'conversion' is now a method named 'apply'", "2.13.0")
+ override val conversion: Repr => IterableOps[A, Iterable, C] = apply(_)
+
+ /** A conversion from the type `Repr` to `IterableOps[A, Iterable, C]` */
+ def apply(coll: Repr): IterableOps[A, Iterable, C]
+
+}
+
+object IsIterable extends IsIterableLowPriority {
+
+ // Straightforward case: IterableOps subclasses
+ implicit def iterableOpsIsIterable[A0, CC0[X] <: IterableOps[X, Iterable, CC0[X]]]: IsIterable[CC0[A0]] { type A = A0; type C = CC0[A0] } =
+ new IsIterable[CC0[A0]] {
+ type A = A0
+ type C = CC0[A0]
+ def apply(coll: CC0[A]): IterableOps[A, Iterable, C] = coll
+ }
+
+  // The `BitSet` type cannot be unified with the `CC0` parameter of
+ // the above definition because it does not take a type parameter.
+ // Hence the need for a separate case:
+ implicit def bitSetOpsIsIterable[C0 <: BitSet with BitSetOps[C0]]: IsIterable[C0] { type A = Int; type C = C0 } =
+ new IsIterable[C0] {
+ type A = Int
+ type C = C0
+ def apply(coll: C0): IterableOps[Int, Iterable, C0] = coll
+ }
+
+}
+
+trait IsIterableLowPriority {
+
+ // Makes `IsSeq` instances visible in `IsIterable` companion
+ implicit def isSeqLikeIsIterable[Repr](implicit
+ isSeqLike: IsSeq[Repr]
+ ): IsIterable[Repr] { type A = isSeqLike.A; type C = isSeqLike.C } = isSeqLike
+
+ // Makes `IsMap` instances visible in `IsIterable` companion
+ implicit def isMapLikeIsIterable[Repr](implicit
+ isMapLike: IsMap[Repr]
+ ): IsIterable[Repr] { type A = isMapLike.A; type C = isMapLike.C } = isMapLike
+
+}
diff --git a/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala
new file mode 100644
index 000000000000..2836ca2bb520
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/IsIterableOnce.scala
@@ -0,0 +1,72 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package generic
+import language.experimental.captureChecking
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `IterableOnce[A]`.
+ *
+ * This type enables simple enrichment of `IterableOnce`s with extension
+ * methods which can make full use of the mechanics of the Scala collections
+ * framework in their implementation.
+ *
+ * Example usage,
+ * {{{
+ * class FilterMapImpl[Repr, I <: IsIterableOnce[Repr]](coll: Repr, it: I) {
+ * final def filterMap[B, That](f: it.A => Option[B])(implicit bf: BuildFrom[Repr, B, That]): That = {
+ * val b = bf.newBuilder(coll)
+ * for(e <- it(coll).iterator) f(e) foreach (b +=)
+ * b.result()
+ * }
+ * }
+ * implicit def filterMap[Repr](coll: Repr)(implicit it: IsIterableOnce[Repr]): FilterMapImpl[Repr, it.type] =
+ * new FilterMapImpl(coll, it)
+ *
+ * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+ * // == List(2, 4)
+ * }}}
+ */
+trait IsIterableOnce[Repr] {
+
+ /** The type of elements we can traverse over (e.g. `Int`). */
+ type A
+
+ @deprecated("'conversion' is now a method named 'apply'", "2.13.0")
+ val conversion: Repr => IterableOnce[A] = apply(_)
+
+ /** A conversion from the representation type `Repr` to a `IterableOnce[A]`. */
+ def apply(coll: Repr): IterableOnce[A]
+
+}
+
+object IsIterableOnce extends IsIterableOnceLowPriority {
+
+ // Straightforward case: IterableOnce subclasses
+ implicit def iterableOnceIsIterableOnce[CC0[A] <: IterableOnce[A], A0]: IsIterableOnce[CC0[A0]] { type A = A0 } =
+ new IsIterableOnce[CC0[A0]] {
+ type A = A0
+ def apply(coll: CC0[A0]): IterableOnce[A0] = coll
+ }
+
+}
+
+trait IsIterableOnceLowPriority {
+
+ // Makes `IsIterable` instance visible in `IsIterableOnce` companion
+ implicit def isIterableLikeIsIterableOnce[Repr](implicit
+ isIterableLike: IsIterable[Repr]
+ ): IsIterableOnce[Repr] { type A = isIterableLike.A } = isIterableLike
+
+}
diff --git a/tests/pos-special/stdlib/collection/generic/IsMap.scala b/tests/pos-special/stdlib/collection/generic/IsMap.scala
new file mode 100644
index 000000000000..ad7254d2dd61
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/IsMap.scala
@@ -0,0 +1,115 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package generic
+
+import IsMap.Tupled
+import scala.collection.immutable.{IntMap, LongMap}
+import language.experimental.captureChecking
+
+/**
+ * Type class witnessing that a collection type `Repr`
+ * has keys of type `K`, values of type `V` and has a conversion to
+ * `MapOps[K, V, Iterable, C]`, for some types `K`, `V` and `C`.
+ *
+ * This type enables simple enrichment of `Map`s with extension methods.
+ *
+ * @see [[scala.collection.generic.IsIterable]]
+ * @tparam Repr Collection type (e.g. `Map[Int, String]`)
+ */
+trait IsMap[Repr] extends IsIterable[Repr] {
+
+ /** The type of keys */
+ type K
+
+ /** The type of values */
+ type V
+
+ type A = (K, V)
+
+ /** A conversion from the type `Repr` to `MapOps[K, V, Iterable, C]`
+ *
+ * @note The third type parameter of the returned `MapOps` value is
+ * still `Iterable` (and not `Map`) because `MapView[K, V]` only
+ * extends `MapOps[K, V, View, View[A]]`.
+ */
+ override def apply(c: Repr): MapOps[K, V, Tupled[Iterable]#Ap, C]
+
+}
+
+object IsMap {
+
+ /** Convenient type level function that takes a unary type constructor `F[_]`
+ * and returns a binary type constructor that tuples its parameters and passes
+ * them to `F`.
+ *
+ * `Tupled[F]#Ap` is equivalent to `({ type Ap[X, Y] = F[(X, Y)] })#Ap`.
+ */
+ type Tupled[F[+_]] = { type Ap[X, Y] = F[(X, Y)] }
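+ // Illustrative example (not part of the original source): `Tupled[Iterable]#Ap[Int, String]`
+ // expands to `Iterable[(Int, String)]`, letting the unary `Iterable` stand in
+ // where `MapOps` below expects a binary collection type constructor.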
+
+ // Map collections
+ implicit def mapOpsIsMap[CC0[X, Y] <: MapOps[X, Y, Tupled[Iterable]#Ap, CC0[X, Y]], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = CC0[K, V] } =
+ new IsMap[CC0[K0, V0]] {
+ type K = K0
+ type V = V0
+ type C = CC0[K0, V0]
+ def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, C] = c
+ }
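+ // Illustrative: `implicitly[IsMap[Map[Int, String]]]` resolves via `mapOpsIsMap`
+ // above, with `K = Int`, `V = String`, and `C = Map[Int, String]`.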
+
+ // MapView
+ implicit def mapViewIsMap[CC0[X, Y] <: MapView[X, Y], K0, V0]: IsMap[CC0[K0, V0]] { type K = K0; type V = V0; type C = View[(K0, V0)] } =
+ new IsMap[CC0[K0, V0]] {
+ type K = K0
+ type V = V0
+ type C = View[(K, V)]
+ def apply(c: CC0[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, View[(K0, V0)]] = c
+ }
+
+ // AnyRefMap has stricter bounds than the ones used by the mapOpsIsMap definition
+ implicit def anyRefMapIsMap[K0 <: AnyRef, V0]: IsMap[mutable.AnyRefMap[K0, V0]] { type K = K0; type V = V0; type C = mutable.AnyRefMap[K0, V0] } =
+ new IsMap[mutable.AnyRefMap[K0, V0]] {
+ type K = K0
+ type V = V0
+ type C = mutable.AnyRefMap[K0, V0]
+ def apply(c: mutable.AnyRefMap[K0, V0]): MapOps[K0, V0, Tupled[Iterable]#Ap, mutable.AnyRefMap[K0, V0]] = c
+ }
+
+ // IntMap takes one type parameter only whereas mapOpsIsMap uses a parameter CC0 with two type parameters
+ implicit def intMapIsMap[V0]: IsMap[IntMap[V0]] { type K = Int; type V = V0; type C = IntMap[V0] } =
+ new IsMap[IntMap[V0]] {
+ type K = Int
+ type V = V0
+ type C = IntMap[V0]
+ def apply(c: IntMap[V0]): MapOps[Int, V0, Tupled[Iterable]#Ap, IntMap[V0]] = c
+ }
+
+ // LongMap is in a similar situation as IntMap
+ implicit def longMapIsMap[V0]: IsMap[LongMap[V0]] { type K = Long; type V = V0; type C = LongMap[V0] } =
+ new IsMap[LongMap[V0]] {
+ type K = Long
+ type V = V0
+ type C = LongMap[V0]
+ def apply(c: LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, LongMap[V0]] = c
+ }
+
+ // mutable.LongMap is in a similar situation as LongMap and IntMap
+ implicit def mutableLongMapIsMap[V0]: IsMap[mutable.LongMap[V0]] { type K = Long; type V = V0; type C = mutable.LongMap[V0] } =
+ new IsMap[mutable.LongMap[V0]] {
+ type K = Long
+ type V = V0
+ type C = mutable.LongMap[V0]
+ def apply(c: mutable.LongMap[V0]): MapOps[Long, V0, Tupled[Iterable]#Ap, mutable.LongMap[V0]] = c
+ }
+
+
+}
diff --git a/tests/pos-special/stdlib/collection/generic/IsSeq.scala b/tests/pos-special/stdlib/collection/generic/IsSeq.scala
new file mode 100644
index 000000000000..8ad344c4d4fc
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/IsSeq.scala
@@ -0,0 +1,123 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package generic
+
+import scala.reflect.ClassTag
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `SeqOps[A, Iterable, C]`, for
+ * some types `A` and `C`.
+ *
+ * This type enables simple enrichment of `Seq`s with extension methods which
+ * can make full use of the mechanics of the Scala collections framework in
+ * their implementation.
+ *
+ * @see [[scala.collection.generic.IsIterable]]
+ */
+trait IsSeq[Repr] extends IsIterable[Repr] {
+
+ @deprecated("'conversion' is now a method named 'apply'", "2.13.0")
+ override val conversion: Repr => SeqOps[A, Iterable, C] = apply(_)
+
+ /** A conversion from the type `Repr` to `SeqOps[A, Iterable, C]`
+ *
+ * @note The second type parameter of the returned `SeqOps` value is
+ * still `Iterable` (and not `Seq`) because `SeqView[A]` only
+ * extends `SeqOps[A, View, View[A]]`.
+ */
+ def apply(coll: Repr): SeqOps[A, Iterable, C]
+}
+
+object IsSeq {
+
+ private val seqOpsIsSeqVal: IsSeq[Seq[Any]] =
+ new IsSeq[Seq[Any]] {
+ type A = Any
+ type C = Any
+ def apply(coll: Seq[Any]): SeqOps[Any, Iterable, Any] = coll
+ }
+
+ implicit def seqOpsIsSeq[CC0[X] <: SeqOps[X, Iterable, CC0[X]], A0]: IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] } =
+ seqOpsIsSeqVal.asInstanceOf[IsSeq[CC0[A0]] { type A = A0; type C = CC0[A0] }]
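+ // Descriptive note: the cast above is a caching trick; a single erased instance
+ // serves every element type, which is sound because the refinements `A` and `C`
+ // are erased at runtime and `apply` is the identity.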
+
+ /** !!! Under cc, views are not Seqs and can't use SeqOps.
+ * So this should be renamed to seqViewIsIterable
+ */
+ implicit def seqViewIsSeq[CC0[X] <: SeqView[X], A0]: IsIterable[CC0[A0]] { type A = A0; type C = View[A0] } =
+ new IsIterable[CC0[A0]] {
+ type A = A0
+ type C = View[A]
+ def apply(coll: CC0[A0]): IterableOps[A0, View, View[A0]] = coll
+ }
+
+ /** !!! Under cc, views are not Seqs and can't use SeqOps.
+ * So this should be renamed to stringViewIsIterable
+ */
+ implicit val stringViewIsSeq: IsIterable[StringView] { type A = Char; type C = View[Char] } =
+ new IsIterable[StringView] {
+ type A = Char
+ type C = View[Char]
+ def apply(coll: StringView): IterableOps[Char, View, View[Char]] = coll
+ }
+
+ implicit val stringIsSeq: IsSeq[String] { type A = Char; type C = String } =
+ new IsSeq[String] {
+ type A = Char
+ type C = String
+ def apply(s: String): SeqOps[Char, immutable.IndexedSeq, String] =
+ new SeqOps[Char, immutable.ArraySeq, String] {
+ def length: Int = s.length
+ def apply(i: Int): Char = s.charAt(i)
+ def toIterable: Iterable[Char] = new immutable.WrappedString(s)
+ protected[this] def coll: String = s
+ protected[this] def fromSpecific(coll: IterableOnce[Char]^): String = coll.iterator.mkString
+ def iterableFactory: FreeSeqFactory[immutable.ArraySeq] = immutable.ArraySeq.untagged
+ override def empty: String = ""
+ protected[this] def newSpecificBuilder: mutable.Builder[Char, String] = new StringBuilder
+ def iterator: Iterator[Char] = s.iterator
+ }
+ }
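+ // Illustrative usage (the helper below is hypothetical, not part of this file):
+ // a method written against `IsSeq` accepts plain strings through `stringIsSeq`,
+ // e.g. `def headOpt[R](r: R)(implicit s: IsSeq[R]): Option[s.A] = s(r).headOption`
+ // yields `headOpt("abc") == Some('a')`.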
+
+ implicit def arrayIsSeq[sealed A0 : ClassTag]: IsSeq[Array[A0]] { type A = A0; type C = Array[A0] } =
+ new IsSeq[Array[A0]] {
+ type A = A0
+ type C = Array[A0]
+ def apply(a: Array[A0]): SeqOps[A0, Seq, Array[A0]] =
+ new SeqOps[A, mutable.ArraySeq, Array[A]] {
+ def apply(i: Int): A = a(i)
+ def length: Int = a.length
+ def toIterable: Iterable[A] = mutable.ArraySeq.make[A @uncheckedCaptures](a)
+ protected def coll: Array[A] = a
+ protected def fromSpecific(coll: IterableOnce[A]^): Array[A] = Array.from(coll)
+ def iterableFactory: FreeSeqFactory[mutable.ArraySeq] = mutable.ArraySeq.untagged
+ override def empty: Array[A] = Array.empty[A]
+ protected def newSpecificBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder
+ def iterator: Iterator[A] = a.iterator
+ }
+ }
+
+ // `Range` can not be unified with the `CC0` parameter of the
+ // `seqOpsIsSeq` definition because it does not take a type parameter.
+ // Hence the need for a separate case:
+ implicit def rangeIsSeq[C0 <: Range]: IsSeq[C0] { type A = Int; type C = immutable.IndexedSeq[Int] } =
+ new IsSeq[C0] {
+ type A = Int
+ type C = immutable.IndexedSeq[Int]
+ def apply(coll: C0): SeqOps[Int, Seq, immutable.IndexedSeq[Int]] = coll
+ }
+
+}
diff --git a/tests/pos-special/stdlib/collection/generic/Subtractable.scala b/tests/pos-special/stdlib/collection/generic/Subtractable.scala
new file mode 100644
index 000000000000..2c0967dbaf4b
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/Subtractable.scala
@@ -0,0 +1,63 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package generic
+import language.experimental.captureChecking
+
+/** This trait represents collection-like objects that can be reduced
+ * using a '-' operator. It defines variants of `-` and `--`
+ * as convenience methods in terms of single-element removal `-`.
+ *
+ * @tparam A the type of the elements of the $coll.
+ * @tparam Repr the type of the $coll itself
+ * @define coll collection
+ * @define Coll Subtractable
+ */
+@deprecated("Subtractable is deprecated. This is now implemented as part of SetOps, MapOps, etc.", "2.13.0")
+trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
+
+ /** The representation object of type `Repr` which contains the collection's elements
+ */
+ protected def repr: Repr
+
+ /** Creates a new $coll from this $coll with an element removed.
+ * @param elem the element to remove
+ * @return a new collection that contains all elements of the current $coll
+ * except one less occurrence of `elem`.
+ */
+ def -(elem: A): Repr
+
+ /** Creates a new $coll from this $coll with some elements removed.
+ *
+ * This method takes two or more elements to be removed. Another overloaded
+ * variant of this method handles the case where a single element is
+ * removed.
+ * @param elem1 the first element to remove.
+ * @param elem2 the second element to remove.
+ * @param elems the remaining elements to remove.
+ * @return a new $coll that contains all elements of the current $coll
+ * except one less occurrence of each of the given elements.
+ */
+ def -(elem1: A, elem2: A, elems: A*): Repr =
+ this - elem1 - elem2 -- elems
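+
+ // Illustrative (pre-2.13 style, as this trait is deprecated): for a
+ // `Subtractable`-backed set `s` containing 1, 2 and 3, `s - (1, 2)` removes
+ // both elements, and `s -- List(1, 2)` folds `-` over the argument; the `/:`
+ // used below is the symbolic form of `foldLeft`.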
+
+ /** Creates a new $coll from this $coll by removing all elements of another
+ * collection.
+ *
+ * @param xs the collection containing the removed elements.
+ * @return a new $coll that contains all elements of the current $coll
+ * except one less occurrence of each of the elements of `elems`.
+ */
+ def --(xs: IterableOnce[A]): Repr = (repr /: xs.iterator) (_ - _)
+}
diff --git a/tests/pos-special/stdlib/collection/generic/package.scala b/tests/pos-special/stdlib/collection/generic/package.scala
new file mode 100644
index 000000000000..0ba67c1bf76e
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/generic/package.scala
@@ -0,0 +1,35 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+
+package object generic {
+ @deprecated("Clearable was moved from collection.generic to collection.mutable", "2.13.0")
+ type Clearable = scala.collection.mutable.Clearable
+
+ @deprecated("Use scala.collection.BuildFrom instead", "2.13.0")
+ type CanBuildFrom[-From, -A, +C] = scala.collection.BuildFrom[From, A, C]
+
+ @deprecated("Growable was moved from collection.generic to collection.mutable", "2.13.0")
+ type Growable[-A] = scala.collection.mutable.Growable[A]
+
+ @deprecated("Shrinkable was moved from collection.generic to collection.mutable", "2.13.0")
+ type Shrinkable[-A] = scala.collection.mutable.Shrinkable[A]
+
+ @deprecated("Use IsIterable instead", "2.13.0")
+ type IsTraversableLike[Repr] = IsIterable[Repr]
+
+ @deprecated("Use IsIterableOnce instead", "2.13.0")
+ type IsTraversableOnce[Repr] = IsIterableOnce[Repr]
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala
new file mode 100644
index 000000000000..3a221fc76b6c
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/ArraySeq.scala
@@ -0,0 +1,692 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import java.util.Arrays
+
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.mutable.{ArrayBuffer, ArrayBuilder, Builder, ArraySeq => MutableArraySeq}
+import scala.collection.convert.impl._
+import scala.reflect.ClassTag
+import scala.runtime.ScalaRunTime
+import scala.util.Sorting
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/**
+ * An immutable array.
+ *
+ * Supports efficient indexed access and has a small memory footprint.
+ *
+ * @define coll immutable array
+ * @define Coll `ArraySeq`
+ */
+sealed abstract class ArraySeq[+A]
+ extends AbstractSeq[A]
+ with IndexedSeq[A]
+ with IndexedSeqOps[A, ArraySeq, ArraySeq[A]]
+ with StrictOptimizedSeqOps[A, ArraySeq, ArraySeq[A]]
+ with EvidenceIterableFactoryDefaults[A, ArraySeq, ClassTag]
+ with Serializable
+ with Pure {
+
+ /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive
+ * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype
+ * or subtype of the element type. */
+ protected def elemTag: ClassTag[_]
+
+ override def iterableFactory: SeqFactory[ArraySeq] = ArraySeq.untagged
+
+ /** The wrapped mutable `Array` that backs this `ArraySeq`. Any changes to this array will break
+ * the expected immutability. Its element type does not have to be equal to the element type of this ArraySeq.
+ * A primitive ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an
+ * array of a supertype or subtype of the element type. */
+ def unsafeArray: Array[_]
+
+ def unsafeArrayAsAnyArray = unsafeArray.asInstanceOf[Array[Any]]
+
+ protected def evidenceIterableFactory: ArraySeq.type = ArraySeq
+ protected def iterableEvidence: ClassTag[A @uncheckedVariance @uncheckedCaptures] = elemTag.asInstanceOf[ClassTag[A]]
+
+ def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit
+
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): A
+
+ override def updated[B >: A](index: Int, elem: B): ArraySeq[B] = {
+ val dest = new Array[Any](length)
+ Array.copy(unsafeArray, 0, dest, 0, length)
+ dest(index) = elem
+ ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]]
+ }
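+
+ // Descriptive note: this generic implementation copies into an `Array[Any]`,
+ // boxing primitives; the primitive subclasses in the companion (`ofInt`,
+ // `ofByte`, ...) override `updated` to keep the backing array unboxed.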
+
+ override def map[B](f: A => B): ArraySeq[B] = {
+ val a = new Array[Any](size)
+ var i = 0
+ while (i < a.length){
+ a(i) = f(apply(i))
+ i += 1
+ }
+ ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
+ }
+
+ override def prepended[B >: A](elem: B): ArraySeq[B] =
+ ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.prepended(elem)).asInstanceOf[ArraySeq[B]]
+
+ override def appended[B >: A](elem: B): ArraySeq[B] =
+ ArraySeq.unsafeWrapArray(unsafeArrayAsAnyArray.appended[Any](elem)).asInstanceOf[ArraySeq[B]]
+
+ /** Fast concatenation of two [[ArraySeq]]s.
+ *
+ * @return null if the optimisation is not possible.
+ */
+ private def appendedAllArraySeq[B >: A](that: ArraySeq[B]): ArraySeq[B] = {
+ // Optimise concatenation of two ArraySeqs
+ // For ArraySeqs with sizes of [100, 1000, 10000] this is [3.5, 4.1, 5.2]x as fast
+ if (isEmpty)
+ that
+ else if (that.isEmpty)
+ this
+ else {
+ val thisIsObj = this.unsafeArray.isInstanceOf[Array[AnyRef]]
+ val thatIsObj = that.unsafeArray.isInstanceOf[Array[AnyRef]]
+ val mismatch = thisIsObj != thatIsObj
+ if (mismatch)
+ // Combining primitives and objects: abort
+ null
+ else if (thisIsObj) {
+ // A and B are objects
+ val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]]
+ val ay = that.unsafeArray.asInstanceOf[Array[B @uncheckedCaptures]]
+ val len = ax.length + ay.length
+ val a = new Array[AnyRef](len)
+ System.arraycopy(ax, 0, a, 0, ax.length)
+ System.arraycopy(ay, 0, a, ax.length, ay.length)
+ ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
+ } else {
+ // A is a primitive and B = A. Use this instance's protected ClassTag.
+ val ax = this.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]]
+ val ay = that.unsafeArray.asInstanceOf[Array[A @uncheckedCaptures]]
+ val len = ax.length + ay.length
+ val a = iterableEvidence.newArray(len)
+ System.arraycopy(ax, 0, a, 0, ax.length)
+ System.arraycopy(ay, 0, a, ax.length, ay.length)
+ ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]]
+ }
+ }
+ }
+
+ override def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): ArraySeq[B] = {
+ def genericResult = {
+ val k = suffix.knownSize
+ if (k == 0) this
+ else {
+ val b = ArrayBuilder.make[Any]
+ if(k >= 0) b.sizeHint(k + unsafeArray.length)
+ b.addAll(unsafeArray)
+ b.addAll(suffix)
+ ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]]
+ }
+ }
+
+ suffix match {
+ case that: ArraySeq[_] =>
+ val result = appendedAllArraySeq(that.asInstanceOf[ArraySeq[B]])
+ if (result == null) genericResult
+ else result
+ case _ =>
+ genericResult
+ }
+ }
+
+ override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): ArraySeq[B] = {
+ def genericResult = {
+ val k = prefix.knownSize
+ if (k == 0) this
+ else {
+ val b = ArrayBuilder.make[Any]
+ if(k >= 0) b.sizeHint(k + unsafeArray.length)
+ b.addAll(prefix)
+ if(k < 0) b.sizeHint(b.length + unsafeArray.length)
+ b.addAll(unsafeArray)
+ ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[ArraySeq[B]]
+ }
+ }
+
+ prefix match {
+ case that: ArraySeq[_] =>
+ val result = that.asInstanceOf[ArraySeq[B]].appendedAllArraySeq(this)
+ if (result == null) genericResult
+ else result
+ case _ =>
+ genericResult
+ }
+ }
+
+ override def zip[B](that: collection.IterableOnce[B]^): ArraySeq[(A, B)] =
+ that match {
+ case bs: ArraySeq[B] =>
+ ArraySeq.tabulate(length min bs.length) { i =>
+ (apply(i), bs(i))
+ }
+ case _ =>
+ strictOptimizedZip[B, ArraySeq[(A, B)]](that, iterableFactory.newBuilder)
+ }
+
+ private inline def ops[A](xs: Array[A @uncheckedCaptures]): ArrayOps[A] = new ArrayOps[A @uncheckedCaptures](xs)
+
+ override def take(n: Int): ArraySeq[A] =
+ if (unsafeArray.length <= n)
+ this
+ else
+ ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).take(n)).asInstanceOf[ArraySeq[A]]
+
+ override def takeRight(n: Int): ArraySeq[A] =
+ if (unsafeArray.length <= n)
+ this
+ else
+ ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).takeRight(n)).asInstanceOf[ArraySeq[A]]
+
+ override def drop(n: Int): ArraySeq[A] =
+ if (n <= 0)
+ this
+ else
+ ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).drop(n)).asInstanceOf[ArraySeq[A]]
+
+ override def dropRight(n: Int): ArraySeq[A] =
+ if (n <= 0)
+ this
+ else
+ ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).dropRight(n)).asInstanceOf[ArraySeq[A]]
+
+ override def slice(from: Int, until: Int): ArraySeq[A] =
+ if (from <= 0 && unsafeArray.length <= until)
+ this
+ else
+ ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).slice(from, until)).asInstanceOf[ArraySeq[A]]
+
+ override def foldLeft[B](z: B)(f: (B, A) => B): B = {
+ // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.3, 1.8, 1.8]x as fast
+ // as the same while-loop over this instead of unsafeArray.
+ val array = unsafeArray
+ var b = z
+ var i = 0
+ while (i < array.length) {
+ val a = array(i).asInstanceOf[A]
+ b = f(b, a)
+ i += 1
+ }
+ b
+ }
+
+ override def foldRight[B](z: B)(f: (A, B) => B): B = {
+ // For ArraySeqs with sizes of [100, 1000, 10000] this is [1.6, 1.8, 2.7]x as fast
+ // as the same while-loop over this instead of unsafeArray.
+ val array = unsafeArray
+ var b = z
+ var i = array.length
+ while (i > 0) {
+ i -= 1
+ val a = array(i).asInstanceOf[A]
+ b = f(a, b)
+ }
+ b
+ }
+
+ override def tail: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).tail).asInstanceOf[ArraySeq[A]]
+
+ override def reverse: ArraySeq[A] = ArraySeq.unsafeWrapArray(ops(unsafeArrayAsAnyArray).reverse).asInstanceOf[ArraySeq[A]]
+
+ override protected[this] def className = "ArraySeq"
+
+ override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = {
+ val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len)
+ if(copied > 0) {
+ Array.copy(unsafeArray, 0, xs, start, copied)
+ }
+ copied
+ }
+
+ override protected final def applyPreferredMaxLength: Int = Int.MaxValue
+
+ override def sorted[B >: A](implicit ord: Ordering[B]): ArraySeq[A] =
+ if(unsafeArray.length <= 1) this
+ else {
+ val a = Array.copyAs[AnyRef](unsafeArray, length)(ClassTag.AnyRef)
+ Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]])
+ new ArraySeq.ofRef[AnyRef](a).asInstanceOf[ArraySeq[A]]
+ }
+}
+
+/**
+ * $factoryInfo
+ * @define coll immutable array
+ * @define Coll `ArraySeq`
+ */
+@SerialVersionUID(3L)
+object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self =>
+ val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self)
+
+ private[this] lazy val emptyImpl = new ArraySeq.ofRef[Nothing](new Array[Nothing](0))
+
+ def empty[A : ClassTag]: ArraySeq[A] = emptyImpl
+
+ def from[A](it: scala.collection.IterableOnce[A]^)(implicit tag: ClassTag[A]): ArraySeq[A] = it match {
+ case as: ArraySeq[A] => as
+ case _ => unsafeWrapArray(Array.from[A](it))
+ }
+
+ def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] =
+ ArrayBuffer.newBuilder[A @uncheckedCaptures].mapResult(b => unsafeWrapArray[A](b.toArray))
+
+ override def fill[A : ClassTag](n: Int)(elem: => A): ArraySeq[A] = tabulate(n)(_ => elem)
+
+ override def tabulate[A : ClassTag](n: Int)(f: Int => A): ArraySeq[A] = {
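+ // Descriptive note: `Array.ofDim` consults the `ClassTag`, so a primitive tag
+ // yields an unboxed array, and `ScalaRunTime.array_update` performs the
+ // corresponding generic element write.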
+ val elements = Array.ofDim[A @uncheckedCaptures](scala.math.max(n, 0))
+ var i = 0
+ while (i < n) {
+ ScalaRunTime.array_update(elements, i, f(i))
+ i = i + 1
+ }
+ ArraySeq.unsafeWrapArray(elements)
+ }
+
+ /**
+ * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type
+ * without copying. Any changes to wrapped array will break the expected immutability.
+ *
+ * Note that an array containing boxed primitives can be wrapped in an `ArraySeq` without
+ * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime,
+ * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast:
+ * `ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[Int]]`. The values are still
+ * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing
+ * `ArraySeq.unsafeWrapArray(a.asInstanceOf[Array[Int]])` does not work, it throws a
+ * `ClassCastException` at runtime.
+ */
+ def unsafeWrapArray[T](x: Array[T @uncheckedCaptures]): ArraySeq[T] = ((x: @unchecked) match {
+ case null => null
+ case x: Array[AnyRef] => new ofRef[AnyRef](x)
+ case x: Array[Int] => new ofInt(x)
+ case x: Array[Double] => new ofDouble(x)
+ case x: Array[Long] => new ofLong(x)
+ case x: Array[Float] => new ofFloat(x)
+ case x: Array[Char] => new ofChar(x)
+ case x: Array[Byte] => new ofByte(x)
+ case x: Array[Short] => new ofShort(x)
+ case x: Array[Boolean] => new ofBoolean(x)
+ case x: Array[Unit] => new ofUnit(x)
+ }).asInstanceOf[ArraySeq[T]]
+
+ @SerialVersionUID(3L)
+ final class ofRef[T <: AnyRef](val unsafeArray: Array[T]) extends ArraySeq[T] {
+ def elemTag = ClassTag[T](unsafeArray.getClass.getComponentType)
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): T = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any): Boolean = that match {
+ case that: ofRef[_] =>
+ Array.equals(
+ this.unsafeArray.asInstanceOf[Array[AnyRef]],
+ that.unsafeArray.asInstanceOf[Array[AnyRef]])
+ case _ => super.equals(that)
+ }
+ override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq.ofRef[T] = {
+ if(unsafeArray.length <= 1) this
+ else {
+ val a = unsafeArray.clone()
+ Arrays.sort(a, ord.asInstanceOf[Ordering[T]])
+ new ArraySeq.ofRef(a)
+ }
+ }
+ override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length)
+ else shape.parUnbox(new ObjectArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[AnyStepper[T] with EfficientSplit])
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofByte(val unsafeArray: Array[Byte]) extends ArraySeq[Byte] {
+ protected def elemTag = ClassTag.Byte
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Byte = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofByte => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def sorted[B >: Byte](implicit ord: Ordering[B]): ArraySeq[Byte] =
+ if(length <= 1) this
+ else if(ord eq Ordering.Byte) {
+ val a = unsafeArray.clone()
+ Arrays.sort(a)
+ new ArraySeq.ofByte(a)
+ } else super.sorted[B]
+ override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length))
+ else new WidenedByteArrayStepper(unsafeArray, 0, unsafeArray.length)
+ ).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Byte](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Byte => new ArraySeq.ofByte(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Byte](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Byte => new ArraySeq.ofByte(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Byte](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Byte => new ArraySeq.ofByte(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofShort(val unsafeArray: Array[Short]) extends ArraySeq[Short] {
+ protected def elemTag = ClassTag.Short
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Short = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofShort => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def sorted[B >: Short](implicit ord: Ordering[B]): ArraySeq[Short] =
+ if(length <= 1) this
+ else if(ord eq Ordering.Short) {
+ val a = unsafeArray.clone()
+ Arrays.sort(a)
+ new ArraySeq.ofShort(a)
+ } else super.sorted[B]
+ override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length))
+ else new WidenedShortArrayStepper(unsafeArray, 0, unsafeArray.length)
+ ).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Short](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Short => new ArraySeq.ofShort(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Short](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Short => new ArraySeq.ofShort(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Short](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Short => new ArraySeq.ofShort(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofChar(val unsafeArray: Array[Char]) extends ArraySeq[Char] {
+ protected def elemTag = ClassTag.Char
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Char = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofChar => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def sorted[B >: Char](implicit ord: Ordering[B]): ArraySeq[Char] =
+ if(length <= 1) this
+ else if(ord eq Ordering.Char) {
+ val a = unsafeArray.clone()
+ Arrays.sort(a)
+ new ArraySeq.ofChar(a)
+ } else super.sorted[B]
+ override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length))
+ else new WidenedCharArrayStepper(unsafeArray, 0, unsafeArray.length)
+ ).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Char](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Char => new ArraySeq.ofChar(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Char](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Char => new ArraySeq.ofChar(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Char](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Char => new ArraySeq.ofChar(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+
+ override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type =
+ (new MutableArraySeq.ofChar(unsafeArray)).addString(sb, start, sep, end)
+ }
+
+ @SerialVersionUID(3L)
+ final class ofInt(val unsafeArray: Array[Int]) extends ArraySeq[Int] {
+ protected def elemTag = ClassTag.Int
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Int = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofInt => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def sorted[B >: Int](implicit ord: Ordering[B]): ArraySeq[Int] =
+ if(length <= 1) this
+ else if(ord eq Ordering.Int) {
+ val a = unsafeArray.clone()
+ Arrays.sort(a)
+ new ArraySeq.ofInt(a)
+ } else super.sorted[B]
+ override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new IntArrayStepper(unsafeArray, 0, unsafeArray.length))
+ else new IntArrayStepper(unsafeArray, 0, unsafeArray.length)
+ ).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Int](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Int => new ArraySeq.ofInt(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Int](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Int => new ArraySeq.ofInt(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Int](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Int => new ArraySeq.ofInt(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofLong(val unsafeArray: Array[Long]) extends ArraySeq[Long] {
+ protected def elemTag = ClassTag.Long
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Long = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofLong => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def sorted[B >: Long](implicit ord: Ordering[B]): ArraySeq[Long] =
+ if(length <= 1) this
+ else if(ord eq Ordering.Long) {
+ val a = unsafeArray.clone()
+ Arrays.sort(a)
+ new ArraySeq.ofLong(a)
+ } else super.sorted[B]
+ override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParLongStepper(new LongArrayStepper(unsafeArray, 0, unsafeArray.length))
+ else new LongArrayStepper(unsafeArray, 0, unsafeArray.length)
+ ).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Long](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Long => new ArraySeq.ofLong(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Long](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Long => new ArraySeq.ofLong(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Long](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Long => new ArraySeq.ofLong(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofFloat(val unsafeArray: Array[Float]) extends ArraySeq[Float] {
+ protected def elemTag = ClassTag.Float
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Float = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofFloat => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length))
+ else new WidenedFloatArrayStepper(unsafeArray, 0, unsafeArray.length)
+ ).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Float](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Float => new ArraySeq.ofFloat(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Float](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Float => new ArraySeq.ofFloat(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Float](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Float => new ArraySeq.ofFloat(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofDouble(val unsafeArray: Array[Double]) extends ArraySeq[Double] {
+ protected def elemTag = ClassTag.Double
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Double = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofDouble => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length))
+ else new DoubleArrayStepper(unsafeArray, 0, unsafeArray.length)
+ ).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Double](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Double => new ArraySeq.ofDouble(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Double](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Double => new ArraySeq.ofDouble(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Double](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Double => new ArraySeq.ofDouble(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofBoolean(val unsafeArray: Array[Boolean]) extends ArraySeq[Boolean] {
+ protected def elemTag = ClassTag.Boolean
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Boolean = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofBoolean => Arrays.equals(unsafeArray, that.unsafeArray)
+ case _ => super.equals(that)
+ }
+ override def sorted[B >: Boolean](implicit ord: Ordering[B]): ArraySeq[Boolean] =
+ if(length <= 1) this
+ else if(ord eq Ordering.Boolean) {
+ val a = unsafeArray.clone()
+ Sorting.stableSort(a)
+ new ArraySeq.ofBoolean(a)
+ } else super.sorted[B]
+ override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit =
+ new BoxedBooleanArrayStepper(unsafeArray, 0, unsafeArray.length).asInstanceOf[S with EfficientSplit]
+ override def updated[B >: Boolean](index: Int, elem: B): ArraySeq[B] =
+ elem match {
+ case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.updated(index, b))
+ case _ => super.updated(index, elem)
+ }
+ override def appended[B >: Boolean](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.appended(b))
+ case _ => super.appended(elem)
+ }
+ override def prepended[B >: Boolean](elem: B): ArraySeq[B] =
+ elem match {
+ case b: Boolean => new ArraySeq.ofBoolean(unsafeArray.prepended(b))
+ case _ => super.prepended(elem)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofUnit(val unsafeArray: Array[Unit]) extends ArraySeq[Unit] {
+ protected def elemTag = ClassTag.Unit
+ def length: Int = unsafeArray.length
+ @throws[ArrayIndexOutOfBoundsException]
+ def apply(i: Int): Unit = unsafeArray(i)
+ override def hashCode = MurmurHash3.arraySeqHash(unsafeArray)
+ override def equals(that: Any) = that match {
+ case that: ofUnit => unsafeArray.length == that.unsafeArray.length
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](unsafeArray)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit =
+ new ObjectArrayStepper[AnyRef](unsafeArray.asInstanceOf[Array[AnyRef]], 0, unsafeArray.length).asInstanceOf[S with EfficientSplit]
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/BitSet.scala b/tests/pos-special/stdlib/collection/immutable/BitSet.scala
new file mode 100644
index 000000000000..9c2bfdad54d0
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/BitSet.scala
@@ -0,0 +1,376 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import BitSetOps.{LogWL, updateArray}
+import mutable.Builder
+import scala.annotation.{implicitNotFound, nowarn}
+import language.experimental.captureChecking
+
+/** A class for immutable bitsets.
+ * $bitsetinfo
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-bitsets "Scala's Collection Library overview"]]
+ * section on `Immutable BitSets` for more information.
+ *
+ * @define Coll `immutable.BitSet`
+ * @define coll immutable bitset
+ */
+sealed abstract class BitSet
+ extends AbstractSet[Int]
+ with SortedSet[Int]
+ with SortedSetOps[Int, SortedSet, BitSet]
+ with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet]
+ with collection.BitSet
+ with collection.BitSetOps[BitSet]
+ with Serializable {
+
+ override def unsorted: Set[Int] = this
+
+ override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll)
+ override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder
+ override def empty: BitSet = bitSetFactory.empty
+
+ def bitSetFactory = BitSet
+
+ protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems)
+
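+ // Descriptive note for `incl`/`excl` below: `elem >> LogWL` selects the word
+ // and `1L << elem` the bit; a JVM `Long` shift only uses the low 6 bits of its
+ // count, so this is effectively `1L << (elem & 63)`.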
+ def incl(elem: Int): BitSet = {
+ require(elem >= 0, "bitset element must be >= 0")
+ if (contains(elem)) this
+ else {
+ val idx = elem >> LogWL
+ updateWord(idx, word(idx) | (1L << elem))
+ }
+ }
+
+ def excl(elem: Int): BitSet = {
+ require(elem >= 0, "bitset element must be >= 0")
+ if (contains(elem)) {
+ val idx = elem >> LogWL
+ updateWord(idx, word(idx) & ~(1L << elem))
+ } else this
+ }
+
+ /** Update word at index `idx`; enlarge set if `idx` outside range of set.
+ */
+ protected def updateWord(idx: Int, w: Long): BitSet
+
+ override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f)
+ override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] =
+ super[StrictOptimizedSortedSetOps].map(f)
+
+ override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f)
+ override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] =
+ super[StrictOptimizedSortedSetOps].flatMap(f)
+
+ override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf)
+ override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] =
+ super[StrictOptimizedSortedSetOps].collect(pf)
+
+ // necessary for disambiguation
+ override def zip[B](that: scala.IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] =
+ super.zip(that)
+
+ protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this)
+}
+
+/**
+ * $factoryInfo
+ * @define Coll `immutable.BitSet`
+ * @define coll immutable bitset
+ */
+@nowarn("cat=deprecation&msg=Implementation classes of BitSet should not be accessed directly")
+@SerialVersionUID(3L)
+object BitSet extends SpecificIterableFactory[Int, BitSet] {
+
+ def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet =
+ it match {
+ case bs: BitSet => bs
+ case _ => (newBuilder ++= it).result()
+ }
+
+ final val empty: BitSet = new BitSet1(0L)
+
+ def newBuilder: Builder[Int, BitSet] =
+ mutable.BitSet.newBuilder.mapResult(bs => fromBitMaskNoCopy(bs.elems))
+
+ private def createSmall(a: Long, b: Long): BitSet = if (b == 0L) new BitSet1(a) else new BitSet2(a, b)
+
+ /** A bitset containing all the bits in an array */
+ def fromBitMask(elems: Array[Long]): BitSet = {
+ val len = elems.length
+ if (len == 0) empty
+ else if (len == 1) new BitSet1(elems(0))
+ else if (len == 2) createSmall(elems(0), elems(1))
+ else {
+ val a = java.util.Arrays.copyOf(elems, len)
+ new BitSetN(a)
+ }
+ }
+
+ /** A bitset containing all the bits in an array, wrapping the existing
+ * array without copying.
+ */
+ def fromBitMaskNoCopy(elems: Array[Long]): BitSet = {
+ val len = elems.length
+ if (len == 0) empty
+ else if (len == 1) new BitSet1(elems(0))
+ else if (len == 2) createSmall(elems(0), elems(1))
+ else new BitSetN(elems)
+ }
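+
+ // Illustrative: `BitSet.fromBitMask(Array(5L))` contains exactly 0 and 2 (bits
+ // 0 and 2 of the first word), whereas `fromBitMaskNoCopy` shares the caller's
+ // array, so that array must not be mutated afterwards.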
+
+ @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0")
+ class BitSet1(val elems: Long) extends BitSet {
+ protected[collection] def nwords = 1
+ protected[collection] def word(idx: Int) = if (idx == 0) elems else 0L
+ protected[collection] def updateWord(idx: Int, w: Long): BitSet =
+ if (idx == 0) new BitSet1(w)
+ else if (idx == 1) createSmall(elems, w)
+ else this.fromBitMaskNoCopy(updateArray(Array(elems), idx, w))
+
+
+ override def diff(other: collection.Set[Int]): BitSet = other match {
+ case bs: collection.BitSet => bs.nwords match {
+ case 0 => this
+ case _ =>
+ val newElems = elems & ~bs.word(0)
+ if (newElems == 0L) this.empty else new BitSet1(newElems)
+ }
+ case _ => super.diff(other)
+ }
+
+ override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = {
+ val _elems = BitSetOps.computeWordForFilter(pred, isFlipped, elems, 0)
+ if (_elems == 0L) this.empty else new BitSet1(_elems)
+ }
+ }
+
+ @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0")
+ class BitSet2(val elems0: Long, val elems1: Long) extends BitSet {
+ protected[collection] def nwords = 2
+ protected[collection] def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L
+ protected[collection] def updateWord(idx: Int, w: Long): BitSet =
+ if (idx == 0) new BitSet2(w, elems1)
+ else if (idx == 1) createSmall(elems0, w)
+ else this.fromBitMaskNoCopy(updateArray(Array(elems0, elems1), idx, w))
+
+
+ override def diff(other: collection.Set[Int]): BitSet = other match {
+ case bs: collection.BitSet => bs.nwords match {
+ case 0 => this
+ case 1 =>
+ new BitSet2(elems0 & ~bs.word(0), elems1)
+ case _ =>
+ val _elems0 = elems0 & ~bs.word(0)
+ val _elems1 = elems1 & ~bs.word(1)
+
+ if (_elems1 == 0L) {
+ if (_elems0 == 0L) {
+ this.empty
+ } else {
+ new BitSet1(_elems0)
+ }
+ } else {
+ new BitSet2(_elems0, _elems1)
+ }
+ }
+ case _ => super.diff(other)
+ }
+
+ override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = {
+ val _elems0 = BitSetOps.computeWordForFilter(pred, isFlipped, elems0, 0)
+ val _elems1 = BitSetOps.computeWordForFilter(pred, isFlipped, elems1, 1)
+
+ if (_elems1 == 0L) {
+ if (_elems0 == 0L) {
+ this.empty
+ }
+ else new BitSet1(_elems0)
+ }
+ else new BitSet2(_elems0, _elems1)
+ }
+ }
+
+ @deprecated("Implementation classes of BitSet should not be accessed directly", "2.13.0")
+ class BitSetN(val elems: Array[Long]) extends BitSet {
+ protected[collection] def nwords = elems.length
+
+ protected[collection] def word(idx: Int) = if (idx < nwords) elems(idx) else 0L
+
+ protected[collection] def updateWord(idx: Int, w: Long): BitSet = this.fromBitMaskNoCopy(updateArray(elems, idx, w))
+
+ override def diff(that: collection.Set[Int]): BitSet = that match {
+ case bs: collection.BitSet =>
+ /*
+ * Algorithm:
+ *
+ * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with
+ * the fewer words. Two extra concerns for optimization are described below.
+ *
+ * Array Shrinking:
+ * If `this` is not longer than `bs`, then since we must iterate through the full array of words,
+ * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new
+ * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1`
+ *
+ * Tracking Changes:
+ * If the two sets are disjoint, then we can return `this`. Therefore, until at least one change is detected,
+ * we check each word for if it has changed from its corresponding word in `this`. Once a single change is
+ * detected, we stop checking because the cost of the new Array must be paid anyways.
+ */
+
+ val bsnwords = bs.nwords
+ val thisnwords = nwords
+ if (bsnwords >= thisnwords) {
+ // here, we may have opportunity to shrink the size of the array
+ // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length
+ var i = thisnwords - 1
+ var currentWord = 0L
+ // if there are never any changes, we can return `this` at the end
+ var anyChanges = false
+ while (i >= 0 && currentWord == 0L) {
+ val oldWord = word(i)
+ currentWord = oldWord & ~bs.word(i)
+ anyChanges ||= currentWord != oldWord
+ i -= 1
+ }
+ i match {
+ case -1 =>
+ if (anyChanges) {
+ if (currentWord == 0) {
+ this.empty
+ } else {
+ new BitSet1(currentWord)
+ }
+ } else {
+ this
+ }
+ case 0 =>
+ val oldFirstWord = word(0)
+ val firstWord = oldFirstWord & ~bs.word(0)
+ anyChanges ||= firstWord != oldFirstWord
+ if (anyChanges) {
+ new BitSet2(firstWord, currentWord)
+ } else {
+ this
+ }
+ case _ =>
+ val minimumNonZeroIndex: Int = i + 1
+ while (!anyChanges && i >= 0) {
+ val oldWord = word(i)
+ currentWord = oldWord & ~bs.word(i)
+ anyChanges ||= currentWord != oldWord
+ i -= 1
+ }
+ if (anyChanges) {
+ val newArray = elems.take(minimumNonZeroIndex + 1)
+ newArray(i + 1) = currentWord
+ while (i >= 0) {
+ newArray(i) = word(i) & ~bs.word(i)
+ i -= 1
+ }
+ new BitSetN(newArray)
+ } else {
+ this
+ }
+ }
+ } else {
+ var i = bsnwords - 1
+ var anyChanges = false
+ var currentWord = 0L
+ while (i >= 0 && !anyChanges) {
+ val oldWord = word(i)
+ currentWord = oldWord & ~bs.word(i)
+ anyChanges ||= currentWord != oldWord
+ i -= 1
+ }
+ if (anyChanges) {
+ val newElems = elems.clone()
+ newElems(i + 1) = currentWord
+ while (i >= 0) {
+ newElems(i) = word(i) & ~bs.word(i)
+ i -= 1
+ }
+ this.fromBitMaskNoCopy(newElems)
+ } else {
+ this
+ }
+ }
+ case _ => super.diff(that)
+ }
+
+
+ override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = {
+ // here, we may have opportunity to shrink the size of the array
+ // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length
+ var i = nwords - 1
+ var currentWord = 0L
+ // if there are never any changes, we can return `this` at the end
+ var anyChanges = false
+ while (i >= 0 && currentWord == 0L) {
+ val oldWord = word(i)
+ currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i)
+ anyChanges ||= currentWord != oldWord
+ i -= 1
+ }
+ i match {
+ case -1 =>
+ if (anyChanges) {
+ if (currentWord == 0) {
+ this.empty
+ } else {
+ new BitSet1(currentWord)
+ }
+ } else {
+ this
+ }
+ case 0 =>
+ val oldFirstWord = word(0)
+ val firstWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldFirstWord, 0)
+ anyChanges ||= firstWord != oldFirstWord
+ if (anyChanges) {
+ new BitSet2(firstWord, currentWord)
+ } else {
+ this
+ }
+ case _ =>
+ val minimumNonZeroIndex: Int = i + 1
+ while (!anyChanges && i >= 0) {
+ val oldWord = word(i)
+ currentWord = BitSetOps.computeWordForFilter(pred, isFlipped, oldWord, i)
+ anyChanges ||= currentWord != oldWord
+ i -= 1
+ }
+ if (anyChanges) {
+ val newArray = elems.take(minimumNonZeroIndex + 1)
+ newArray(i + 1) = currentWord
+ while (i >= 0) {
+ newArray(i) = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i)
+ i -= 1
+ }
+ new BitSetN(newArray)
+ } else {
+ this
+ }
+ }
+ }
+
+ override def toBitMask: Array[Long] = elems.clone()
+ }
+
+ @SerialVersionUID(3L)
+ private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) {
+ protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems)
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala
new file mode 100644
index 000000000000..fc9bcb022874
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/ChampCommon.scala
@@ -0,0 +1,253 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.immutable
+
+
+import java.lang.Integer.bitCount
+import java.lang.Math.ceil
+import java.lang.System.arraycopy
+import language.experimental.captureChecking
+
+private[collection] object Node {
+ final val HashCodeLength = 32
+
+ final val BitPartitionSize = 5
+
+ final val BitPartitionMask = (1 << BitPartitionSize) - 1
+
+ final val MaxDepth = ceil(HashCodeLength.toDouble / BitPartitionSize).toInt
+
+ final val BranchingFactor = 1 << BitPartitionSize
+
+ final def maskFrom(hash: Int, shift: Int): Int = (hash >>> shift) & BitPartitionMask
+
+ final def bitposFrom(mask: Int): Int = 1 << mask
+
+ final def indexFrom(bitmap: Int, bitpos: Int): Int = bitCount(bitmap & (bitpos - 1))
+
+ final def indexFrom(bitmap: Int, mask: Int, bitpos: Int): Int = if (bitmap == -1) mask else indexFrom(bitmap, bitpos)
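+
+ // Worked example (illustrative): with BitPartitionSize = 5, a hash is consumed
+ // five bits per trie level; for shift = 5, `maskFrom(hash, 5)` reads bits 5..9,
+ // `bitposFrom` turns that 0..31 value into a single set bit, and `indexFrom`
+ // counts the 1-bits of `bitmap` below that bit to get the compressed array index.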
+
+}
+
+private[collection] abstract class Node[T <: Node[T]] {
+
+ def hasNodes: Boolean
+
+ def nodeArity: Int
+
+ def getNode(index: Int): T
+
+ def hasPayload: Boolean
+
+ def payloadArity: Int
+
+ def getPayload(index: Int): Any
+
+ def getHash(index: Int): Int
+
+ def cachedJavaKeySetHashCode: Int
+
+ private final def arrayIndexOutOfBounds(as: Array[_], ix: Int): ArrayIndexOutOfBoundsException =
+ new ArrayIndexOutOfBoundsException(s"$ix is out of bounds (min 0, max ${as.length-1})")
+
+ protected final def removeElement(as: Array[Int], ix: Int): Array[Int] = {
+ if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+ if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix)
+ val result = new Array[Int](as.length - 1)
+ arraycopy(as, 0, result, 0, ix)
+ arraycopy(as, ix + 1, result, ix, as.length - ix - 1)
+ result
+ }
+
+ protected final def removeAnyElement(as: Array[Any], ix: Int): Array[Any] = {
+ if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+ if (ix > as.length - 1) throw arrayIndexOutOfBounds(as, ix)
+ val result = new Array[Any](as.length - 1)
+ arraycopy(as, 0, result, 0, ix)
+ arraycopy(as, ix + 1, result, ix, as.length - ix - 1)
+ result
+ }
+
+ protected final def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
+ if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+ if (ix > as.length) throw arrayIndexOutOfBounds(as, ix)
+ val result = new Array[Int](as.length + 1)
+ arraycopy(as, 0, result, 0, ix)
+ result(ix) = elem
+ arraycopy(as, ix, result, ix + 1, as.length - ix)
+ result
+ }
+ protected final def insertAnyElement(as: Array[Any], ix: Int, elem: Int): Array[Any] = {
+ if (ix < 0) throw arrayIndexOutOfBounds(as, ix)
+ if (ix > as.length) throw arrayIndexOutOfBounds(as, ix)
+ val result = new Array[Any](as.length + 1)
+ arraycopy(as, 0, result, 0, ix)
+ result(ix) = elem
+ arraycopy(as, ix, result, ix + 1, as.length - ix)
+ result
+ }
+}
+
+/**
+ * Base class for fixed-stack iterators that traverse a hash-trie. The iterator performs a
+ * depth-first pre-order traversal, which first yields all payload elements of the
+ * current node before traversing its sub-nodes (left to right).
+ *
+ * @tparam T the trie node type we are iterating over
+ */
+private[immutable] abstract class ChampBaseIterator[T <: Node[T]] {
+
+ import Node.MaxDepth
+
+ // Note: this code is duplicated to a large extent both in
+ // ChampBaseReverseIterator and in convert.impl.ChampStepperBase.
+ // If you change this code, check those also in case they also
+ // need to be modified.
+
+ protected var currentValueCursor: Int = 0
+ protected var currentValueLength: Int = 0
+ protected var currentValueNode: T = _
+
+ private[this] var currentStackLevel: Int = -1
+ private[this] var nodeCursorsAndLengths: Array[Int] = _
+ private[this] var nodes: Array[T] = _
+ private def initNodes(): Unit = {
+ if (nodeCursorsAndLengths eq null) {
+ nodeCursorsAndLengths = new Array[Int](MaxDepth * 2)
+ nodes = new Array[Node[T]](MaxDepth).asInstanceOf[Array[T]]
+ }
+ }
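+
+ // Descriptive note: the traversal stack is flattened into arrays; level `i`
+ // keeps its cursor at `nodeCursorsAndLengths(2 * i)`, its node arity at
+ // `nodeCursorsAndLengths(2 * i + 1)`, and the node itself in `nodes(i)`.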
+
+ def this(rootNode: T) = {
+ this()
+ if (rootNode.hasNodes) pushNode(rootNode)
+ if (rootNode.hasPayload) setupPayloadNode(rootNode)
+ }
+
+ private final def setupPayloadNode(node: T): Unit = {
+ currentValueNode = node
+ currentValueCursor = 0
+ currentValueLength = node.payloadArity
+ }
+
+ private final def pushNode(node: T): Unit = {
+ initNodes()
+ currentStackLevel = currentStackLevel + 1
+
+ val cursorIndex = currentStackLevel * 2
+ val lengthIndex = currentStackLevel * 2 + 1
+
+ nodes(currentStackLevel) = node
+ nodeCursorsAndLengths(cursorIndex) = 0
+ nodeCursorsAndLengths(lengthIndex) = node.nodeArity
+ }
+
+ private final def popNode(): Unit = {
+ currentStackLevel = currentStackLevel - 1
+ }
+
+ /**
+ * Searches for next node that contains payload values,
+ * and pushes encountered sub-nodes on a stack for depth-first traversal.
+ */
+ private final def searchNextValueNode(): Boolean = {
+ while (currentStackLevel >= 0) {
+ val cursorIndex = currentStackLevel * 2
+ val lengthIndex = currentStackLevel * 2 + 1
+
+ val nodeCursor = nodeCursorsAndLengths(cursorIndex)
+ val nodeLength = nodeCursorsAndLengths(lengthIndex)
+
+ if (nodeCursor < nodeLength) {
+ nodeCursorsAndLengths(cursorIndex) += 1
+
+ val nextNode = nodes(currentStackLevel).getNode(nodeCursor)
+
+ if (nextNode.hasNodes) { pushNode(nextNode) }
+ if (nextNode.hasPayload) { setupPayloadNode(nextNode) ; return true }
+ } else {
+ popNode()
+ }
+ }
+
+ return false
+ }
+
+ final def hasNext = (currentValueCursor < currentValueLength) || searchNextValueNode()
+
+}
+
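+// A concrete subclass consumes payload elements through `currentValueNode` and
+// `currentValueCursor`. Editor's sketch of a key iterator in the style of the map iterators
+// referenced later in this patch:
+//
+//   private final class MapKeyIterator[K, V](rootNode: MapNode[K, V])
+//     extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] {
+//     def next(): K = {
+//       if (!hasNext) throw new NoSuchElementException
+//       val key = currentValueNode.getKey(currentValueCursor)
+//       currentValueCursor += 1
+//       key
+//     }
+//   }
+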
+/**
+ * Base class for fixed-stack iterators that traverse a hash-trie in reverse order. The base
+ * iterator performs a depth-first post-order traversal, traversing sub-nodes (right to left).
+ *
+ * @tparam T the trie node type we are iterating over
+ */
+private[immutable] abstract class ChampBaseReverseIterator[T <: Node[T]] {
+
+ import Node.MaxDepth
+
+ protected var currentValueCursor: Int = -1
+ protected var currentValueNode: T = _
+
+ private[this] var currentStackLevel: Int = -1
+ private[this] val nodeIndex: Array[Int] = new Array[Int](MaxDepth + 1)
+ private[this] val nodeStack: Array[T] = new Array[Node[T]](MaxDepth + 1).asInstanceOf[Array[T]]
+
+ def this(rootNode: T) = {
+ this()
+ pushNode(rootNode)
+ searchNextValueNode()
+ }
+
+ private final def setupPayloadNode(node: T): Unit = {
+ currentValueNode = node
+ currentValueCursor = node.payloadArity - 1
+ }
+
+ private final def pushNode(node: T): Unit = {
+ currentStackLevel = currentStackLevel + 1
+
+ nodeStack(currentStackLevel) = node
+ nodeIndex(currentStackLevel) = node.nodeArity - 1
+ }
+
+ private final def popNode(): Unit = {
+ currentStackLevel = currentStackLevel - 1
+ }
+
+ /**
+ * Searches for rightmost node that contains payload values,
+ * and pushes encountered sub-nodes on a stack for depth-first traversal.
+ */
+ private final def searchNextValueNode(): Boolean = {
+ while (currentStackLevel >= 0) {
+ val nodeCursor = nodeIndex(currentStackLevel) ; nodeIndex(currentStackLevel) = nodeCursor - 1
+
+ if (nodeCursor >= 0) {
+ val nextNode = nodeStack(currentStackLevel).getNode(nodeCursor)
+ pushNode(nextNode)
+ } else {
+ val currNode = nodeStack(currentStackLevel)
+ popNode()
+
+ if (currNode.hasPayload) { setupPayloadNode(currNode) ; return true }
+ }
+ }
+
+ return false
+ }
+
+ final def hasNext = (currentValueCursor >= 0) || searchNextValueNode()
+
+}
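+
+// Editor's note: this reverse iterator backs `reverseIterator`, and hence `last` and `init`, on
+// the CHAMP collections that follow in this patch; it descends only the rightmost spine of the
+// trie instead of performing a full forward traversal.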
diff --git a/tests/pos-special/stdlib/collection/immutable/HashMap.scala b/tests/pos-special/stdlib/collection/immutable/HashMap.scala
new file mode 100644
index 000000000000..c364924db3a3
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/HashMap.scala
@@ -0,0 +1,2425 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.immutable
+
+import java.lang.Integer.bitCount
+import java.lang.System.arraycopy
+
+import scala.annotation.unchecked.{uncheckedVariance => uV}
+import scala.collection.Hashing.improve
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable, mutable.ReusableBuilder
+import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable}
+import scala.runtime.AbstractFunction2
+import scala.runtime.Statics.releaseFence
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** This class implements immutable maps using a Compressed Hash-Array Mapped Prefix-tree.
+ * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details.
+ *
+ * @tparam K the type of the keys contained in this hash map.
+ * @tparam V the type of the values associated with the keys in this hash map.
+ *
+ * @define Coll `immutable.HashMap`
+ * @define coll immutable champ hash map
+ */
+
+final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: BitmapIndexedMapNode[K, V])
+ extends AbstractMap[K, V]
+ with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]]
+ with MapFactoryDefaults[K, V, HashMap, Iterable]
+ with DefaultSerializable {
+
+ def this() = this(MapNode.empty)
+
+ // This release fence is present because rootNode may have previously been mutated during construction.
+ releaseFence()
+
+ override def mapFactory: MapFactory[HashMap] = HashMap
+
+ override def knownSize: Int = rootNode.size
+
+ override def size: Int = rootNode.size
+
+ override def isEmpty: Boolean = rootNode.size == 0
+
+ override def keySet: Set[K] = if (size == 0) Set.empty else new HashKeySet
+
+ private final class HashKeySet extends ImmutableKeySet {
+
+ private[this] def newKeySetOrThis(newHashMap: HashMap[K, _]): Set[K] =
+ if (newHashMap eq HashMap.this) this else newHashMap.keySet
+ private[this] def newKeySetOrThis(newRootNode: BitmapIndexedMapNode[K, _]): Set[K] =
+ if (newRootNode eq rootNode) this else new HashMap(newRootNode).keySet
+
+ override def incl(elem: K): Set[K] = {
+ val originalHash = elem.##
+ val improvedHash = improve(originalHash)
+ val newNode = rootNode.updated(elem, null.asInstanceOf[V], originalHash, improvedHash, 0, replaceValue = false)
+ newKeySetOrThis(newNode)
+ }
+ override def excl(elem: K): Set[K] = newKeySetOrThis(HashMap.this - elem)
+ override def filter(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filter(kv => pred(kv._1)))
+ override def filterNot(pred: K => Boolean): Set[K] = newKeySetOrThis(HashMap.this.filterNot(kv => pred(kv._1)))
+ }
+
+ def iterator: Iterator[(K, V)] = {
+ if (isEmpty) Iterator.empty
+ else new MapKeyValueTupleIterator[K, V](rootNode)
+ }
+
+ override def keysIterator: Iterator[K] = {
+ if (isEmpty) Iterator.empty
+ else new MapKeyIterator[K, V](rootNode)
+ }
+ override def valuesIterator: Iterator[V] = {
+ if (isEmpty) Iterator.empty
+ else new MapValueIterator[K, V](rootNode)
+ }
+
+ protected[immutable] def reverseIterator: Iterator[(K, V)] = {
+ if (isEmpty) Iterator.empty
+ else new MapKeyValueTupleReverseIterator[K, V](rootNode)
+ }
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit =
+    shape.parUnbox(collection.convert.impl.AnyChampStepper.from[(K, V), MapNode[K, V]](size, rootNode, (node, i) => node.getPayload(i)))
+
+ override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
+ import collection.convert.impl._
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Int])
+ case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i).asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyChampStepper.from[K, MapNode[K, V]](size, rootNode, (node, i) => node.getKey(i)))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
+ import collection.convert.impl._
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Int])
+ case StepperShape.LongShape => LongChampStepper.from[ MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleChampStepper.from[MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i).asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyChampStepper.from[V, MapNode[K, V]](size, rootNode, (node, i) => node.getValue(i)))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ override final def contains(key: K): Boolean = {
+ val keyUnimprovedHash = key.##
+ val keyHash = improve(keyUnimprovedHash)
+ rootNode.containsKey(key, keyUnimprovedHash, keyHash, 0)
+ }
+
+ override def apply(key: K): V = {
+ val keyUnimprovedHash = key.##
+ val keyHash = improve(keyUnimprovedHash)
+ rootNode.apply(key, keyUnimprovedHash, keyHash, 0)
+ }
+
+ def get(key: K): Option[V] = {
+ val keyUnimprovedHash = key.##
+ val keyHash = improve(keyUnimprovedHash)
+ rootNode.get(key, keyUnimprovedHash, keyHash, 0)
+ }
+
+ override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+ val keyUnimprovedHash = key.##
+ val keyHash = improve(keyUnimprovedHash)
+ rootNode.getOrElse(key, keyUnimprovedHash, keyHash, 0, default)
+ }
+
+ @`inline` private[this] def newHashMapOrThis[V1 >: V](newRootNode: BitmapIndexedMapNode[K, V1]): HashMap[K, V1] =
+ if (newRootNode eq rootNode) this else new HashMap(newRootNode)
+
+ def updated[V1 >: V](key: K, value: V1): HashMap[K, V1] = {
+ val keyUnimprovedHash = key.##
+ newHashMapOrThis(rootNode.updated(key, value, keyUnimprovedHash, improve(keyUnimprovedHash), 0, replaceValue = true))
+ }
+
+ // preemptively overridden in anticipation of performance optimizations
+ override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): HashMap[K, V1] =
+ super.updatedWith[V1](key)(remappingFunction)
+
+ def removed(key: K): HashMap[K, V] = {
+ val keyUnimprovedHash = key.##
+ newHashMapOrThis(rootNode.removed(key, keyUnimprovedHash, improve(keyUnimprovedHash), 0))
+ }
+
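+  // Structural sharing (editor's note): `updated` and `removed` rebuild only the path from the
+  // root to the affected node, so the result shares every untouched subtree with the original:
+  //   val m1 = HashMap(1 -> "a", 2 -> "b")
+  //   val m2 = m1.updated(3, "c")   // m1 is unchanged; m1 and m2 share unaffected branches
+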
+ override def concat[V1 >: V](that: scala.IterableOnce[(K, V1)]^): HashMap[K, V1] = that match {
+ case hm: HashMap[K, V1] =>
+ if (isEmpty) hm
+ else {
+ val newNode = rootNode.concat(hm.rootNode, 0)
+ if (newNode eq hm.rootNode) hm
+        else newHashMapOrThis(newNode)
+ }
+ case hm: mutable.HashMap[K @unchecked, V @unchecked] =>
+ val iter = hm.nodeIterator
+ var current = rootNode
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hm.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true)
+
+ if (current ne rootNode) {
+ var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hm.unimproveHash(next.hash)
+ shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap)
+ }
+ return new HashMap(current)
+ }
+ }
+ this
+ case lhm: mutable.LinkedHashMap[K @unchecked, V @unchecked] =>
+ val iter = lhm.entryIterator
+ var current = rootNode
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = lhm.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ current = current.updated(next.key, next.value, originalHash, improved, 0, replaceValue = true)
+
+ if (current ne rootNode) {
+ var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = lhm.unimproveHash(next.hash)
+ shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, next.value, originalHash, improve(originalHash), 0, shallowlyMutableNodeMap)
+ }
+ return new HashMap(current)
+ }
+ }
+ this
+ case _ =>
+ class accum extends AbstractFunction2[K, V1, Unit] with Function1[(K, V1), Unit] {
+ var changed = false
+ var shallowlyMutableNodeMap: Int = 0
+ var current: BitmapIndexedMapNode[K, V1] = rootNode
+ def apply(kv: (K, V1)) = apply(kv._1, kv._2)
+ def apply(key: K, value: V1): Unit = {
+ val originalHash = key.##
+ val improved = improve(originalHash)
+ if (!changed) {
+ current = current.updated(key, value, originalHash, improved, 0, replaceValue = true)
+ if (current ne rootNode) {
+ // Note: We could have started with shallowlyMutableNodeMap = 0, however this way, in the case that
+ // the first changed key ended up in a subnode beneath root, we mark that root right away as being
+ // shallowly mutable.
+ //
+ // since key->value has just been inserted, and certainly caused a new root node to be created, we can say with
+ // certainty that it either caused a new subnode to be created underneath `current`, in which case we should
+ // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case, no harm is
+ // done by including its bit position in the shallowlyMutableNodeMap anyways.
+ changed = true
+ shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+ }
+ } else {
+ shallowlyMutableNodeMap = current.updateWithShallowMutations(key, value, originalHash, improved, 0, shallowlyMutableNodeMap)
+ }
+ }
+ }
+ that match {
+ case thatMap: Map[K, V1] =>
+ if (thatMap.isEmpty) this
+ else {
+ val accum = new accum
+ thatMap.foreachEntry(accum)
+ newHashMapOrThis(accum.current)
+ }
+ case _ =>
+ val it = that.iterator
+ if (it.isEmpty) this
+ else {
+ val accum = new accum
+ it.foreach(accum)
+ newHashMapOrThis(accum.current)
+ }
+ }
+ }
+
+ override def tail: HashMap[K, V] = this - head._1
+
+ override def init: HashMap[K, V] = this - last._1
+
+ override def head: (K, V) = iterator.next()
+
+ override def last: (K, V) = reverseIterator.next()
+
+ override def foreach[U](f: ((K, V)) => U): Unit = rootNode.foreach(f)
+
+ override def foreachEntry[U](f: (K, V) => U): Unit = rootNode.foreachEntry(f)
+
+ /** Applies a function to each key, value, and **original** hash value in this Map */
+ @`inline` private[collection] def foreachWithHash(f: (K, V, Int) => Unit): Unit = rootNode.foreachWithHash(f)
+
+ override def equals(that: Any): Boolean =
+ that match {
+ case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode)
+ case _ => super.equals(that)
+ }
+
+ override def hashCode(): Int = {
+ if (isEmpty) MurmurHash3.emptyMapHash
+ else {
+ // Optimized to avoid recomputation of key hashcodes as these are cached in the nodes and can be assumed to be
+ // immutable.
+ val hashIterator = new MapKeyValueTupleHashIterator(rootNode)
+ val hash = MurmurHash3.unorderedHash(hashIterator, MurmurHash3.mapSeed)
+ // assert(hash == super.hashCode())
+ hash
+ }
+ }
+
+ override protected[this] def className = "HashMap"
+
+  /** Merges this HashMap with another HashMap by combining all key-value pairs of both maps, and delegating to a merge
+ * function to resolve any key collisions between the two HashMaps.
+ *
+ * @example {{{
+ * val left = HashMap(1 -> 1, 2 -> 1)
+ * val right = HashMap(2 -> 2, 3 -> 2)
+ *
+ * val merged = left.merged(right){ case ((k0, v0), (k1, v1)) => (k0 + k1) -> (v0 + v1) }
+ * // HashMap(1 -> 1, 3 -> 2, 4 -> 3)
+ *
+ * }}}
+ *
+ * @param that the HashMap to merge this HashMap with
+ * @param mergef the merge function which resolves collisions between the two HashMaps. If `mergef` is null, then
+ * keys from `this` will overwrite keys from `that`, making the behaviour equivalent to
+ * `that.concat(this)`
+ *
+   * @note In cases where `mergef` returns keys which themselves collide with other keys returned by `mergef`, or
+ * found in `this` or `that`, it is not defined which value will be chosen. For example:
+ *
+ * Colliding multiple results of merging:
+ * {{{
+ * // key `3` collides between a result of merging keys `1` and `2`
+ * val left = HashMap(1 -> 1, 2 -> 2)
+ * val right = HashMap(1 -> 1, 2 -> 2)
+ *
+ * val merged = left.merged(right){ case (_, (_, v1)) => 3 -> v1 }
+ * // HashMap(3 -> 2) is returned, but it could also have returned HashMap(3 -> 1)
+ * }}}
+ * Colliding results of merging with other keys:
+ * {{{
+ * // key `2` collides between a result of merging `1`, and existing key `2`
+ * val left = HashMap(1 -> 1, 2 -> 1)
+ * val right = HashMap(1 -> 2)
+ *
+ * val merged = left.merged(right)((_,_) => 2 -> 3)
+ * // HashMap(2 -> 1) is returned, but it could also have returned HashMap(2 -> 3)
+ * }}}
+ *
+ */
+ def merged[V1 >: V](that: HashMap[K, V1])(mergef: ((K, V), (K, V1)) => (K, V1)): HashMap[K, V1] =
+ if (mergef == null) {
+ that ++ this
+ } else {
+ if (isEmpty) that
+ else if (that.isEmpty) this
+ else if (size == 1) {
+ val payload@(k, v) = rootNode.getPayload(0)
+ val originalHash = rootNode.getHash(0)
+ val improved = improve(originalHash)
+
+ if (that.rootNode.containsKey(k, originalHash, improved, 0)) {
+ val thatPayload = that.rootNode.getTuple(k, originalHash, improved, 0)
+ val (mergedK, mergedV) = mergef(payload, thatPayload)
+ val mergedOriginalHash = mergedK.##
+ val mergedImprovedHash = improve(mergedOriginalHash)
+ new HashMap(that.rootNode.removed(thatPayload._1, originalHash, improved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
+ } else {
+ new HashMap(that.rootNode.updated(k, v, originalHash, improved, 0, replaceValue = true))
+ }
+      } else if (that.size == 1) {
+        val thatPayload@(k, v) = that.rootNode.getPayload(0)
+        val thatOriginalHash = that.rootNode.getHash(0)
+        val thatImproved = improve(thatOriginalHash)
+
+        if (rootNode.containsKey(k, thatOriginalHash, thatImproved, 0)) {
+          val payload = rootNode.getTuple(k, thatOriginalHash, thatImproved, 0)
+          val (mergedK, mergedV) = mergef(payload, thatPayload)
+          val mergedOriginalHash = mergedK.##
+          val mergedImprovedHash = improve(mergedOriginalHash)
+          new HashMap(rootNode.removed(k, thatOriginalHash, thatImproved, 0).updated(mergedK, mergedV, mergedOriginalHash, mergedImprovedHash, 0, replaceValue = true))
+ } else {
+ new HashMap(rootNode.updated(k, v, thatOriginalHash, thatImproved, 0, replaceValue = true))
+ }
+ } else {
+ val builder = new HashMapBuilder[K, V1]
+ rootNode.mergeInto(that.rootNode, builder, 0)(mergef)
+ builder.result()
+ }
+ }
+
+ override def transform[W](f: (K, V) => W): HashMap[K, W] =
+ newHashMapOrThis(rootNode.transform[Any](f)).asInstanceOf[HashMap[K, W]]
+
+ override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): HashMap[K, V] = {
+ val newRootNode = rootNode.filterImpl(pred, isFlipped)
+ if (newRootNode eq rootNode) this
+ else if (newRootNode.size == 0) HashMap.empty
+ else new HashMap(newRootNode)
+ }
+
+ override def removedAll(keys: IterableOnce[K]^): HashMap[K, V] = {
+ if (isEmpty) {
+ this
+ } else {
+ keys match {
+ case hashSet: HashSet[K] =>
+ if (hashSet.isEmpty) {
+ this
+ } else {
+ // TODO: Remove all keys from the hashSet in a sub-linear fashion by only visiting the nodes in the tree
+ // This can be a direct port of the implementation of `SetNode[A]#diff(SetNode[A])`
+ val newRootNode = new MapNodeRemoveAllSetNodeIterator(hashSet.rootNode).removeAll(rootNode)
+ if (newRootNode eq rootNode) this
+ else if (newRootNode.size <= 0) HashMap.empty
+ else new HashMap(newRootNode)
+ }
+ case hashSet: collection.mutable.HashSet[K] =>
+ if (hashSet.isEmpty) {
+ this
+ } else {
+ val iter = hashSet.nodeIterator
+ var curr = rootNode
+
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hashSet.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ curr = curr.removed(next.key, originalHash, improved, 0)
+ if (curr.size == 0) {
+ return HashMap.empty
+ }
+ }
+ newHashMapOrThis(curr)
+ }
+ case lhashSet: collection.mutable.LinkedHashSet[K] =>
+ if (lhashSet.isEmpty) {
+ this
+ } else {
+ val iter = lhashSet.entryIterator
+ var curr = rootNode
+
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = lhashSet.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ curr = curr.removed(next.key, originalHash, improved, 0)
+ if (curr.size == 0) {
+ return HashMap.empty
+ }
+ }
+ newHashMapOrThis(curr)
+ }
+ case _ =>
+ val iter = keys.iterator
+ var curr = rootNode
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = next.##
+ val improved = improve(originalHash)
+ curr = curr.removed(next, originalHash, improved, 0)
+ if (curr.size == 0) {
+ return HashMap.empty
+ }
+ }
+ newHashMapOrThis(curr)
+ }
+ }
+ }
+
+ override def partition(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+ // In particular, `partition` could be optimized to traverse the trie node-by-node, splitting each node into two,
+ // based on the result of applying `p` to its elements and subnodes.
+ super.partition(p)
+ }
+
+ override def take(n: Int): HashMap[K, V] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+ // In particular, `take` could be optimized to construct a new trie structure by visiting each node, and including
+ // those nodes in the resulting trie, until `n` total elements have been included.
+ super.take(n)
+ }
+
+ override def takeRight(n: Int): HashMap[K, V] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+    // In particular, `takeRight` could be optimized to construct a new trie structure by visiting each node in
+    // reverse, and including those nodes in the resulting trie, until `n` total elements have been included.
+ super.takeRight(n)
+ }
+
+ override def takeWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+ // In particular, `takeWhile` could be optimized to construct a new trie structure by visiting each node, and
+ // including those nodes in the resulting trie, until `p` returns `false`
+ super.takeWhile(p)
+ }
+
+ override def dropWhile(p: ((K, V)) => Boolean): HashMap[K, V] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+ // In particular, `dropWhile` could be optimized to construct a new trie structure by visiting each node, and
+    // dropping those nodes in the resulting trie, until `p` returns `false`
+ super.dropWhile(p)
+ }
+
+ override def dropRight(n: Int): HashMap[K, V] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+ // In particular, `dropRight` could be optimized to construct a new trie structure by visiting each node, in reverse
+ // order, and dropping all nodes until `n` elements have been dropped
+ super.dropRight(n)
+ }
+
+ override def drop(n: Int): HashMap[K, V] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+    // In particular, `drop` could be optimized to construct a new trie structure by visiting each node, and
+ // dropping all nodes until `n` elements have been dropped
+ super.drop(n)
+ }
+
+ override def span(p: ((K, V)) => Boolean): (HashMap[K, V], HashMap[K, V]) = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ //
+    // In particular, `span` could be optimized to construct a new trie structure by visiting each node, and
+ // keeping each node and element until `p` returns false, then including the remaining nodes in the second result.
+ // This would avoid having to rebuild most of the trie, and would eliminate the need to perform hashing and equality
+ // checks.
+ super.span(p)
+ }
+
+}
+
+private[immutable] object MapNode {
+
+ private final val EmptyMapNode = new BitmapIndexedMapNode(0, 0, Array.empty, Array.empty, 0, 0)
+
+ def empty[K, V]: BitmapIndexedMapNode[K, V] = EmptyMapNode.asInstanceOf[BitmapIndexedMapNode[K, V]]
+
+ final val TupleLength = 2
+
+}
+
+
+private[immutable] sealed abstract class MapNode[K, +V] extends Node[MapNode[K, V @uV]] {
+ def apply(key: K, originalHash: Int, hash: Int, shift: Int): V
+
+ def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V]
+
+ def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1
+
+ def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean
+
+ /** Returns a MapNode with the passed key-value assignment added
+ *
+ * @param key the key to add to the MapNode
+ * @param value the value to associate with `key`
+ * @param originalHash the original hash of `key`
+ * @param hash the improved hash of `key`
+ * @param shift the shift of the node (distanceFromRoot * BitPartitionSize)
+ * @param replaceValue if true, then the value currently associated to `key` will be replaced with the passed value
+ * argument.
+ * if false, then the key will be inserted if not already present, however if the key is present
+ * then the passed value will not replace the current value. That is, if `false`, then this
+ * method has `update if not exists` semantics.
+ */
+ def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1]
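+  // Editor's note: with `replaceValue = false` this is insert-if-absent, e.g. calling
+  // `node.updated(k, v2, h, ih, 0, replaceValue = false)` when `k` is already present returns
+  // `this` unchanged rather than replacing the stored value.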
+
+ def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1]
+
+ def hasNodes: Boolean
+
+ def nodeArity: Int
+
+ def getNode(index: Int): MapNode[K, V]
+
+ def hasPayload: Boolean
+
+ def payloadArity: Int
+
+ def getKey(index: Int): K
+
+ def getValue(index: Int): V
+
+ def getPayload(index: Int): (K, V)
+
+ def size: Int
+
+ def foreach[U](f: ((K, V)) => U): Unit
+
+ def foreachEntry[U](f: (K, V) => U): Unit
+
+ def foreachWithHash(f: (K, V, Int) => Unit): Unit
+
+ def transform[W](f: (K, V) => W): MapNode[K, W]
+
+ def copy(): MapNode[K, V]
+
+ def concat[V1 >: V](that: MapNode[K, V1], shift: Int): MapNode[K, V1]
+
+ def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): MapNode[K, V]
+
+ /** Merges this node with that node, adding each resulting tuple to `builder`
+ *
+ * `this` should be a node from `left` hashmap in `left.merged(right)(mergef)`
+ *
+ * @param that node from the "right" HashMap. Must also be at the same "path" or "position" within the right tree,
+ * as `this` is, within the left tree
+ */
+ def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit
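+  // Editor's note: in `left.merged(right)(mergef)`, non-colliding entries from both sides are
+  // added to the builder as-is, and each colliding key is resolved by one call to `mergef`.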
+
+ /** Returns the exact (equal by reference) key, and value, associated to a given key.
+ * If the key is not bound to a value, then an exception is thrown
+ */
+ def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V)
+
+ /** Adds all key-value pairs to a builder */
+ def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit
+}
+
+private final class BitmapIndexedMapNode[K, +V](
+ var dataMap: Int,
+ var nodeMap: Int,
+ var content: Array[Any],
+ var originalHashes: Array[Int],
+ var size: Int,
+ var cachedJavaKeySetHashCode: Int) extends MapNode[K, V] {
+
+ releaseFence()
+
+ import MapNode._
+ import Node._
+
+ /*
+ assert(checkInvariantContentIsWellTyped())
+ assert(checkInvariantSubNodesAreCompacted())
+
+ private final def checkInvariantSubNodesAreCompacted(): Boolean =
+ new MapKeyValueTupleIterator[K, V](this).size - payloadArity >= 2 * nodeArity
+
+ private final def checkInvariantContentIsWellTyped(): Boolean = {
+ val predicate1 = TupleLength * payloadArity + nodeArity == content.length
+
+ val predicate2 = Range(0, TupleLength * payloadArity)
+ .forall(i => content(i).isInstanceOf[MapNode[_, _]] == false)
+
+ val predicate3 = Range(TupleLength * payloadArity, content.length)
+ .forall(i => content(i).isInstanceOf[MapNode[_, _]] == true)
+
+ predicate1 && predicate2 && predicate3
+ }
+ */
+
+ def getKey(index: Int): K = content(TupleLength * index).asInstanceOf[K]
+ def getValue(index: Int): V = content(TupleLength * index + 1).asInstanceOf[V]
+
+ def getPayload(index: Int) = Tuple2(
+ content(TupleLength * index).asInstanceOf[K],
+ content(TupleLength * index + 1).asInstanceOf[V])
+
+ override def getHash(index: Int): Int = originalHashes(index)
+
+ def getNode(index: Int): MapNode[K, V] =
+ content(content.length - 1 - index).asInstanceOf[MapNode[K, V]]
+
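+  // Content layout (editor's note; cf. the commented-out invariants above): payload pairs are
+  // stored at the front of `content`, sub-nodes in reverse order at the back. For example, with
+  // payloadArity = 2 and nodeArity = 1:
+  //   content == Array(k0, v0, k1, v1, node0), so getNode(0) == content(content.length - 1)
+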
+ def apply(key: K, originalHash: Int, keyHash: Int, shift: Int): V = {
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key")
+ } else if ((nodeMap & bitpos) != 0) {
+ getNode(indexFrom(nodeMap, mask, bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize)
+ } else {
+ throw new NoSuchElementException(s"key not found: $key")
+ }
+ }
+
+ def get(key: K, originalHash: Int, keyHash: Int, shift: Int): Option[V] = {
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val key0 = this.getKey(index)
+ if (key == key0) Some(this.getValue(index)) else None
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ this.getNode(index).get(key, originalHash, keyHash, shift + BitPartitionSize)
+ } else {
+ None
+ }
+ }
+
+ override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = {
+ val mask = maskFrom(hash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val payload = getPayload(index)
+ if (key == payload._1) payload else throw new NoSuchElementException
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ getNode(index).getTuple(key, originalHash, hash, shift + BitPartitionSize)
+ } else {
+ throw new NoSuchElementException
+ }
+ }
+
+ def getOrElse[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int, f: => V1): V1 = {
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val key0 = this.getKey(index)
+ if (key == key0) getValue(index) else f
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ this.getNode(index).getOrElse(key, originalHash, keyHash, shift + BitPartitionSize, f)
+ } else {
+ f
+ }
+ }
+
+ override def containsKey(key: K, originalHash: Int, keyHash: Int, shift: Int): Boolean = {
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ // assert(hashes(index) == computeHash(this.getKey(index)), (hashes.toSeq, content.toSeq, index, key, keyHash, shift))
+ (originalHashes(index) == originalHash) && key == getKey(index)
+ } else if ((nodeMap & bitpos) != 0) {
+ getNode(indexFrom(nodeMap, mask, bitpos)).containsKey(key, originalHash, keyHash, shift + BitPartitionSize)
+ } else {
+ false
+ }
+ }
+
+
+ def updated[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, replaceValue: Boolean): BitmapIndexedMapNode[K, V1] = {
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val key0 = getKey(index)
+ val key0UnimprovedHash = getHash(index)
+ if (key0UnimprovedHash == originalHash && key0 == key) {
+ if (replaceValue) {
+ val value0 = this.getValue(index)
+ if ((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))
+ this
+ else copyAndSetValue(bitpos, key, value)
+ } else this
+ } else {
+ val value0 = this.getValue(index)
+ val key0Hash = improve(key0UnimprovedHash)
+ val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize)
+
+ copyAndMigrateFromInlineToNode(bitpos, key0Hash, subNodeNew)
+ }
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ val subNode = this.getNode(index)
+ val subNodeNew = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue)
+
+ if (subNodeNew eq subNode) this else copyAndSetNode(bitpos, subNode, subNodeNew)
+ } else copyAndInsertValue(bitpos, key, originalHash, keyHash, value)
+ }
+
+ /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately
+ * descendant child nodes (only one level beneath `this`)
+ *
+   * The caller should pass a bitmap identifying which child nodes of this node this method may mutate.
+   * If the to-be-updated key-value pair belongs in a child node that may be mutated, that child is
+   * shallowly mutated (its own children are not mutated).
+   *
+   * If instead the pair belongs in a child node which may not be mutated, then that child is updated
+   * immutably, and the result is mutably re-inserted as a child of this node.
+ *
+ * @param key the key to update
+ * @param value the value to set `key` to
+ * @param originalHash key.##
+ * @param keyHash the improved hash
+ * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated
+ * during the call to this method
+ *
+   * @return the bitwise OR of `shallowlyMutableNodeMap` with the bit positions of any freshly created nodes,
+   *         which will be available for mutation in subsequent calls.
+ */
+ def updateWithShallowMutations[V1 >: V](key: K, value: V1, originalHash: Int, keyHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = {
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val key0 = getKey(index)
+ val key0UnimprovedHash = getHash(index)
+ if (key0UnimprovedHash == originalHash && key0 == key) {
+ val value0 = this.getValue(index)
+ if (!((key0.asInstanceOf[AnyRef] eq key.asInstanceOf[AnyRef]) && (value0.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]))) {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+ content(idx + 1) = value
+ }
+ shallowlyMutableNodeMap
+ } else {
+ val value0 = this.getValue(index)
+ val key0Hash = improve(key0UnimprovedHash)
+
+ val subNodeNew = mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize)
+ migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew)
+ shallowlyMutableNodeMap | bitpos
+ }
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ val subNode = this.getNode(index)
+ val subNodeSize = subNode.size
+ val subNodeHashCode = subNode.cachedJavaKeySetHashCode
+
+ var returnMutableNodeMap = shallowlyMutableNodeMap
+
+ val subNodeNew: MapNode[K, V1] = subNode match {
+ case subNodeBm: BitmapIndexedMapNode[K, V] if (bitpos & shallowlyMutableNodeMap) != 0 =>
+ subNodeBm.updateWithShallowMutations(key, value, originalHash, keyHash, shift + BitPartitionSize, 0)
+ subNodeBm
+ case _ =>
+ val result = subNode.updated(key, value, originalHash, keyHash, shift + BitPartitionSize, replaceValue = true)
+ if (result ne subNode) {
+ returnMutableNodeMap |= bitpos
+ }
+ result
+ }
+
+ this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew
+ this.size = this.size - subNodeSize + subNodeNew.size
+ this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeHashCode + subNodeNew.cachedJavaKeySetHashCode
+ returnMutableNodeMap
+ } else {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length + TupleLength)
+
+ // copy 'src' and insert 2 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ dst(idx) = key
+ dst(idx + 1) = value
+ arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+ this.dataMap |= bitpos
+ this.content = dst
+ this.originalHashes = insertElement(originalHashes, dataIx, originalHash)
+ this.size += 1
+ this.cachedJavaKeySetHashCode += keyHash
+ shallowlyMutableNodeMap
+ }
+ }
+
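+  // Caller pattern for the shallow-mutation protocol (editor's sketch of what `HashMap.concat`
+  // above does): thread the returned bitmap through successive calls so that only nodes created
+  // during the current batch are mutated in place.
+  //   var current = rootNode.updated(k1, v1, h1, improve(h1), 0, replaceValue = true)
+  //   var mutable = Node.bitposFrom(Node.maskFrom(improve(h1), 0))
+  //   mutable = current.updateWithShallowMutations(k2, v2, h2, improve(h2), 0, mutable)
+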
+ def removed[V1 >: V](key: K, originalHash: Int, keyHash: Int, shift: Int): BitmapIndexedMapNode[K, V1] = {
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val key0 = this.getKey(index)
+
+ if (key0 == key) {
+ if (this.payloadArity == 2 && this.nodeArity == 0) {
+ /*
+          * Create new node with the remaining pair. The new node will either a) become the new root
+          * returned, or b) be unwrapped and inlined on the way back up.
+ */
+ val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(keyHash, 0))
+ if (index == 0)
+ new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(1), getValue(1)), Array(originalHashes(1)), 1, improve(getHash(1)))
+ else
+ new BitmapIndexedMapNode[K, V1](newDataMap, 0, Array(getKey(0), getValue(0)), Array(originalHashes(0)), 1, improve(getHash(0)))
+ } else copyAndRemoveValue(bitpos, keyHash)
+ } else this
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ val subNode = this.getNode(index)
+
+ val subNodeNew = subNode.removed(key, originalHash, keyHash, shift + BitPartitionSize)
+ // assert(subNodeNew.size != 0, "Sub-node must have at least one element.")
+
+ if (subNodeNew eq subNode) return this
+
+      // cache just in case subNodeNew is a hashCollision node, in which case a little arithmetic is avoided
+ // in Vector#length
+ val subNodeNewSize = subNodeNew.size
+
+ if (subNodeNewSize == 1) {
+ if (this.size == subNode.size) {
+ // subNode is the only child (no other data or node children of `this` exist)
+ // escalate (singleton or empty) result
+ subNodeNew.asInstanceOf[BitmapIndexedMapNode[K, V]]
+ } else {
+ // inline value (move to front)
+ copyAndMigrateFromNodeToInline(bitpos, subNode, subNodeNew)
+ }
+ } else if (subNodeNewSize > 1) {
+ // modify current node (set replacement node)
+ copyAndSetNode(bitpos, subNode, subNodeNew)
+ } else this
+ } else this
+ }
+
+ def mergeTwoKeyValPairs[V1 >: V](key0: K, value0: V1, originalHash0: Int, keyHash0: Int, key1: K, value1: V1, originalHash1: Int, keyHash1: Int, shift: Int): MapNode[K, V1] = {
+ // assert(key0 != key1)
+
+ if (shift >= HashCodeLength) {
+ new HashCollisionMapNode[K, V1](originalHash0, keyHash0, Vector((key0, value0), (key1, value1)))
+ } else {
+ val mask0 = maskFrom(keyHash0, shift)
+ val mask1 = maskFrom(keyHash1, shift)
+ val newCachedHash = keyHash0 + keyHash1
+
+ if (mask0 != mask1) {
+ // unique prefixes, payload fits on same level
+ val dataMap = bitposFrom(mask0) | bitposFrom(mask1)
+
+ if (mask0 < mask1) {
+ new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key0, value0, key1, value1), Array(originalHash0, originalHash1), 2, newCachedHash)
+ } else {
+ new BitmapIndexedMapNode[K, V1](dataMap, 0, Array(key1, value1, key0, value0), Array(originalHash1, originalHash0), 2, newCachedHash)
+ }
+ } else {
+ // identical prefixes, payload must be disambiguated deeper in the trie
+ val nodeMap = bitposFrom(mask0)
+ val node = mergeTwoKeyValPairs(key0, value0, originalHash0, keyHash0, key1, value1, originalHash1, keyHash1, shift + BitPartitionSize)
+ new BitmapIndexedMapNode[K, V1](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode)
+ }
+ }
+ }
+
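+  // Editor's note: each trie level consumes Node.BitPartitionSize bits of the improved hash, so
+  // two keys whose hashes first differ in the bits at `shift` get distinct masks and remain
+  // siblings in one BitmapIndexedMapNode; keys whose 32-bit improved hashes are identical exhaust
+  // HashCodeLength and end up together in a HashCollisionMapNode.
+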
+ def hasNodes: Boolean = nodeMap != 0
+
+ def nodeArity: Int = bitCount(nodeMap)
+
+ def hasPayload: Boolean = dataMap != 0
+
+ def payloadArity: Int = bitCount(dataMap)
+
+ def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1))
+
+ def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1))
+
+ def copyAndSetValue[V1 >: V](bitpos: Int, newKey: K, newValue: V1): BitmapIndexedMapNode[K, V1] = {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length)
+
+ // copy 'src' and set 1 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, src.length)
+ //dst(idx) = newKey
+ dst(idx + 1) = newValue
+ new BitmapIndexedMapNode[K, V1](dataMap, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode)
+ }
+
+ def copyAndSetNode[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], newNode: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = {
+ val idx = this.content.length - 1 - this.nodeIndex(bitpos)
+
+ val src = this.content
+ val dst = new Array[Any](src.length)
+
+ // copy 'src' and set 1 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, src.length)
+ dst(idx) = newNode
+ new BitmapIndexedMapNode[K, V1](
+ dataMap,
+ nodeMap,
+ dst,
+ originalHashes,
+ size - oldNode.size + newNode.size,
+ cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode
+ )
+ }
+
+ def copyAndInsertValue[V1 >: V](bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): BitmapIndexedMapNode[K, V1] = {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length + TupleLength)
+
+ // copy 'src' and insert 2 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ dst(idx) = key
+ dst(idx + 1) = value
+ arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+ val dstHashes = insertElement(originalHashes, dataIx, originalHash)
+
+ new BitmapIndexedMapNode[K, V1](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + keyHash)
+ }
+
+ def copyAndRemoveValue(bitpos: Int, keyHash: Int): BitmapIndexedMapNode[K, V] = {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length - TupleLength)
+
+ // copy 'src' and remove 2 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength)
+
+ val dstHashes = removeElement(originalHashes, dataIx)
+
+ new BitmapIndexedMapNode[K, V](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - keyHash)
+ }
+
+ /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node.
+ *
+ * @param bitpos the bit position of the data to migrate to node
+ * @param keyHash the improved hash of the key currently at `bitpos`
+ * @param node the node to place at `bitpos` beneath `this`
+ */
+ def migrateFromInlineToNodeInPlace[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): this.type = {
+ val dataIx = dataIndex(bitpos)
+ val idxOld = TupleLength * dataIx
+ val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+ val src = this.content
+ val dst = new Array[Any](src.length - TupleLength + 1)
+
+ // copy 'src' and remove 2 element(s) at position 'idxOld' and
+ // insert 1 element(s) at position 'idxNew'
+ // assert(idxOld <= idxNew)
+ arraycopy(src, 0, dst, 0, idxOld)
+ arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld)
+ dst(idxNew) = node
+ arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength)
+
+ val dstHashes = removeElement(originalHashes, dataIx)
+
+ this.dataMap = dataMap ^ bitpos
+ this.nodeMap = nodeMap | bitpos
+ this.content = dst
+ this.originalHashes = dstHashes
+ this.size = size - 1 + node.size
+ this.cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode
+ this
+ }
+
+ def copyAndMigrateFromInlineToNode[V1 >: V](bitpos: Int, keyHash: Int, node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = {
+ val dataIx = dataIndex(bitpos)
+ val idxOld = TupleLength * dataIx
+ val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+ val src = this.content
+ val dst = new Array[Any](src.length - TupleLength + 1)
+
+ // copy 'src' and remove 2 element(s) at position 'idxOld' and
+ // insert 1 element(s) at position 'idxNew'
+ // assert(idxOld <= idxNew)
+ arraycopy(src, 0, dst, 0, idxOld)
+ arraycopy(src, idxOld + TupleLength, dst, idxOld, idxNew - idxOld)
+ dst(idxNew) = node
+ arraycopy(src, idxNew + TupleLength, dst, idxNew + 1, src.length - idxNew - TupleLength)
+
+ val dstHashes = removeElement(originalHashes, dataIx)
+
+ new BitmapIndexedMapNode[K, V1](
+ dataMap = dataMap ^ bitpos,
+ nodeMap = nodeMap | bitpos,
+ content = dst,
+ originalHashes = dstHashes,
+ size = size - 1 + node.size,
+ cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode
+ )
+ }
+
+ def copyAndMigrateFromNodeToInline[V1 >: V](bitpos: Int, oldNode: MapNode[K, V1], node: MapNode[K, V1]): BitmapIndexedMapNode[K, V1] = {
+ val idxOld = this.content.length - 1 - nodeIndex(bitpos)
+ val dataIxNew = dataIndex(bitpos)
+ val idxNew = TupleLength * dataIxNew
+
+ val key = node.getKey(0)
+ val value = node.getValue(0)
+ val src = this.content
+ val dst = new Array[Any](src.length - 1 + TupleLength)
+
+ // copy 'src' and remove 1 element(s) at position 'idxOld' and
+ // insert 2 element(s) at position 'idxNew'
+ // assert(idxOld >= idxNew)
+ arraycopy(src, 0, dst, 0, idxNew)
+ dst(idxNew) = key
+ dst(idxNew + 1) = value
+ arraycopy(src, idxNew, dst, idxNew + TupleLength, idxOld - idxNew)
+ arraycopy(src, idxOld + 1, dst, idxOld + TupleLength, src.length - idxOld - 1)
+ val hash = node.getHash(0)
+ val dstHashes = insertElement(originalHashes, dataIxNew, hash)
+ new BitmapIndexedMapNode[K, V1](
+ dataMap = dataMap | bitpos,
+ nodeMap = nodeMap ^ bitpos,
+ content = dst,
+ originalHashes = dstHashes,
+ size = size - oldNode.size + 1,
+ cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
+ )
+ }
+
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ val iN = payloadArity // arity doesn't change during this operation
+ var i = 0
+ while (i < iN) {
+ f(getPayload(i))
+ i += 1
+ }
+
+ val jN = nodeArity // arity doesn't change during this operation
+ var j = 0
+ while (j < jN) {
+ getNode(j).foreach(f)
+ j += 1
+ }
+ }
+
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ val iN = payloadArity // arity doesn't change during this operation
+ var i = 0
+ while (i < iN) {
+ f(getKey(i), getValue(i))
+ i += 1
+ }
+
+ val jN = nodeArity // arity doesn't change during this operation
+ var j = 0
+ while (j < jN) {
+ getNode(j).foreachEntry(f)
+ j += 1
+ }
+ }
+
+ override def foreachWithHash(f: (K, V, Int) => Unit): Unit = {
+ var i = 0
+ val iN = payloadArity // arity doesn't change during this operation
+ while (i < iN) {
+ f(getKey(i), getValue(i), getHash(i))
+ i += 1
+ }
+
+ val jN = nodeArity // arity doesn't change during this operation
+ var j = 0
+ while (j < jN) {
+ getNode(j).foreachWithHash(f)
+ j += 1
+ }
+ }
+ override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = {
+ var i = 0
+ val iN = payloadArity
+ val jN = nodeArity
+ while (i < iN) {
+ builder.addOne(getKey(i), getValue(i), getHash(i))
+ i += 1
+ }
+
+ var j = 0
+ while (j < jN) {
+ getNode(j).buildTo(builder)
+ j += 1
+ }
+ }
+
+ override def transform[W](f: (K, V) => W): BitmapIndexedMapNode[K, W] = {
+ var newContent: Array[Any] = null
+ val iN = payloadArity // arity doesn't change during this operation
+ val jN = nodeArity // arity doesn't change during this operation
+ val newContentLength = content.length
+ var i = 0
+ while (i < iN) {
+ val key = getKey(i)
+ val value = getValue(i)
+ val newValue = f(key, value)
+ if (newContent eq null) {
+ if (newValue.asInstanceOf[AnyRef] ne value.asInstanceOf[AnyRef]) {
+ newContent = content.clone()
+ newContent(TupleLength * i + 1) = newValue
+ }
+ } else {
+ newContent(TupleLength * i + 1) = newValue
+ }
+ i += 1
+ }
+
+ var j = 0
+ while (j < jN) {
+ val node = getNode(j)
+ val newNode = node.transform(f)
+ if (newContent eq null) {
+ if (newNode ne node) {
+ newContent = content.clone()
+ newContent(newContentLength - j - 1) = newNode
+ }
+ } else
+ newContent(newContentLength - j - 1) = newNode
+ j += 1
+ }
+ if (newContent eq null) this.asInstanceOf[BitmapIndexedMapNode[K, W]]
+ else new BitmapIndexedMapNode[K, W](dataMap, nodeMap, newContent, originalHashes, size, cachedJavaKeySetHashCode)
+ }
+
+ override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match {
+ case bm: BitmapIndexedMapNode[K, V] @unchecked =>
+ if (size == 0) {
+ that.buildTo(builder)
+ return
+ } else if (bm.size == 0) {
+ buildTo(builder)
+ return
+ }
+
+ val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap
+
+ val minIndex: Int = Integer.numberOfTrailingZeros(allMap)
+ val maxIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap)
+
+ {
+ var index = minIndex
+ var leftIdx = 0
+ var rightIdx = 0
+
+ while (index < maxIndex) {
+ val bitpos = bitposFrom(index)
+
+ if ((bitpos & dataMap) != 0) {
+ val leftKey = getKey(leftIdx)
+ val leftValue = getValue(leftIdx)
+ val leftOriginalHash = getHash(leftIdx)
+ if ((bitpos & bm.dataMap) != 0) {
+ // left data and right data
+ val rightKey = bm.getKey(rightIdx)
+ val rightValue = bm.getValue(rightIdx)
+ val rightOriginalHash = bm.getHash(rightIdx)
+ if (leftOriginalHash == rightOriginalHash && leftKey == rightKey) {
+ builder.addOne(mergef((leftKey, leftValue), (rightKey, rightValue)))
+ } else {
+ builder.addOne(leftKey, leftValue, leftOriginalHash)
+ builder.addOne(rightKey, rightValue, rightOriginalHash)
+ }
+ rightIdx += 1
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ // left data and right node
+ val subNode = bm.getNode(bm.nodeIndex(bitpos))
+ val leftImprovedHash = improve(leftOriginalHash)
+ val removed = subNode.removed(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize)
+ if (removed eq subNode) {
+ // no overlap in leftData and rightNode, just build both children to builder
+ subNode.buildTo(builder)
+ builder.addOne(leftKey, leftValue, leftOriginalHash, leftImprovedHash)
+ } else {
+ // there is collision, so special treatment for that key
+ removed.buildTo(builder)
+ builder.addOne(mergef((leftKey, leftValue), subNode.getTuple(leftKey, leftOriginalHash, leftImprovedHash, shift + BitPartitionSize)))
+ }
+ } else {
+ // left data and nothing on right
+ builder.addOne(leftKey, leftValue, leftOriginalHash)
+ }
+ leftIdx += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ if ((bitpos & bm.dataMap) != 0) {
+ // left node and right data
+ val rightKey = bm.getKey(rightIdx)
+ val rightValue = bm.getValue(rightIdx)
+ val rightOriginalHash = bm.getHash(rightIdx)
+ val rightImprovedHash = improve(rightOriginalHash)
+
+ val subNode = getNode(nodeIndex(bitpos))
+ val removed = subNode.removed(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize)
+ if (removed eq subNode) {
+ // no overlap in leftNode and rightData, just build both children to builder
+ subNode.buildTo(builder)
+ builder.addOne(rightKey, rightValue, rightOriginalHash, rightImprovedHash)
+ } else {
+ // there is collision, so special treatment for that key
+ removed.buildTo(builder)
+ builder.addOne(mergef(subNode.getTuple(rightKey, rightOriginalHash, rightImprovedHash, shift + BitPartitionSize), (rightKey, rightValue)))
+ }
+ rightIdx += 1
+
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ // left node and right node
+ getNode(nodeIndex(bitpos)).mergeInto(bm.getNode(bm.nodeIndex(bitpos)), builder, shift + BitPartitionSize)(mergef)
+ } else {
+ // left node and nothing on right
+ getNode(nodeIndex(bitpos)).buildTo(builder)
+ }
+ } else if ((bitpos & bm.dataMap) != 0) {
+ // nothing on left, right data
+ val dataIndex = bm.dataIndex(bitpos)
+ builder.addOne(bm.getKey(dataIndex),bm.getValue(dataIndex), bm.getHash(dataIndex))
+ rightIdx += 1
+
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ // nothing on left, right node
+ bm.getNode(bm.nodeIndex(bitpos)).buildTo(builder)
+ }
+
+ index += 1
+ }
+ }
+ case _: HashCollisionMapNode[_, _] =>
+ throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode")
+ }
+
+ override def equals(that: Any): Boolean =
+ that match {
+ case node: BitmapIndexedMapNode[_, _] =>
+ (this eq node) ||
+ (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) &&
+ (this.nodeMap == node.nodeMap) &&
+ (this.dataMap == node.dataMap) &&
+ (this.size == node.size) &&
+ java.util.Arrays.equals(this.originalHashes, node.originalHashes) &&
+ deepContentEquality(this.content, node.content, content.length)
+ case _ => false
+ }
+
+ @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = {
+ if (a1 eq a2)
+ true
+ else {
+ var isEqual = true
+ var i = 0
+
+ while (isEqual && i < length) {
+ isEqual = a1(i) == a2(i)
+ i += 1
+ }
+
+ isEqual
+ }
+ }
+
+ override def hashCode(): Int =
+ throw new UnsupportedOperationException("Trie nodes do not support hashing.")
+
+ override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match {
+ case bm: BitmapIndexedMapNode[K, V] @unchecked =>
+ if (size == 0) return bm
+ else if (bm.size == 0 || (bm eq this)) return this
+ else if (bm.size == 1) {
+ val originalHash = bm.getHash(0)
+ return this.updated(bm.getKey(0), bm.getValue(0), originalHash, improve(originalHash), shift, replaceValue = true)
+ }
+ // if we go through the merge and the result does not differ from `bm`, we can just return `bm`, to improve sharing
+ // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the
+ // currently-being-computed result, and `bm`
+ var anyChangesMadeSoFar = false
+
+ val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap
+
+      // minimumBitPos is inclusive -- it covers the first index for which there is data or a node
+      val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap))
+      // maximumBitPos is inclusive -- it covers the last index for which there is data or a node.
+      // It cannot be exclusive, because then the upper bound in the worst case (Node.BranchingFactor) would not be
+      // representable as an Int bit position.
+ val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1)
+
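+      // Editor's note: the first pass below classifies every occupied bit position into exactly
+      // one of these category bitmaps; the second pass then allocates the result arrays once and
+      // fills them category by category.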
+ var leftNodeRightNode = 0
+ var leftDataRightNode = 0
+ var leftNodeRightData = 0
+ var leftDataOnly = 0
+ var rightDataOnly = 0
+ var leftNodeOnly = 0
+ var rightNodeOnly = 0
+ var leftDataRightDataMigrateToNode = 0
+ var leftDataRightDataRightOverwrites = 0
+
+ var dataToNodeMigrationTargets = 0
+
+ {
+ var bitpos = minimumBitPos
+ var leftIdx = 0
+ var rightIdx = 0
+ var finished = false
+
+ while (!finished) {
+
+ if ((bitpos & dataMap) != 0) {
+ if ((bitpos & bm.dataMap) != 0) {
+ val leftOriginalHash = getHash(leftIdx)
+ if (leftOriginalHash == bm.getHash(rightIdx) && getKey(leftIdx) == bm.getKey(rightIdx)) {
+ leftDataRightDataRightOverwrites |= bitpos
+ } else {
+ leftDataRightDataMigrateToNode |= bitpos
+ dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(leftOriginalHash), shift))
+ }
+ rightIdx += 1
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ leftDataRightNode |= bitpos
+ } else {
+ leftDataOnly |= bitpos
+ }
+ leftIdx += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ if ((bitpos & bm.dataMap) != 0) {
+ leftNodeRightData |= bitpos
+ rightIdx += 1
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ leftNodeRightNode |= bitpos
+ } else {
+ leftNodeOnly |= bitpos
+ }
+ } else if ((bitpos & bm.dataMap) != 0) {
+ rightDataOnly |= bitpos
+ rightIdx += 1
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ rightNodeOnly |= bitpos
+ }
+
+ if (bitpos == maximumBitPos) {
+ finished = true
+ } else {
+ bitpos = bitpos << 1
+ }
+ }
+ }
+
+
+ val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataRightOverwrites
+
+ val newNodeMap =
+ leftNodeRightNode |
+ leftDataRightNode |
+ leftNodeRightData |
+ leftNodeOnly |
+ rightNodeOnly |
+ dataToNodeMigrationTargets
+
+
+ if ((newDataMap == (rightDataOnly | leftDataRightDataRightOverwrites)) && (newNodeMap == rightNodeOnly)) {
+ // nothing from `this` will make it into the result -- return early
+ return bm
+ }
+
+ val newDataSize = bitCount(newDataMap)
+ val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap)
+
+ val newContent = new Array[Any](newContentSize)
+ val newOriginalHashes = new Array[Int](newDataSize)
+ var newSize = 0
+ var newCachedHashCode = 0
+
+ {
+ var leftDataIdx = 0
+ var rightDataIdx = 0
+ var leftNodeIdx = 0
+ var rightNodeIdx = 0
+
+ val nextShift = shift + Node.BitPartitionSize
+
+ var compressedDataIdx = 0
+ var compressedNodeIdx = 0
+
+ var bitpos = minimumBitPos
+ var finished = false
+
+ while (!finished) {
+
+ if ((bitpos & leftNodeRightNode) != 0) {
+ val rightNode = bm.getNode(rightNodeIdx)
+ val newNode = getNode(leftNodeIdx).concat(rightNode, nextShift)
+ if (rightNode ne newNode) {
+ anyChangesMadeSoFar = true
+ }
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ rightNodeIdx += 1
+ leftNodeIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+
+ } else if ((bitpos & leftDataRightNode) != 0) {
+ val newNode = {
+ val n = bm.getNode(rightNodeIdx)
+ val leftKey = getKey(leftDataIdx)
+ val leftValue = getValue(leftDataIdx)
+ val leftOriginalHash = getHash(leftDataIdx)
+ val leftImproved = improve(leftOriginalHash)
+
+ val updated = n.updated(leftKey, leftValue, leftOriginalHash, leftImproved, nextShift, replaceValue = false)
+
+ if (updated ne n) {
+ anyChangesMadeSoFar = true
+ }
+
+ updated
+ }
+
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ rightNodeIdx += 1
+ leftDataIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ }
+ else if ((bitpos & leftNodeRightData) != 0) {
+ anyChangesMadeSoFar = true
+ val newNode = {
+ val rightOriginalHash = bm.getHash(rightDataIdx)
+ getNode(leftNodeIdx).updated(
+ key = bm.getKey(rightDataIdx),
+ value = bm.getValue(rightDataIdx),
+ originalHash = bm.getHash(rightDataIdx),
+ hash = improve(rightOriginalHash),
+ shift = nextShift,
+ replaceValue = true
+ )
+ }
+
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ leftNodeIdx += 1
+ rightDataIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+
+ } else if ((bitpos & leftDataOnly) != 0) {
+ anyChangesMadeSoFar = true
+ val originalHash = originalHashes(leftDataIdx)
+ newContent(MapNode.TupleLength * compressedDataIdx) = getKey(leftDataIdx).asInstanceOf[AnyRef]
+ newContent(MapNode.TupleLength * compressedDataIdx + 1) = getValue(leftDataIdx).asInstanceOf[AnyRef]
+ newOriginalHashes(compressedDataIdx) = originalHash
+
+ compressedDataIdx += 1
+ leftDataIdx += 1
+ newSize += 1
+ newCachedHashCode += improve(originalHash)
+ } else if ((bitpos & rightDataOnly) != 0) {
+ val originalHash = bm.originalHashes(rightDataIdx)
+ newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef]
+ newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef]
+ newOriginalHashes(compressedDataIdx) = originalHash
+
+ compressedDataIdx += 1
+ rightDataIdx += 1
+ newSize += 1
+ newCachedHashCode += improve(originalHash)
+ } else if ((bitpos & leftNodeOnly) != 0) {
+ anyChangesMadeSoFar = true
+ val newNode = getNode(leftNodeIdx)
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ leftNodeIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ } else if ((bitpos & rightNodeOnly) != 0) {
+ val newNode = bm.getNode(rightNodeIdx)
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ rightNodeIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) {
+ anyChangesMadeSoFar = true
+ val newNode = {
+ val leftOriginalHash = getHash(leftDataIdx)
+ val rightOriginalHash = bm.getHash(rightDataIdx)
+
+ bm.mergeTwoKeyValPairs(
+ getKey(leftDataIdx), getValue(leftDataIdx), leftOriginalHash, improve(leftOriginalHash),
+ bm.getKey(rightDataIdx), bm.getValue(rightDataIdx), rightOriginalHash, improve(rightOriginalHash),
+ nextShift
+ )
+ }
+
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ leftDataIdx += 1
+ rightDataIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ } else if ((bitpos & leftDataRightDataRightOverwrites) != 0) {
+ val originalHash = bm.originalHashes(rightDataIdx)
+ newContent(MapNode.TupleLength * compressedDataIdx) = bm.getKey(rightDataIdx).asInstanceOf[AnyRef]
+ newContent(MapNode.TupleLength * compressedDataIdx + 1) = bm.getValue(rightDataIdx).asInstanceOf[AnyRef]
+ newOriginalHashes(compressedDataIdx) = originalHash
+
+ compressedDataIdx += 1
+ rightDataIdx += 1
+ newSize += 1
+ newCachedHashCode += improve(originalHash)
+ leftDataIdx += 1
+ }
+
+ if (bitpos == maximumBitPos) {
+ finished = true
+ } else {
+ bitpos = bitpos << 1
+ }
+ }
+ }
+
+ if (anyChangesMadeSoFar)
+ new BitmapIndexedMapNode(
+ dataMap = newDataMap,
+ nodeMap = newNodeMap,
+ content = newContent,
+ originalHashes = newOriginalHashes,
+ size = newSize,
+ cachedJavaKeySetHashCode = newCachedHashCode
+ )
+ else bm
+
+ case _ =>
+ // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes
+ throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode")
+ }
+
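+  // Illustrative sketch (not from the original source): the sharing logic above means that when nothing of the
+  // left operand survives the merge, `concat` should hand back the right operand's trie unchanged. Assuming the
+  // public HashMap API delegates to this node-level `concat`:
+  //
+  //   val a = HashMap(1 -> "x")
+  //   val b = HashMap(1 -> "y", 2 -> "z")
+  //   (a ++ b) eq b   // expected to be true: key 1 is overwritten by `b`, so no part of `a` survives
+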
+ override def copy(): BitmapIndexedMapNode[K, V] = {
+ val contentClone = content.clone()
+ val contentLength = contentClone.length
+ var i = bitCount(dataMap) * TupleLength
+ while (i < contentLength) {
+ contentClone(i) = contentClone(i).asInstanceOf[MapNode[K, V]].copy()
+ i += 1
+ }
+ new BitmapIndexedMapNode[K, V](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode)
+ }
+
+ override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): BitmapIndexedMapNode[K, V] = {
+ if (size == 0) this
+ else if (size == 1) {
+ if (pred(getPayload(0)) != flipped) this else MapNode.empty
+ } else if (nodeMap == 0) {
+ // Performance optimization for nodes of depth 1:
+ //
+      // this node has no "node" children, all children are inlined data elems, therefore the logic is significantly simpler
+      // approach:
+      //   * traverse the content array, accumulating in `newDataMap: Int` the bit positions of keys which pass the filter
+      //   * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array sizes, so now perform allocations
+      //   * traverse the content array once more, placing each passing element (according to `newDataMap`) in the new content and originalHashes arrays
+      //
+      // note:
+      //   * this optimization significantly improves performance not only for small trees, but also for large ones,
+      //     since even non-root nodes benefit from it, and a large tree consists of many descendant nodes
+ //
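+      // For example, with dataMap == 0b1011 (payloads at positions 0, 1 and 3), if only the payloads at positions
+      // 0 and 3 survive the filter, the first pass yields newDataMap == 0b1001; the second pass then allocates
+      // arrays sized for bitCount(0b1001) == 2 entries and copies the two surviving key/value pairs into them.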
+ val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap)
+ val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap)
+
+ var newDataMap = 0
+ var newCachedHashCode = 0
+ var dataIndex = 0
+
+ var i = minimumIndex
+
+ while(i < maximumIndex) {
+ val bitpos = bitposFrom(i)
+
+ if ((bitpos & dataMap) != 0) {
+ val payload = getPayload(dataIndex)
+ val passed = pred(payload) != flipped
+
+ if (passed) {
+ newDataMap |= bitpos
+ newCachedHashCode += improve(getHash(dataIndex))
+ }
+
+ dataIndex += 1
+ }
+
+ i += 1
+ }
+
+ if (newDataMap == 0) {
+ MapNode.empty
+ } else if (newDataMap == dataMap) {
+ this
+ } else {
+ val newSize = Integer.bitCount(newDataMap)
+ val newContent = new Array[Any](newSize * TupleLength)
+ val newOriginalHashCodes = new Array[Int](newSize)
+ val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap)
+
+ var j = Integer.numberOfTrailingZeros(newDataMap)
+
+ var newDataIndex = 0
+
+
+ while (j < newMaximumIndex) {
+ val bitpos = bitposFrom(j)
+ if ((bitpos & newDataMap) != 0) {
+ val oldIndex = indexFrom(dataMap, bitpos)
+ newContent(newDataIndex * TupleLength) = content(oldIndex * TupleLength)
+ newContent(newDataIndex * TupleLength + 1) = content(oldIndex * TupleLength + 1)
+ newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex)
+ newDataIndex += 1
+ }
+ j += 1
+ }
+
+ new BitmapIndexedMapNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode)
+ }
+
+
+ } else {
+ val allMap = dataMap | nodeMap
+ val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap)
+ val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap)
+
+ var oldDataPassThrough = 0
+
+ // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data
+ var nodeMigrateToDataTargetMap = 0
+ // the queue of single-element, post-filter nodes
+ var nodesToMigrateToData: mutable.Queue[MapNode[K, V]] = null
+
+ // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node
+ var nodesToPassThroughMap = 0
+
+ // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself
+ // These are stored for later inclusion into the final `content` array
+ // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular)
+ var mapOfNewNodes = 0
+ // each bit in `mapOfNewNodes` corresponds to one element in this queue
+ var newNodes: mutable.Queue[MapNode[K, V]] = null
+
+ var newDataMap = 0
+ var newNodeMap = 0
+ var newSize = 0
+ var newCachedHashCode = 0
+
+ var dataIndex = 0
+ var nodeIndex = 0
+
+ var i = minimumIndex
+ while (i < maximumIndex) {
+ val bitpos = bitposFrom(i)
+
+ if ((bitpos & dataMap) != 0) {
+ val payload = getPayload(dataIndex)
+ val passed = pred(payload) != flipped
+
+ if (passed) {
+ newDataMap |= bitpos
+ oldDataPassThrough |= bitpos
+ newSize += 1
+ newCachedHashCode += improve(getHash(dataIndex))
+ }
+
+ dataIndex += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ val oldSubNode = getNode(nodeIndex)
+ val newSubNode = oldSubNode.filterImpl(pred, flipped)
+
+ newSize += newSubNode.size
+ newCachedHashCode += newSubNode.cachedJavaKeySetHashCode
+
+ // if (newSubNode.size == 0) do nothing (drop it)
+ if (newSubNode.size > 1) {
+ newNodeMap |= bitpos
+ if (oldSubNode eq newSubNode) {
+ nodesToPassThroughMap |= bitpos
+ } else {
+ mapOfNewNodes |= bitpos
+ if (newNodes eq null) {
+ newNodes = mutable.Queue.empty[MapNode[K, V] @uncheckedCaptures]
+ }
+ newNodes += newSubNode
+ }
+ } else if (newSubNode.size == 1) {
+ newDataMap |= bitpos
+ nodeMigrateToDataTargetMap |= bitpos
+ if (nodesToMigrateToData eq null) {
+ nodesToMigrateToData = mutable.Queue()
+ }
+ nodesToMigrateToData += newSubNode
+ }
+
+ nodeIndex += 1
+ }
+
+ i += 1
+ }
+
+ if (newSize == 0) {
+ MapNode.empty
+ } else if (newSize == size) {
+ this
+ } else {
+ val newDataSize = bitCount(newDataMap)
+ val newContentSize = (MapNode.TupleLength * newDataSize) + bitCount(newNodeMap)
+ val newContent = new Array[Any](newContentSize)
+ val newOriginalHashes = new Array[Int](newDataSize)
+
+ val newAllMap = newDataMap | newNodeMap
+ val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap)
+
+        // note: we MUST start from the minimum index of the old (`this`) node, otherwise `old{Node,Data}Index` would
+        // not be incremented properly. Were it not for that, we could have started at Integer.numberOfTrailingZeros(newAllMap)
+ var i = minimumIndex
+
+ var oldDataIndex = 0
+ var oldNodeIndex = 0
+
+ var newDataIndex = 0
+ var newNodeIndex = 0
+
+ while (i < maxIndex) {
+ val bitpos = bitposFrom(i)
+
+ if ((bitpos & oldDataPassThrough) != 0) {
+ newContent(newDataIndex * TupleLength) = getKey(oldDataIndex)
+ newContent(newDataIndex * TupleLength + 1) = getValue(oldDataIndex)
+ newOriginalHashes(newDataIndex) = getHash(oldDataIndex)
+ newDataIndex += 1
+ oldDataIndex += 1
+ } else if ((bitpos & nodesToPassThroughMap) != 0) {
+ newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex)
+ newNodeIndex += 1
+ oldNodeIndex += 1
+ } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) {
+            // we need not check for null here: if nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData cannot be null
+ val node = nodesToMigrateToData.dequeue()
+ newContent(TupleLength * newDataIndex) = node.getKey(0)
+ newContent(TupleLength * newDataIndex + 1) = node.getValue(0)
+ newOriginalHashes(newDataIndex) = node.getHash(0)
+ newDataIndex += 1
+ oldNodeIndex += 1
+ } else if ((bitpos & mapOfNewNodes) != 0) {
+ newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue()
+ newNodeIndex += 1
+ oldNodeIndex += 1
+ } else if ((bitpos & dataMap) != 0) {
+ oldDataIndex += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ oldNodeIndex += 1
+ }
+
+ i += 1
+ }
+
+ new BitmapIndexedMapNode[K, V](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode)
+ }
+ }
+ }
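+
+  // Illustrative sketch (not from the original source): every branch of `filterImpl` above returns `this` when
+  // nothing is dropped, so a filter that keeps every entry should not allocate a new map:
+  //
+  //   val m = HashMap(1 -> "a", 2 -> "b")
+  //   m.filter(_ => true) eq m   // expected to be true, via the `newDataMap == dataMap` / `newSize == size` fast paths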
+}
+
+private final class HashCollisionMapNode[K, +V](
+ val originalHash: Int,
+ val hash: Int,
+ var content: Vector[(K, V @uV) @uncheckedCaptures]
+ ) extends MapNode[K, V] {
+
+ import Node._
+
+ require(content.length >= 2)
+
+ releaseFence()
+
+ private[immutable] def indexOf(key: Any): Int = {
+ val iter = content.iterator
+ var i = 0
+ while (iter.hasNext) {
+ if (iter.next()._1 == key) return i
+ i += 1
+ }
+ -1
+ }
+
+ def size: Int = content.length
+
+ def apply(key: K, originalHash: Int, hash: Int, shift: Int): V = get(key, originalHash, hash, shift).getOrElse(throw new NoSuchElementException)
+
+ def get(key: K, originalHash: Int, hash: Int, shift: Int): Option[V] =
+ if (this.hash == hash) {
+ val index = indexOf(key)
+ if (index >= 0) Some(content(index)._2) else None
+ } else None
+
+ override def getTuple(key: K, originalHash: Int, hash: Int, shift: Int): (K, V) = {
+ val index = indexOf(key)
+ if (index >= 0) content(index) else throw new NoSuchElementException
+ }
+
+ def getOrElse[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int, f: => V1): V1 = {
+ if (this.hash == hash) {
+ indexOf(key) match {
+ case -1 => f
+ case other => content(other)._2
+ }
+ } else f
+ }
+
+ override def containsKey(key: K, originalHash: Int, hash: Int, shift: Int): Boolean =
+ this.hash == hash && indexOf(key) >= 0
+
+ def contains[V1 >: V](key: K, value: V1, hash: Int, shift: Int): Boolean =
+ this.hash == hash && {
+ val index = indexOf(key)
+ index >= 0 && (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef])
+ }
+
+ def updated[V1 >: V](key: K, value: V1, originalHash: Int, hash: Int, shift: Int, replaceValue: Boolean): MapNode[K, V1] = {
+ val index = indexOf(key)
+ if (index >= 0) {
+ if (replaceValue) {
+ if (content(index)._2.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) {
+ this
+ } else {
+ new HashCollisionMapNode[K, V1](originalHash, hash, content.updated[(K, V1)](index, (key, value)))
+ }
+ } else {
+ this
+ }
+ } else {
+ new HashCollisionMapNode[K, V1](originalHash, hash, content.appended[(K, V1)]((key, value)))
+ }
+ }
+
+ def removed[V1 >: V](key: K, originalHash: Int, hash: Int, shift: Int): MapNode[K, V1] = {
+ if (!this.containsKey(key, originalHash, hash, shift)) {
+ this
+ } else {
+ val updatedContent = content.filterNot(keyValuePair => keyValuePair._1 == key)
+ // assert(updatedContent.size == content.size - 1)
+
+ updatedContent.size match {
+ case 1 =>
+ val (k, v) = updatedContent(0)
+ new BitmapIndexedMapNode[K, V1](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash)
+ case _ => new HashCollisionMapNode[K, V1](originalHash, hash, updatedContent)
+ }
+ }
+ }
+
+ def hasNodes: Boolean = false
+
+ def nodeArity: Int = 0
+
+ def getNode(index: Int): MapNode[K, V] =
+ throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.")
+
+ def hasPayload: Boolean = true
+
+ def payloadArity: Int = content.length
+
+ def getKey(index: Int): K = getPayload(index)._1
+ def getValue(index: Int): V = getPayload(index)._2
+
+ def getPayload(index: Int): (K, V) = content(index)
+
+ override def getHash(index: Int): Int = originalHash
+
+ def foreach[U](f: ((K, V)) => U): Unit = content.foreach(f)
+
+ def foreachEntry[U](f: (K, V) => U): Unit = content.foreach { case (k, v) => f(k, v)}
+
+ override def foreachWithHash(f: (K, V, Int) => Unit): Unit = {
+ val iter = content.iterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ f(next._1, next._2, originalHash)
+ }
+ }
+
+ override def transform[W](f: (K, V) => W): HashCollisionMapNode[K, W] = {
+ val newContent = Vector.newBuilder[(K, W)]
+ val contentIter = content.iterator
+ // true if any values have been transformed to a different value via `f`
+ var anyChanges = false
+ while(contentIter.hasNext) {
+ val (k, v) = contentIter.next()
+ val newValue = f(k, v)
+ newContent.addOne((k, newValue))
+ anyChanges ||= (v.asInstanceOf[AnyRef] ne newValue.asInstanceOf[AnyRef])
+ }
+ if (anyChanges) new HashCollisionMapNode(originalHash, hash, newContent.result())
+ else this.asInstanceOf[HashCollisionMapNode[K, W]]
+ }
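+
+  // Illustrative sketch (not from the original source): thanks to the `anyChanges` flag above, a transform whose
+  // function returns every value unchanged (by reference) yields `this` rather than a copy; e.g.
+  // `node.transform((_, v) => v) eq node` should hold.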
+
+ override def equals(that: Any): Boolean =
+ that match {
+ case node: HashCollisionMapNode[_, _] =>
+ (this eq node) ||
+ (this.hash == node.hash) &&
+ (this.content.length == node.content.length) && {
+ val iter = content.iterator
+ while (iter.hasNext) {
+ val (key, value) = iter.next()
+ val index = node.indexOf(key)
+ if (index < 0 || value != node.content(index)._2) {
+ return false
+ }
+ }
+ true
+ }
+ case _ => false
+ }
+
+ override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): HashCollisionMapNode[K, V1] = that match {
+ case hc: HashCollisionMapNode[K, V1] =>
+ if (hc eq this) {
+ this
+ } else {
+ var newContent: VectorBuilder[(K, V1)] = null
+ val iter = content.iterator
+ while (iter.hasNext) {
+ val nextPayload = iter.next()
+ if (hc.indexOf(nextPayload._1) < 0) {
+ if (newContent eq null) {
+ newContent = new VectorBuilder[(K, V1)]()
+ newContent.addAll(hc.content)
+ }
+ newContent.addOne(nextPayload)
+ }
+ }
+ if (newContent eq null) hc else new HashCollisionMapNode(originalHash, hash, newContent.result())
+ }
+ case _: BitmapIndexedMapNode[K, V1] =>
+ // should never happen -- hash collisions are never at the same level as bitmapIndexedMapNodes
+ throw new UnsupportedOperationException("Cannot concatenate a HashCollisionMapNode with a BitmapIndexedMapNode")
+ }
+
+
+ override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match {
+ case hc: HashCollisionMapNode[K, V1] =>
+ val iter = content.iterator
+ val rightArray = hc.content.toArray[AnyRef] // really Array[(K, V1)]
+
+ def rightIndexOf(key: K): Int = {
+ var i = 0
+ while (i < rightArray.length) {
+ val elem = rightArray(i)
+ if ((elem ne null) && (elem.asInstanceOf[(K, V1)])._1 == key) return i
+ i += 1
+ }
+ -1
+ }
+
+ while (iter.hasNext) {
+ val nextPayload = iter.next()
+ val index = rightIndexOf(nextPayload._1)
+
+ if (index == -1) {
+ builder.addOne(nextPayload)
+ } else {
+ val rightPayload = rightArray(index).asInstanceOf[(K, V1)]
+ rightArray(index) = null
+
+ builder.addOne(mergef(nextPayload, rightPayload))
+ }
+ }
+
+ var i = 0
+ while (i < rightArray.length) {
+ val elem = rightArray(i)
+ if (elem ne null) builder.addOne(elem.asInstanceOf[(K, V1)])
+ i += 1
+ }
+ case _: BitmapIndexedMapNode[K, V1] =>
+ throw new Exception("Cannot merge HashCollisionMapNode with BitmapIndexedMapNode")
+
+ }
+
+ override def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): Unit = {
+ val iter = content.iterator
+ while (iter.hasNext) {
+ val (k, v) = iter.next()
+ builder.addOne(k, v, originalHash, hash)
+ }
+ }
+
+ override def filterImpl(pred: ((K, V)) => Boolean, flipped: Boolean): MapNode[K, V] = {
+ val newContent = content.filterImpl(pred, flipped)
+ val newContentLength = newContent.length
+ if (newContentLength == 0) {
+ MapNode.empty
+ } else if (newContentLength == 1) {
+ val (k, v) = newContent.head
+ new BitmapIndexedMapNode[K, V](bitposFrom(maskFrom(hash, 0)), 0, Array(k, v), Array(originalHash), 1, hash)
+ } else if (newContentLength == content.length) this
+ else new HashCollisionMapNode(originalHash, hash, newContent)
+ }
+
+ override def copy(): HashCollisionMapNode[K, V] = new HashCollisionMapNode[K, V](originalHash, hash, content)
+
+ override def hashCode(): Int =
+ throw new UnsupportedOperationException("Trie nodes do not support hashing.")
+
+ override def cachedJavaKeySetHashCode: Int = size * hash
+
+}
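+
+// Illustrative sketch (not from the original source): a HashCollisionMapNode only arises when two distinct keys
+// share a full 32-bit hash code. "Aa" and "BB" are the classic example -- both have java.lang.String hashCode
+// 2112 -- so a map containing both should be backed by a HashCollisionMapNode at the bottom of the trie:
+//
+//   val collisions = HashMap("Aa" -> 1, "BB" -> 2)
+//   collisions("Aa")   // 1, found by a linear scan of the collision node's Vector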
+
+private final class MapKeyIterator[K, V](rootNode: MapNode[K, V])
+ extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[K] {
+
+ def next() = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ val key = currentValueNode.getKey(currentValueCursor)
+ currentValueCursor += 1
+
+ key
+ }
+
+}
+
+private final class MapValueIterator[K, V](rootNode: MapNode[K, V])
+ extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[V] {
+
+ def next() = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ val value = currentValueNode.getValue(currentValueCursor)
+ currentValueCursor += 1
+
+ value
+ }
+}
+
+private final class MapKeyValueTupleIterator[K, V](rootNode: MapNode[K, V])
+ extends ChampBaseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] {
+
+ def next() = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ val payload = currentValueNode.getPayload(currentValueCursor)
+ currentValueCursor += 1
+
+ payload
+ }
+
+}
+
+private final class MapKeyValueTupleReverseIterator[K, V](rootNode: MapNode[K, V])
+ extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[(K, V)] {
+
+ def next() = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ val payload = currentValueNode.getPayload(currentValueCursor)
+ currentValueCursor -= 1
+
+ payload
+ }
+}
+
+private final class MapKeyValueTupleHashIterator[K, V](rootNode: MapNode[K, V])
+ extends ChampBaseReverseIterator[MapNode[K, V]](rootNode) with Iterator[Any] {
+ private[this] var hash = 0
+ private[this] var value: V @uncheckedCaptures = _
+ override def hashCode(): Int = MurmurHash3.tuple2Hash(hash, value.##, MurmurHash3.productSeed)
+ def next() = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ hash = currentValueNode.getHash(currentValueCursor)
+ value = currentValueNode.getValue(currentValueCursor)
+ currentValueCursor -= 1
+ this
+ }
+}
+
+/** Used in HashMap[K, V]#removedAll(HashSet[K]) */
+private final class MapNodeRemoveAllSetNodeIterator[K](rootSetNode: SetNode[K]) extends ChampBaseIterator(rootSetNode) {
+ /** Returns the result of immutably removing all keys in `rootSetNode` from `rootMapNode` */
+ def removeAll[V](rootMapNode: BitmapIndexedMapNode[K, V]): BitmapIndexedMapNode[K, V] = {
+ var curr = rootMapNode
+ while (curr.size > 0 && hasNext) {
+ val originalHash = currentValueNode.getHash(currentValueCursor)
+ curr = curr.removed(
+ key = currentValueNode.getPayload(currentValueCursor),
+ keyHash = improve(originalHash),
+ originalHash = originalHash,
+ shift = 0
+ )
+ currentValueCursor += 1
+ }
+ curr
+ }
+}
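+
+// Illustrative sketch (not from the original source): the iterator above walks the keys of a set trie and removes
+// each one from the map's root node, stopping early once the map is empty. Assuming HashMap#removedAll dispatches
+// here when its argument is a HashSet:
+//
+//   val m = HashMap(1 -> "a", 2 -> "b", 3 -> "c")
+//   m -- HashSet(1, 2)   // HashMap(3 -> "c")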
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll `immutable.HashMap`
+ * @define coll immutable champ hash map
+ */
+@SerialVersionUID(3L)
+object HashMap extends MapFactory[HashMap] {
+
+ @transient
+ private final val EmptyMap = new HashMap(MapNode.empty)
+
+ def empty[K, V]: HashMap[K, V] =
+ EmptyMap.asInstanceOf[HashMap[K, V]]
+
+ def from[K, V](source: collection.IterableOnce[(K, V)]^): HashMap[K, V] =
+ source match {
+ case hs: HashMap[K, V] => hs
+ case _ => (newBuilder[K, V] ++= source).result()
+ }
+
+ /** Create a new Builder which can be reused after calling `result()` without an
+ * intermediate call to `clear()` in order to build multiple related results.
+ */
+ def newBuilder[K, V]: ReusableBuilder[(K, V), HashMap[K, V]] = new HashMapBuilder[K, V]
+}
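+
+// Illustrative sketch (not from the original source): `newBuilder` is reusable across `result()` calls. The
+// builder hands out its internal trie from `result()` and records it as aliased; the next `addOne` makes a
+// defensive copy first, so previously returned maps are never mutated:
+//
+//   val b = HashMap.newBuilder[String, Int]
+//   b += ("a" -> 1)
+//   val m1 = b.result()   // HashMap(a -> 1)
+//   b += ("b" -> 2)       // copies the trie, because m1 still aliases the old root
+//   val m2 = b.result()   // HashMap(a -> 1, b -> 2); m1 is unchanged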
+
+
+/** A Builder for a HashMap.
+ * $multipleResults
+ */
+private[immutable] final class HashMapBuilder[K, V] extends ReusableBuilder[(K, V), HashMap[K, V]] {
+ import MapNode._
+ import Node._
+
+ private def newEmptyRootNode = new BitmapIndexedMapNode[K, V](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0)
+
+  /** The last HashMap returned by `result()`, if any, otherwise null.
+    * Indicates that on the next add the elements must be copied to an identical structure before mutation
+    * continues. */
+ private var aliased: HashMap[K, V] @uncheckedCaptures = _
+
+ private def isAliased: Boolean = aliased != null
+
+  /** The root node of the partially built HashMap */
+ private var rootNode: BitmapIndexedMapNode[K, V] @uncheckedCaptures = newEmptyRootNode
+
+ private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 =
+ if (rootNode.size == 0) value
+ else {
+ val originalHash = key.##
+ rootNode.getOrElse(key, originalHash, improve(originalHash), 0, value)
+ }
+
+ /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */
+ private[this] def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
+ if (ix < 0) throw new ArrayIndexOutOfBoundsException
+ if (ix > as.length) throw new ArrayIndexOutOfBoundsException
+ val result = new Array[Int](as.length + 1)
+ arraycopy(as, 0, result, 0, ix)
+ result(ix) = elem
+ arraycopy(as, ix, result, ix + 1, as.length - ix)
+ result
+ }
+
+  /** Inserts the key-value pair into the BitmapIndexedMapNode. Requires that the key is not yet present */
+  private[this] def insertValue[V1 >: V](bm: BitmapIndexedMapNode[K, V], bitpos: Int, key: K, originalHash: Int, keyHash: Int, value: V1): Unit = {
+ val dataIx = bm.dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = bm.content
+ val dst = new Array[Any](src.length + TupleLength)
+
+ // copy 'src' and insert 2 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ dst(idx) = key
+ dst(idx + 1) = value
+ arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+ val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash)
+
+ bm.dataMap |= bitpos
+ bm.content = dst
+ bm.originalHashes = dstHashes
+ bm.size += 1
+ bm.cachedJavaKeySetHashCode += keyHash
+ }
+
+ /** Upserts a key/value pair into mapNode, mutably */
+ private[immutable] def update(mapNode: MapNode[K, V], key: K, value: V, originalHash: Int, keyHash: Int, shift: Int): Unit = {
+ mapNode match {
+ case bm: BitmapIndexedMapNode[K, V] =>
+ val mask = maskFrom(keyHash, shift)
+ val bitpos = bitposFrom(mask)
+ if ((bm.dataMap & bitpos) != 0) {
+ val index = indexFrom(bm.dataMap, mask, bitpos)
+ val key0 = bm.getKey(index)
+ val key0UnimprovedHash = bm.getHash(index)
+
+ if (key0UnimprovedHash == originalHash && key0 == key) {
+ bm.content(TupleLength * index + 1) = value
+ } else {
+ val value0 = bm.getValue(index)
+ val key0Hash = improve(key0UnimprovedHash)
+
+ val subNodeNew: MapNode[K, V] =
+ bm.mergeTwoKeyValPairs(key0, value0, key0UnimprovedHash, key0Hash, key, value, originalHash, keyHash, shift + BitPartitionSize)
+
+ bm.migrateFromInlineToNodeInPlace(bitpos, key0Hash, subNodeNew)
+ }
+
+ } else if ((bm.nodeMap & bitpos) != 0) {
+ val index = indexFrom(bm.nodeMap, mask, bitpos)
+ val subNode = bm.getNode(index)
+ val beforeSize = subNode.size
+ val beforeHash = subNode.cachedJavaKeySetHashCode
+ update(subNode, key, value, originalHash, keyHash, shift + BitPartitionSize)
+ bm.size += subNode.size - beforeSize
+ bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHash
+ } else {
+ insertValue(bm, bitpos, key, originalHash, keyHash, value)
+ }
+ case hc: HashCollisionMapNode[K, V] =>
+ val index = hc.indexOf(key)
+ if (index < 0) {
+ hc.content = hc.content.appended((key, value))
+ } else {
+ hc.content = hc.content.updated(index, (key, value))
+ }
+ }
+ }
+
+ /** If currently referencing aliased structure, copy elements to new mutable structure */
+ private[this] def ensureUnaliased() = {
+ if (isAliased) copyElems()
+ aliased = null
+ }
+
+ /** Copy elements to new mutable structure */
+ private[this] def copyElems(): Unit = {
+ rootNode = rootNode.copy()
+ }
+
+ override def result(): HashMap[K, V] =
+ if (rootNode.size == 0) {
+ HashMap.empty
+ } else if (aliased != null) {
+ aliased
+ } else {
+ aliased = new HashMap(rootNode)
+ releaseFence()
+ aliased
+ }
+
+ override def addOne(elem: (K, V)): this.type = {
+ ensureUnaliased()
+ val h = elem._1.##
+ val im = improve(h)
+ update(rootNode, elem._1, elem._2, h, im, 0)
+ this
+ }
+
+ def addOne(key: K, value: V): this.type = {
+ ensureUnaliased()
+ val originalHash = key.##
+ update(rootNode, key, value, originalHash, improve(originalHash), 0)
+ this
+ }
+ def addOne(key: K, value: V, originalHash: Int): this.type = {
+ ensureUnaliased()
+ update(rootNode, key, value, originalHash, improve(originalHash), 0)
+ this
+ }
+ def addOne(key: K, value: V, originalHash: Int, hash: Int): this.type = {
+ ensureUnaliased()
+ update(rootNode, key, value, originalHash, hash, 0)
+ this
+ }
+
+ override def addAll(xs: IterableOnce[(K, V)]^): this.type = {
+ ensureUnaliased()
+ xs match {
+ case hm: HashMap[K, V] =>
+ new ChampBaseIterator[MapNode[K, V]](hm.rootNode) {
+ while(hasNext) {
+ val originalHash = currentValueNode.getHash(currentValueCursor)
+ update(
+ mapNode = rootNode,
+ key = currentValueNode.getKey(currentValueCursor),
+ value = currentValueNode.getValue(currentValueCursor),
+ originalHash = originalHash,
+ keyHash = improve(originalHash),
+ shift = 0
+ )
+ currentValueCursor += 1
+ }
+ }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position
+ case hm: collection.mutable.HashMap[K, V] =>
+ val iter = hm.nodeIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hm.unimproveHash(next.hash)
+ val hash = improve(originalHash)
+ update(rootNode, next.key, next.value, originalHash, hash, 0)
+ }
+ case lhm: collection.mutable.LinkedHashMap[K, V] =>
+ val iter = lhm.entryIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = lhm.unimproveHash(next.hash)
+ val hash = improve(originalHash)
+ update(rootNode, next.key, next.value, originalHash, hash, 0)
+ }
+ case thatMap: Map[K, V] =>
+ thatMap.foreachEntry((key, value) => addOne(key, value))
+ case other =>
+ val it = other.iterator
+ while(it.hasNext) addOne(it.next())
+ }
+
+ this
+ }
+
+ override def clear(): Unit = {
+ aliased = null
+ if (rootNode.size > 0) {
+ rootNode = newEmptyRootNode
+ }
+ }
+
+ private[collection] def size: Int = rootNode.size
+
+ override def knownSize: Int = rootNode.size
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/HashSet.scala b/tests/pos-special/stdlib/collection/immutable/HashSet.scala
new file mode 100644
index 000000000000..38f394a7005f
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/HashSet.scala
@@ -0,0 +1,2125 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import java.lang.Integer.{bitCount, numberOfTrailingZeros}
+import java.lang.System.arraycopy
+
+import scala.collection.Hashing.improve
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.ReusableBuilder
+import scala.runtime.Statics.releaseFence
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** This class implements immutable sets using a Compressed Hash-Array Mapped Prefix-tree.
+ * See paper https://michael.steindorfer.name/publications/oopsla15.pdf for more details.
+ *
+ * @tparam A the type of the elements contained in this hash set.
+ * @define Coll `immutable.HashSet`
+ * @define coll immutable champ hash set
+ */
+final class HashSet[A] private[immutable](private[immutable] val rootNode: BitmapIndexedSetNode[A])
+ extends AbstractSet[A]
+ with StrictOptimizedSetOps[A, HashSet, HashSet[A]]
+ with IterableFactoryDefaults[A, HashSet]
+ with DefaultSerializable {
+
+ def this() = this(SetNode.empty)
+
+ // This release fence is present because rootNode may have previously been mutated during construction.
+ releaseFence()
+
+ private[this] def newHashSetOrThis(newRootNode: BitmapIndexedSetNode[A]): HashSet[A] =
+ if (rootNode eq newRootNode) this else new HashSet(newRootNode)
+
+ override def iterableFactory: IterableFactory[HashSet] = HashSet
+
+ override def knownSize: Int = rootNode.size
+
+ override def size: Int = rootNode.size
+
+ override def isEmpty: Boolean = rootNode.size == 0
+
+ def iterator: Iterator[A] = {
+ if (isEmpty) Iterator.empty
+ else new SetIterator[A](rootNode)
+ }
+
+ protected[immutable] def reverseIterator: Iterator[A] = new SetReverseIterator[A](rootNode)
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+ import convert.impl._
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Int])
+ case StepperShape.LongShape => LongChampStepper.from[ SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleChampStepper.from[SetNode[A]](size, rootNode, (node, i) => node.getPayload(i).asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyChampStepper.from[A, SetNode[A]](size, rootNode, (node, i) => node.getPayload(i)))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ def contains(element: A): Boolean = {
+ val elementUnimprovedHash = element.##
+ val elementHash = improve(elementUnimprovedHash)
+ rootNode.contains(element, elementUnimprovedHash, elementHash, 0)
+ }
+
+ def incl(element: A): HashSet[A] = {
+ val elementUnimprovedHash = element.##
+ val elementHash = improve(elementUnimprovedHash)
+ val newRootNode = rootNode.updated(element, elementUnimprovedHash, elementHash, 0)
+ newHashSetOrThis(newRootNode)
+ }
+
+ def excl(element: A): HashSet[A] = {
+ val elementUnimprovedHash = element.##
+ val elementHash = improve(elementUnimprovedHash)
+ val newRootNode = rootNode.removed(element, elementUnimprovedHash, elementHash, 0)
+ newHashSetOrThis(newRootNode)
+ }
+
+ override def concat(that: IterableOnce[A]): HashSet[A] =
+ that match {
+ case hs: HashSet[A] =>
+ if (isEmpty) hs
+ else {
+ val newNode = rootNode.concat(hs.rootNode, 0)
+ if (newNode eq hs.rootNode) hs
+ else newHashSetOrThis(newNode)
+ }
+ case hs: collection.mutable.HashSet[A] =>
+ val iter = hs.nodeIterator
+ var current = rootNode
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hs.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ current = current.updated(next.key, originalHash, improved, 0)
+
+ if (current ne rootNode) {
+ var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hs.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap)
+ }
+ return new HashSet(current)
+ }
+ }
+ this
+ case lhs: collection.mutable.LinkedHashSet[A] =>
+ val iter = lhs.entryIterator
+ var current = rootNode
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = lhs.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ current = current.updated(next.key, originalHash, improved, 0)
+
+ if (current ne rootNode) {
+ var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = lhs.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ shallowlyMutableNodeMap = current.updateWithShallowMutations(next.key, originalHash, improved, 0, shallowlyMutableNodeMap)
+ }
+ return new HashSet(current)
+ }
+ }
+ this
+ case _ =>
+ val iter = that.iterator
+ var current = rootNode
+ while (iter.hasNext) {
+ val element = iter.next()
+ val originalHash = element.##
+ val improved = improve(originalHash)
+ current = current.updated(element, originalHash, improved, 0)
+
+ if (current ne rootNode) {
+            // Note: we could have started with shallowlyMutableNodeMap = 0, but this way, if the first changed
+            // element ended up in a subnode beneath the root, that subnode is marked right away as being
+            // shallowly mutable.
+            //
+            // Since `element` has just been inserted, and certainly caused a new root node to be created, we know
+            // that it either caused a new subnode to be created underneath `current`, in which case we should
+            // carry on mutating that subnode, or it ended up as a child data pair of the root, in which case no
+            // harm is done by including its bit position in shallowlyMutableNodeMap anyway.
+ var shallowlyMutableNodeMap = Node.bitposFrom(Node.maskFrom(improved, 0))
+ while (iter.hasNext) {
+ val element = iter.next()
+ val originalHash = element.##
+ val improved = improve(originalHash)
+ shallowlyMutableNodeMap = current.updateWithShallowMutations(element, originalHash, improved, 0, shallowlyMutableNodeMap)
+ }
+ return new HashSet(current)
+ }
+ }
+ this
+ }
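+
+  // Illustrative sketch (not from the original source): because `concat` returns `hs` whenever the merged root
+  // node is reference-equal to `hs.rootNode`, unioning a set with itself or with a superset should allocate
+  // nothing:
+  //
+  //   val s = HashSet(1, 2, 3)
+  //   (s ++ s) eq s            // expected to be true
+  //   (HashSet(1) ++ s) eq s   // expected to be true: every element is already present in `s`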
+
+ override def tail: HashSet[A] = this - head
+
+ override def init: HashSet[A] = this - last
+
+ override def head: A = iterator.next()
+
+ override def last: A = reverseIterator.next()
+
+ override def foreach[U](f: A => U): Unit = rootNode.foreach(f)
+
+ /** Applies a function f to each element, and its corresponding **original** hash, in this Set */
+ @`inline` private[collection] def foreachWithHash(f: (A, Int) => Unit): Unit = rootNode.foreachWithHash(f)
+
+ /** Applies a function f to each element, and its corresponding **original** hash, in this Set
+ * Stops iterating the first time that f returns `false`.*/
+ @`inline` private[collection] def foreachWithHashWhile(f: (A, Int) => Boolean): Unit = rootNode.foreachWithHashWhile(f)
+
+ def subsetOf(that: Set[A]): Boolean = if (that.isEmpty) true else that match {
+ case set: HashSet[A] => rootNode.subsetOf(set.rootNode, 0)
+ case _ => super.subsetOf(that)
+ }
+
+ override def equals(that: Any): Boolean =
+ that match {
+ case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode)
+ case _ => super.equals(that)
+ }
+
+ override protected[this] def className = "HashSet"
+
+ override def hashCode(): Int = {
+ val it = new SetHashIterator(rootNode)
+ val hash = MurmurHash3.unorderedHash(it, MurmurHash3.setSeed)
+ //assert(hash == super.hashCode())
+ hash
+ }
+
+ override def diff(that: collection.Set[A]): HashSet[A] = {
+ if (isEmpty) {
+ this
+ } else {
+ that match {
+ case hashSet: HashSet[A] =>
+ if (hashSet.isEmpty) this else {
+ val newRootNode = rootNode.diff(hashSet.rootNode, 0)
+            if (newRootNode.size == 0) HashSet.empty else newHashSetOrThis(newRootNode)
+ }
+ case hashSet: collection.mutable.HashSet[A] =>
+ val iter = hashSet.nodeIterator
+ var curr = rootNode
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hashSet.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+ curr = curr.removed(next.key, originalHash, improved, 0)
+ if (curr ne rootNode) {
+ if (curr.size == 0) {
+ return HashSet.empty
+ }
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = hashSet.unimproveHash(next.hash)
+ val improved = improve(originalHash)
+
+ curr.removeWithShallowMutations(next.key, originalHash, improved)
+
+ if (curr.size == 0) {
+ return HashSet.empty
+ }
+ }
+ return new HashSet(curr)
+ }
+ }
+ this
+
+ case other =>
+ val thatKnownSize = other.knownSize
+
+ if (thatKnownSize == 0) {
+ this
+ } else if (thatKnownSize <= size) {
+            /* this branch intentionally includes the case of thatKnownSize == -1. HashSets are quite fast at look-up, so
+            we are likely to be the faster of the two at that. */
+ removedAllWithShallowMutations(other)
+ } else {
+ // TODO: Develop more sophisticated heuristic for which branch to take
+ filterNot(other.contains)
+ }
+ }
+
+ }
+ }
+
+ /** Immutably removes all elements of `that` from this HashSet
+ *
+ * Mutation is used internally, but only on root SetNodes which this method itself creates.
+ *
+ * That is, this method is safe to call on published sets because it does not mutate `this`
+ */
+ private[this] def removedAllWithShallowMutations(that: IterableOnce[A]): HashSet[A] = {
+ val iter = that.iterator
+ var curr = rootNode
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = next.##
+ val improved = improve(originalHash)
+ curr = curr.removed(next, originalHash, improved, 0)
+ if (curr ne rootNode) {
+ if (curr.size == 0) {
+ return HashSet.empty
+ }
+ while (iter.hasNext) {
+ val next = iter.next()
+ val originalHash = next.##
+ val improved = improve(originalHash)
+
+ curr.removeWithShallowMutations(next, originalHash, improved)
+
+ if (curr.size == 0) {
+ return HashSet.empty
+ }
+ }
+ return new HashSet(curr)
+ }
+ }
+ this
+ }
+
+ override def removedAll(that: IterableOnce[A]): HashSet[A] = that match {
+ case set: scala.collection.Set[A] => diff(set)
+ case range: Range if range.length > size =>
+ filter {
+ case i: Int => !range.contains(i)
+ case _ => true
+ }
+
+ case _ =>
+ removedAllWithShallowMutations(that)
+ }
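+
+  // Illustrative sketch (not from the original source): the `Range` case above matters because a range can be
+  // enormous yet answers `contains` in constant time, so it is cheaper to filter the (smaller) set than to
+  // perform one removal per range element:
+  //
+  //   val s = HashSet(1, 2, 500)
+  //   s -- (0 until 1000000)   // three containment checks instead of a million removals; yields an empty set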
+
+ override def partition(p: A => Boolean): (HashSet[A], HashSet[A]) = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.partition(p)
+ }
+
+ override def span(p: A => Boolean): (HashSet[A], HashSet[A]) = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.span(p)
+ }
+
+ override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): HashSet[A] = {
+ val newRootNode = rootNode.filterImpl(pred, isFlipped)
+ if (newRootNode eq rootNode) this
+ else if (newRootNode.size == 0) HashSet.empty
+ else new HashSet(newRootNode)
+ }
+
+ override def intersect(that: collection.Set[A]): HashSet[A] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.intersect(that)
+ }
+
+ override def take(n: Int): HashSet[A] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.take(n)
+ }
+
+ override def takeRight(n: Int): HashSet[A] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.takeRight(n)
+ }
+
+ override def takeWhile(p: A => Boolean): HashSet[A] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.takeWhile(p)
+ }
+
+ override def drop(n: Int): HashSet[A] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.drop(n)
+ }
+
+ override def dropRight(n: Int): HashSet[A] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.dropRight(n)
+ }
+
+ override def dropWhile(p: A => Boolean): HashSet[A] = {
+ // This method has been preemptively overridden in order to ensure that an optimizing implementation may be included
+ // in a minor release without breaking binary compatibility.
+ super.dropWhile(p)
+ }
+}
+
+private[immutable] object SetNode {
+
+ private final val EmptySetNode = new BitmapIndexedSetNode(0, 0, Array.empty, Array.empty, 0, 0)
+
+ def empty[A]: BitmapIndexedSetNode[A] = EmptySetNode.asInstanceOf[BitmapIndexedSetNode[A]]
+
+ final val TupleLength = 1
+
+}
+
+private[immutable] sealed abstract class SetNode[A] extends Node[SetNode[A]] {
+
+ def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean
+
+ def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A]
+
+ def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A]
+
+ def hasNodes: Boolean
+
+ def nodeArity: Int
+
+ def getNode(index: Int): SetNode[A]
+
+ def hasPayload: Boolean
+
+ def payloadArity: Int
+
+ def getPayload(index: Int): A
+
+ def size: Int
+
+ def foreach[U](f: A => U): Unit
+
+ def subsetOf(that: SetNode[A], shift: Int): Boolean
+
+ def copy(): SetNode[A]
+
+ def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A]
+
+ def diff(that: SetNode[A], shift: Int): SetNode[A]
+
+ def concat(that: SetNode[A], shift: Int): SetNode[A]
+
+ def foreachWithHash(f: (A, Int) => Unit): Unit
+
+ def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean
+}
+
+private final class BitmapIndexedSetNode[A](
+ var dataMap: Int,
+ var nodeMap: Int,
+ var content: Array[Any],
+ var originalHashes: Array[Int],
+ var size: Int,
+ var cachedJavaKeySetHashCode: Int) extends SetNode[A] {
+
+ import Node._
+ import SetNode._
+
+ /*
+ assert(checkInvariantContentIsWellTyped())
+ assert(checkInvariantSubNodesAreCompacted())
+
+ private final def checkInvariantSubNodesAreCompacted(): Boolean =
+ new SetIterator[A](this).size - payloadArity >= 2 * nodeArity
+
+ private final def checkInvariantContentIsWellTyped(): Boolean = {
+ val predicate1 = TupleLength * payloadArity + nodeArity == content.length
+
+ val predicate2 = Range(0, TupleLength * payloadArity)
+ .forall(i => content(i).isInstanceOf[SetNode[_]] == false)
+
+ val predicate3 = Range(TupleLength * payloadArity, content.length)
+ .forall(i => content(i).isInstanceOf[SetNode[_]] == true)
+
+ predicate1 && predicate2 && predicate3
+ }
+ */
+
+ def getPayload(index: Int): A = content(index).asInstanceOf[A]
+
+ override def getHash(index: Int): Int = originalHashes(index)
+
+ def getNode(index: Int): SetNode[A] = content(content.length - 1 - index).asInstanceOf[SetNode[A]]
+
+ def contains(element: A, originalHash: Int, elementHash: Int, shift: Int): Boolean = {
+ val mask = maskFrom(elementHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ return originalHashes(index) == originalHash && element == this.getPayload(index)
+ }
+
+ if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ return this.getNode(index).contains(element, originalHash, elementHash, shift + BitPartitionSize)
+ }
+
+ false
+ }
+
+ def updated(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = {
+ val mask = maskFrom(elementHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val element0 = this.getPayload(index)
+
+ if (element0.asInstanceOf[AnyRef] eq element.asInstanceOf[AnyRef]) {
+ return this
+ } else {
+ val element0UnimprovedHash = getHash(index)
+ val element0Hash = improve(element0UnimprovedHash)
+ if (originalHash == element0UnimprovedHash && element0 == element) {
+ return this
+ } else {
+ val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+ return copyAndMigrateFromInlineToNode(bitpos, element0Hash, subNodeNew)
+ }
+ }
+ }
+ if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ val subNode = this.getNode(index)
+
+ val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize)
+ if (subNode eq subNodeNew) {
+ return this
+ } else {
+ return copyAndSetNode(bitpos, subNode, subNodeNew)
+ }
+ }
+
+ copyAndInsertValue(bitpos, element, originalHash, elementHash)
+ }
+ /** A variant of `updated` which performs shallow mutations on the root (`this`), and if possible, on immediately
+ * descendant child nodes (only one level beneath `this`)
+ *
+ * The caller should pass a bitmap of child nodes of this node, which this method may mutate.
+ * If this method may mutate a child node, then if the updated value is located in that child node, it will
+ * be shallowly mutated (its children will not be mutated).
+ *
+ * If instead this method may not mutate the child node in which the to-be-updated value is located, then
+ * that child will be updated immutably, but the result will be mutably re-inserted as a child of this node.
+ *
+    * @param element the element to insert
+    * @param originalHash element.##
+    * @param elementHash the improved hash of `element`
+    * @param shallowlyMutableNodeMap bitmap of child nodes of this node, which can be shallowly mutated
+    *                                during the call to this method
+    *
+    * @return Int which is the bitwise OR of shallowlyMutableNodeMap and the bit positions of any freshly created
+    *         nodes, which will be available for mutation in subsequent calls.
+ */
+ def updateWithShallowMutations(element: A, originalHash: Int, elementHash: Int, shift: Int, shallowlyMutableNodeMap: Int): Int = {
+ val mask = maskFrom(elementHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val element0 = getPayload(index)
+ val element0UnimprovedHash = getHash(index)
+ if (element0UnimprovedHash == originalHash && element0 == element) {
+ shallowlyMutableNodeMap
+ } else {
+ val element0Hash = improve(element0UnimprovedHash)
+ val subNodeNew = mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+ migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew)
+ shallowlyMutableNodeMap | bitpos
+ }
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ val subNode = this.getNode(index)
+ val subNodeSize = subNode.size
+ val subNodeCachedJavaKeySetHashCode = subNode.cachedJavaKeySetHashCode
+
+ var returnNodeMap = shallowlyMutableNodeMap
+
+ val subNodeNew: SetNode[A] = subNode match {
+ case subNodeBm: BitmapIndexedSetNode[A] if (bitpos & shallowlyMutableNodeMap) != 0 =>
+ subNodeBm.updateWithShallowMutations(element, originalHash, elementHash, shift + BitPartitionSize, 0)
+ subNodeBm
+ case _ =>
+ val subNodeNew = subNode.updated(element, originalHash, elementHash, shift + BitPartitionSize)
+ if (subNodeNew ne subNode) {
+ returnNodeMap |= bitpos
+ }
+ subNodeNew
+ }
+
+ this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew
+ this.size = this.size - subNodeSize + subNodeNew.size
+ this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNodeCachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode
+ returnNodeMap
+ } else {
+ val dataIx = dataIndex(bitpos)
+ val idx = dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length + TupleLength)
+
+ // copy 'src' and insert 2 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ dst(idx) = element
+ arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+ val dstHashes = insertElement(originalHashes, dataIx, originalHash)
+
+ this.dataMap |= bitpos
+ this.content = dst
+ this.originalHashes = dstHashes
+ this.size += 1
+ this.cachedJavaKeySetHashCode += elementHash
+ shallowlyMutableNodeMap
+ }
+ }
+
+
+ def removed(element: A, originalHash: Int, elementHash: Int, shift: Int): BitmapIndexedSetNode[A] = {
+ val mask = maskFrom(elementHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val element0 = this.getPayload(index)
+
+ if (element0 == element) {
+ if (this.payloadArity == 2 && this.nodeArity == 0) {
+ /*
+           * Create new node with the remaining pair. The new node will either a) become the new root
+           * that is returned, or b) be unwrapped and inlined on the way back up.
+ */
+ val newDataMap = if (shift == 0) (dataMap ^ bitpos) else bitposFrom(maskFrom(elementHash, 0))
+ if (index == 0)
+ return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(1)), Array(originalHashes(1)), size - 1, improve(originalHashes(1)))
+ else
+ return new BitmapIndexedSetNode[A](newDataMap, 0, Array(getPayload(0)), Array(originalHashes(0)), size - 1, improve(originalHashes(0)))
+ }
+ else return copyAndRemoveValue(bitpos, elementHash)
+ } else return this
+ }
+
+ if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ val subNode = this.getNode(index)
+
+ val subNodeNew = subNode.removed(element, originalHash, elementHash, shift + BitPartitionSize)
+
+ if (subNodeNew eq subNode) return this
+
+      // cache just in case subNodeNew is a hash-collision node, in which case a little arithmetic is avoided
+      // in Vector#length
+ val subNodeNewSize = subNodeNew.size
+
+ if (subNodeNewSize == 1) {
+ if (this.size == subNode.size) {
+ // subNode is the only child (no other data or node children of `this` exist)
+ // escalate (singleton or empty) result
+ return subNodeNew.asInstanceOf[BitmapIndexedSetNode[A]]
+ } else {
+ // inline value (move to front)
+ return copyAndMigrateFromNodeToInline(bitpos, elementHash, subNode, subNodeNew)
+ }
+ } else if (subNodeNewSize > 1) {
+ // modify current node (set replacement node)
+ return copyAndSetNode(bitpos, subNode, subNodeNew)
+ }
+ }
+
+ this
+ }
+ /** Variant of `removed` which will perform mutation on only the top-level node (`this`), rather than return a new
+ * node
+ *
+ * Should only be called on root nodes, because shift is assumed to be 0
+ *
+ * @param element the element to remove
+ * @param originalHash the original hash of `element`
+ * @param elementHash the improved hash of `element`
+ */
+ def removeWithShallowMutations(element: A, originalHash: Int, elementHash: Int): this.type = {
+ val mask = maskFrom(elementHash, 0)
+ val bitpos = bitposFrom(mask)
+
+ if ((dataMap & bitpos) != 0) {
+ val index = indexFrom(dataMap, mask, bitpos)
+ val element0 = this.getPayload(index)
+
+ if (element0 == element) {
+ if (this.payloadArity == 2 && this.nodeArity == 0) {
+ val newDataMap = dataMap ^ bitpos
+ if (index == 0) {
+ val newContent = Array[Any](getPayload(1))
+ val newOriginalHashes = Array(originalHashes(1))
+ val newCachedJavaKeySetHashCode = improve(getHash(1))
+ this.content = newContent
+ this.originalHashes = newOriginalHashes
+ this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
+ } else {
+ val newContent = Array[Any](getPayload(0))
+ val newOriginalHashes = Array(originalHashes(0))
+ val newCachedJavaKeySetHashCode = improve(getHash(0))
+ this.content = newContent
+ this.originalHashes = newOriginalHashes
+ this.cachedJavaKeySetHashCode = newCachedJavaKeySetHashCode
+ }
+ this.dataMap = newDataMap
+ this.nodeMap = 0
+ this.size = 1
+ this
+ }
+ else {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length - TupleLength)
+
+ arraycopy(src, 0, dst, 0, idx)
+ arraycopy(src, idx + TupleLength, dst, idx, src.length - idx - TupleLength)
+
+ val dstHashes = removeElement(originalHashes, dataIx)
+
+ this.dataMap = this.dataMap ^ bitpos
+ this.content = dst
+ this.originalHashes = dstHashes
+ this.size -= 1
+ this.cachedJavaKeySetHashCode -= elementHash
+ this
+ }
+ } else this
+ } else if ((nodeMap & bitpos) != 0) {
+ val index = indexFrom(nodeMap, mask, bitpos)
+ val subNode = this.getNode(index)
+
+ val subNodeNew = subNode.removed(element, originalHash, elementHash, BitPartitionSize).asInstanceOf[BitmapIndexedSetNode[A]]
+
+ if (subNodeNew eq subNode) return this
+
+ if (subNodeNew.size == 1) {
+ if (this.payloadArity == 0 && this.nodeArity == 1) {
+ this.dataMap = subNodeNew.dataMap
+ this.nodeMap = subNodeNew.nodeMap
+ this.content = subNodeNew.content
+ this.originalHashes = subNodeNew.originalHashes
+ this.size = subNodeNew.size
+ this.cachedJavaKeySetHashCode = subNodeNew.cachedJavaKeySetHashCode
+ this
+ } else {
+ migrateFromNodeToInlineInPlace(bitpos, originalHash, elementHash, subNode, subNodeNew)
+ this
+ }
+ } else {
+ // size must be > 1
+ this.content(this.content.length - 1 - this.nodeIndex(bitpos)) = subNodeNew
+ this.size -= 1
+ this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - subNode.cachedJavaKeySetHashCode + subNodeNew.cachedJavaKeySetHashCode
+ this
+ }
+ } else this
+ }
+
+ def mergeTwoKeyValPairs(key0: A, originalKeyHash0: Int, keyHash0: Int, key1: A, originalKeyHash1: Int, keyHash1: Int, shift: Int): SetNode[A] = {
+ // assert(key0 != key1)
+
+ if (shift >= HashCodeLength) {
+ new HashCollisionSetNode[A](originalKeyHash0, keyHash0, Vector(key0, key1))
+ } else {
+ val mask0 = maskFrom(keyHash0, shift)
+ val mask1 = maskFrom(keyHash1, shift)
+
+ if (mask0 != mask1) {
+ // unique prefixes, payload fits on same level
+ val dataMap = bitposFrom(mask0) | bitposFrom(mask1)
+ val newCachedHashCode = keyHash0 + keyHash1
+
+ if (mask0 < mask1) {
+ new BitmapIndexedSetNode[A](dataMap, 0, Array(key0, key1), Array(originalKeyHash0, originalKeyHash1), 2, newCachedHashCode)
+ } else {
+ new BitmapIndexedSetNode[A](dataMap, 0, Array(key1, key0), Array(originalKeyHash1, originalKeyHash0), 2, newCachedHashCode)
+ }
+ } else {
+ // identical prefixes, payload must be disambiguated deeper in the trie
+ val nodeMap = bitposFrom(mask0)
+ val node = mergeTwoKeyValPairs(key0, originalKeyHash0, keyHash0, key1, originalKeyHash1, keyHash1, shift + BitPartitionSize)
+
+ new BitmapIndexedSetNode[A](0, nodeMap, Array(node), Array.emptyIntArray, node.size, node.cachedJavaKeySetHashCode)
+ }
+ }
+ }
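+
+  // Worked sketch (illustrative, assumed hash values): with BitPartitionSize == 5,
+  // maskFrom(hash, shift) selects bits [shift, shift + 5) of the improved hash, so two hashes
+  // that agree on their lowest 5 bits collide at shift 0 and force a sub-node one level deeper:
+  //   keyHash0 = 0x41, keyHash1 = 0x21  =>  mask0 == mask1 == 1 at shift 0
+  //   recurse with shift 5, where the masks become 2 and 1 and the payloads fit side by side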
+
+ def hasPayload: Boolean = dataMap != 0
+
+ def payloadArity: Int = bitCount(dataMap)
+
+ def hasNodes: Boolean = nodeMap != 0
+
+ def nodeArity: Int = bitCount(nodeMap)
+
+ def dataIndex(bitpos: Int) = bitCount(dataMap & (bitpos - 1))
+
+ def nodeIndex(bitpos: Int) = bitCount(nodeMap & (bitpos - 1))
+
+ def copyAndSetNode(bitpos: Int, oldNode: SetNode[A], newNode: SetNode[A]) = {
+ val idx = this.content.length - 1 - this.nodeIndex(bitpos)
+
+ val src = this.content
+ val dst = new Array[Any](src.length)
+
+ // copy 'src' and set 1 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, src.length)
+ dst(idx) = newNode
+ new BitmapIndexedSetNode[A](
+ dataMap = dataMap,
+ nodeMap = nodeMap,
+ content = dst,
+ originalHashes = originalHashes,
+ size = size - oldNode.size + newNode.size,
+ cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + newNode.cachedJavaKeySetHashCode
+ )
+ }
+
+ def copyAndInsertValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length + 1)
+
+ // copy 'src' and insert 1 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ dst(idx) = key
+ arraycopy(src, idx, dst, idx + 1, src.length - idx)
+ val dstHashes = insertElement(originalHashes, dataIx, originalHash)
+
+ new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, dstHashes, size + 1, cachedJavaKeySetHashCode + elementHash)
+ }
+
+ def copyAndSetValue(bitpos: Int, key: A, originalHash: Int, elementHash: Int) = {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length)
+
+ // copy 'src' and set 1 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, src.length)
+ dst(idx) = key
+
+ new BitmapIndexedSetNode[A](dataMap | bitpos, nodeMap, dst, originalHashes, size, cachedJavaKeySetHashCode)
+ }
+
+ def copyAndRemoveValue(bitpos: Int, elementHash: Int) = {
+ val dataIx = dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = this.content
+ val dst = new Array[Any](src.length - 1)
+
+ // copy 'src' and remove 1 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ arraycopy(src, idx + 1, dst, idx, src.length - idx - 1)
+ val dstHashes = removeElement(originalHashes, dataIx)
+ new BitmapIndexedSetNode[A](dataMap ^ bitpos, nodeMap, dst, dstHashes, size - 1, cachedJavaKeySetHashCode - elementHash)
+ }
+
+ def copyAndMigrateFromInlineToNode(bitpos: Int, elementHash: Int, node: SetNode[A]) = {
+ val dataIx = dataIndex(bitpos)
+ val idxOld = TupleLength * dataIx
+ val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+ val src = this.content
+ val dst = new Array[Any](src.length - 1 + 1)
+
+ // copy 'src' and remove 1 element(s) at position 'idxOld' and
+ // insert 1 element(s) at position 'idxNew'
+ // assert(idxOld <= idxNew)
+ arraycopy(src, 0, dst, 0, idxOld)
+ arraycopy(src, idxOld + 1, dst, idxOld, idxNew - idxOld)
+ dst(idxNew) = node
+ arraycopy(src, idxNew + 1, dst, idxNew + 1, src.length - idxNew - 1)
+ val dstHashes = removeElement(originalHashes, dataIx)
+ new BitmapIndexedSetNode[A](
+ dataMap = dataMap ^ bitpos,
+ nodeMap = nodeMap | bitpos,
+ content = dst, originalHashes = dstHashes,
+ size = size - 1 + node.size,
+ cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - elementHash + node.cachedJavaKeySetHashCode
+ )
+ }
+
+  /** Variant of `copyAndMigrateFromInlineToNode` which mutates `this` rather than returning a new node.
+ *
+   * Note: this method mutates `this`, including the `this.content` array.
+   *
+   * Mutating `this.content` is an optimization that is not possible in maps: since TupleLength == 1 for sets,
+   * the content array size does not change during inline <-> node migrations. Therefore, since we are updating
+   * in-place, we reuse `this.content` by shifting data/nodes around rather than allocating a new array.
+ *
+ * @param bitpos the bit position of the data to migrate to node
+ * @param keyHash the improved hash of the element currently at `bitpos`
+ * @param node the node to place at `bitpos`
+ */
+ def migrateFromInlineToNodeInPlace(bitpos: Int, keyHash: Int, node: SetNode[A]): this.type = {
+ val dataIx = dataIndex(bitpos)
+ val idxOld = TupleLength * dataIx
+ val idxNew = this.content.length - TupleLength - nodeIndex(bitpos)
+
+ arraycopy(content, idxOld + TupleLength, content, idxOld, idxNew - idxOld)
+ content(idxNew) = node
+
+ this.dataMap = this.dataMap ^ bitpos
+ this.nodeMap = this.nodeMap | bitpos
+ this.originalHashes = removeElement(originalHashes, dataIx)
+ this.size = this.size - 1 + node.size
+ this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - keyHash + node.cachedJavaKeySetHashCode
+ this
+ }
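+
+  // In-place layout sketch (illustrative, assumed bit positions): payloads live at the front of
+  // `content`, nodes at the back in reverse order; with TupleLength == 1 the array length never changes:
+  //   before: [p0, p1, n1, n0]   -- migrate p0 into new sub-node m --
+  //   after:  [p1, m, n1, n0]    (same array instance; dataMap bit cleared, nodeMap bit set)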
+
+ def copyAndMigrateFromNodeToInline(bitpos: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]) = {
+ val idxOld = this.content.length - 1 - nodeIndex(bitpos)
+ val dataIxNew = dataIndex(bitpos)
+ val idxNew = TupleLength * dataIxNew
+
+ val src = this.content
+ val dst = new Array[Any](src.length - 1 + 1)
+
+ // copy 'src' and remove 1 element(s) at position 'idxOld' and
+ // insert 1 element(s) at position 'idxNew'
+ // assert(idxOld >= idxNew)
+ arraycopy(src, 0, dst, 0, idxNew)
+ dst(idxNew) = node.getPayload(0)
+ arraycopy(src, idxNew, dst, idxNew + 1, idxOld - idxNew)
+ arraycopy(src, idxOld + 1, dst, idxOld + 1, src.length - idxOld - 1)
+ val hash = node.getHash(0)
+ val dstHashes = insertElement(originalHashes, dataIxNew, hash)
+ new BitmapIndexedSetNode[A](
+ dataMap = dataMap | bitpos,
+ nodeMap = nodeMap ^ bitpos,
+ content = dst,
+ originalHashes = dstHashes,
+ size = size - oldNode.size + 1,
+ cachedJavaKeySetHashCode = cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
+ )
+ }
+
+ /** Variant of `copyAndMigrateFromNodeToInline` which mutates `this` rather than returning a new node.
+ *
+   * Note: this method mutates `this`, including the `this.content` array.
+   *
+   * Mutating `this.content` is an optimization that is not possible in maps: since TupleLength == 1 for sets,
+   * the content array size does not change during inline <-> node migrations. Therefore, since we are updating
+   * in-place, we reuse `this.content` by shifting data/nodes around rather than allocating a new array.
+ *
+ * @param bitpos the bit position of the node to migrate inline
+ * @param oldNode the node currently stored at position `bitpos`
+ * @param node the node containing the single element to migrate inline
+ */
+ def migrateFromNodeToInlineInPlace(bitpos: Int, originalHash: Int, elementHash: Int, oldNode: SetNode[A], node: SetNode[A]): Unit = {
+ val idxOld = this.content.length - 1 - nodeIndex(bitpos)
+ val dataIxNew = dataIndex(bitpos)
+ val element = node.getPayload(0)
+ arraycopy(content, dataIxNew, content, dataIxNew + 1, idxOld - dataIxNew)
+ content(dataIxNew) = element
+ val hash = node.getHash(0)
+ val dstHashes = insertElement(originalHashes, dataIxNew, hash)
+
+ this.dataMap = this.dataMap | bitpos
+ this.nodeMap = this.nodeMap ^ bitpos
+ this.originalHashes = dstHashes
+ this.size = this.size - oldNode.size + 1
+ this.cachedJavaKeySetHashCode = this.cachedJavaKeySetHashCode - oldNode.cachedJavaKeySetHashCode + node.cachedJavaKeySetHashCode
+ }
+
+ def foreach[U](f: A => U): Unit = {
+ val thisPayloadArity = payloadArity
+ var i = 0
+ while (i < thisPayloadArity) {
+ f(getPayload(i))
+ i += 1
+ }
+
+ val thisNodeArity = nodeArity
+ var j = 0
+ while (j < thisNodeArity) {
+ getNode(j).foreach(f)
+ j += 1
+ }
+ }
+
+ def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match {
+ case _: HashCollisionSetNode[A] => false
+ case node: BitmapIndexedSetNode[A] =>
+ val thisBitmap = this.dataMap | this.nodeMap
+ val nodeBitmap = node.dataMap | node.nodeMap
+
+ if ((thisBitmap | nodeBitmap) != nodeBitmap)
+ return false
+
+ var bitmap = thisBitmap & nodeBitmap
+ var bitsToSkip = numberOfTrailingZeros(bitmap)
+
+ var isValidSubset = true
+ while (isValidSubset && bitsToSkip < HashCodeLength) {
+ val bitpos = bitposFrom(bitsToSkip)
+
+ isValidSubset =
+ if ((this.dataMap & bitpos) != 0) {
+ if ((node.dataMap & bitpos) != 0) {
+ // Data x Data
+ val payload0 = this.getPayload(indexFrom(this.dataMap, bitpos))
+ val payload1 = node.getPayload(indexFrom(node.dataMap, bitpos))
+ payload0 == payload1
+ } else {
+ // Data x Node
+ val thisDataIndex = indexFrom(this.dataMap, bitpos)
+ val payload = this.getPayload(thisDataIndex)
+            val subNode = node.getNode(indexFrom(node.nodeMap, bitpos))
+ val elementUnimprovedHash = getHash(thisDataIndex)
+ val elementHash = improve(elementUnimprovedHash)
+ subNode.contains(payload, elementUnimprovedHash, elementHash, shift + BitPartitionSize)
+ }
+ } else {
+ // Node x Node
+ val subNode0 = this.getNode(indexFrom(this.nodeMap, bitpos))
+ val subNode1 = node.getNode(indexFrom(node.nodeMap, bitpos))
+ subNode0.subsetOf(subNode1, shift + BitPartitionSize)
+ }
+
+ val newBitmap = bitmap ^ bitpos
+ bitmap = newBitmap
+ bitsToSkip = numberOfTrailingZeros(newBitmap)
+ }
+ isValidSubset
+ }
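+
+  // Bitmap pre-check sketch (illustrative, assumed bitmaps): if `this` occupies any bit position
+  // that `node` does not, it cannot be a subset and the method returns early, e.g.
+  //   thisBitmap = 0b0110, nodeBitmap = 0b0100  =>  (thisBitmap | nodeBitmap) != nodeBitmap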
+
+ override def filterImpl(pred: A => Boolean, flipped: Boolean): BitmapIndexedSetNode[A] = {
+ if (size == 0) this
+ else if (size == 1) {
+ if (pred(getPayload(0)) != flipped) this else SetNode.empty
+ } else if (nodeMap == 0) {
+ // Performance optimization for nodes of depth 1:
+ //
+      // this node has no "node" children, all children are inlined data elems, therefore the logic is
+      // significantly simpler (see the illustrative sketch after this method)
+ // approach:
+ // * traverse the content array, accumulating in `newDataMap: Int` any bit positions of keys which pass the filter
+ // * (bitCount(newDataMap) * TupleLength) tells us the new content array and originalHashes array size, so now perform allocations
+      //   * traverse the content array once more, placing each passing element (according to `newDataMap`) in the new content and originalHashes arrays
+ //
+ // note:
+      //   * this optimization significantly improves performance not only of small trees but also of larger
+      //     trees, since even non-root nodes benefit from it, and a large tree consists of many descendant
+      //     nodes
+ //
+ val minimumIndex: Int = Integer.numberOfTrailingZeros(dataMap)
+ val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(dataMap)
+
+ var newDataMap = 0
+ var newCachedHashCode = 0
+ var dataIndex = 0
+
+ var i = minimumIndex
+
+ while(i < maximumIndex) {
+ val bitpos = bitposFrom(i)
+
+ if ((bitpos & dataMap) != 0) {
+ val payload = getPayload(dataIndex)
+ val passed = pred(payload) != flipped
+
+ if (passed) {
+ newDataMap |= bitpos
+ newCachedHashCode += improve(getHash(dataIndex))
+ }
+
+ dataIndex += 1
+ }
+
+ i += 1
+ }
+
+ if (newDataMap == 0) {
+ SetNode.empty
+ } else if (newDataMap == dataMap) {
+ this
+ } else {
+ val newSize = Integer.bitCount(newDataMap)
+ val newContent = new Array[Any](newSize)
+ val newOriginalHashCodes = new Array[Int](newSize)
+ val newMaximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(newDataMap)
+
+ var j = Integer.numberOfTrailingZeros(newDataMap)
+
+ var newDataIndex = 0
+
+ while (j < newMaximumIndex) {
+ val bitpos = bitposFrom(j)
+ if ((bitpos & newDataMap) != 0) {
+ val oldIndex = indexFrom(dataMap, bitpos)
+ newContent(newDataIndex) = content(oldIndex)
+ newOriginalHashCodes(newDataIndex) = originalHashes(oldIndex)
+ newDataIndex += 1
+ }
+ j += 1
+ }
+
+ new BitmapIndexedSetNode(newDataMap, 0, newContent, newOriginalHashCodes, newSize, newCachedHashCode)
+ }
+ } else {
+ val allMap = dataMap | nodeMap
+ val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap)
+ val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap)
+
+ var oldDataPassThrough = 0
+
+ // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data
+ var nodeMigrateToDataTargetMap = 0
+
+ // TODO: When filtering results in a single-elem node, simply `(A, originalHash, improvedHash)` could be returned,
+ // rather than a singleton node (to avoid pointlessly allocating arrays, nodes, which would just be inlined in
+ // the parent anyways). This would probably involve changing the return type of filterImpl to `AnyRef` which may
+ // return at runtime a SetNode[A], or a tuple of (A, Int, Int)
+
+ // the queue of single-element, post-filter nodes
+ var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null
+
+ // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node
+ var nodesToPassThroughMap = 0
+
+ // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself
+ // These are stored for later inclusion into the final `content` array
+ // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular)
+ var mapOfNewNodes = 0
+ // each bit in `mapOfNewNodes` corresponds to one element in this queue
+ var newNodes: mutable.Queue[SetNode[A]] = null
+
+ var newDataMap = 0
+ var newNodeMap = 0
+ var newSize = 0
+ var newCachedHashCode = 0
+
+ var dataIndex = 0
+ var nodeIndex = 0
+
+ var i = minimumIndex
+ while (i < maximumIndex) {
+ val bitpos = bitposFrom(i)
+
+ if ((bitpos & dataMap) != 0) {
+ val payload = getPayload(dataIndex)
+ val passed = pred(payload) != flipped
+
+ if (passed) {
+ newDataMap |= bitpos
+ oldDataPassThrough |= bitpos
+ newSize += 1
+ newCachedHashCode += improve(getHash(dataIndex))
+ }
+
+ dataIndex += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ val oldSubNode = getNode(nodeIndex)
+ val newSubNode = oldSubNode.filterImpl(pred, flipped)
+
+ newSize += newSubNode.size
+ newCachedHashCode += newSubNode.cachedJavaKeySetHashCode
+
+ // if (newSubNode.size == 0) do nothing (drop it)
+ if (newSubNode.size > 1) {
+ newNodeMap |= bitpos
+ if (oldSubNode eq newSubNode) {
+ nodesToPassThroughMap |= bitpos
+ } else {
+ mapOfNewNodes |= bitpos
+ if (newNodes eq null) {
+ newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures]
+ }
+ newNodes += newSubNode
+ }
+ } else if (newSubNode.size == 1) {
+ newDataMap |= bitpos
+ nodeMigrateToDataTargetMap |= bitpos
+ if (nodesToMigrateToData eq null) {
+ nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures]
+ }
+ nodesToMigrateToData += newSubNode
+ }
+
+ nodeIndex += 1
+ }
+
+ i += 1
+ }
+
+ this.newNodeFrom(
+ newSize = newSize,
+ newDataMap = newDataMap,
+ newNodeMap = newNodeMap,
+ minimumIndex = minimumIndex,
+ oldDataPassThrough = oldDataPassThrough,
+ nodesToPassThroughMap = nodesToPassThroughMap,
+ nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap,
+ nodesToMigrateToData = nodesToMigrateToData,
+ mapOfNewNodes = mapOfNewNodes,
+ newNodes = newNodes,
+ newCachedHashCode = newCachedHashCode
+ )
+ }
+ }
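+
+  // Depth-1 two-pass sketch (illustrative, assumed bitmaps): the first pass accumulates surviving
+  // bit positions, the second copies only those payloads, e.g.
+  //   dataMap = 0b1011 (three payloads), predicate keeps the elements at masks 0 and 3
+  //   => newDataMap = 0b1001, new content/originalHashes arrays of length 2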
+
+ override def diff(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match {
+ case bm: BitmapIndexedSetNode[A] =>
+ if (size == 0) this
+ else if (size == 1) {
+ val h = getHash(0)
+ if (that.contains(getPayload(0), h, improve(h), shift)) SetNode.empty else this
+ } else {
+ val allMap = dataMap | nodeMap
+ val minimumIndex: Int = Integer.numberOfTrailingZeros(allMap)
+ val maximumIndex: Int = Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap)
+
+ var oldDataPassThrough = 0
+
+ // bitmap of nodes which, when filtered, returned a single-element node. These must be migrated to data
+ var nodeMigrateToDataTargetMap = 0
+ // the queue of single-element, post-filter nodes
+ var nodesToMigrateToData: mutable.Queue[SetNode[A]] = null
+
+ // bitmap of all nodes which, when filtered, returned themselves. They are passed forward to the returned node
+ var nodesToPassThroughMap = 0
+
+ // bitmap of any nodes which, after being filtered, returned a node that is not empty, but also not `eq` itself
+ // These are stored for later inclusion into the final `content` array
+ // not named `newNodesMap` (plural) to avoid confusion with `newNodeMap` (singular)
+ var mapOfNewNodes = 0
+ // each bit in `mapOfNewNodes` corresponds to one element in this queue
+ var newNodes: mutable.Queue[SetNode[A]] = null
+
+ var newDataMap = 0
+ var newNodeMap = 0
+ var newSize = 0
+ var newCachedHashCode = 0
+
+ var dataIndex = 0
+ var nodeIndex = 0
+
+ var i = minimumIndex
+ while (i < maximumIndex) {
+ val bitpos = bitposFrom(i)
+
+ if ((bitpos & dataMap) != 0) {
+ val payload = getPayload(dataIndex)
+ val originalHash = getHash(dataIndex)
+ val hash = improve(originalHash)
+
+ if (!bm.contains(payload, originalHash, hash, shift)) {
+ newDataMap |= bitpos
+ oldDataPassThrough |= bitpos
+ newSize += 1
+ newCachedHashCode += hash
+ }
+
+ dataIndex += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ val oldSubNode = getNode(nodeIndex)
+
+ val newSubNode: SetNode[A] =
+ if ((bitpos & bm.dataMap) != 0) {
+ val thatDataIndex = indexFrom(bm.dataMap, bitpos)
+ val thatPayload = bm.getPayload(thatDataIndex)
+ val thatOriginalHash = bm.getHash(thatDataIndex)
+ val thatHash = improve(thatOriginalHash)
+ oldSubNode.removed(thatPayload, thatOriginalHash, thatHash, shift + BitPartitionSize)
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ oldSubNode.diff(bm.getNode(indexFrom(bm.nodeMap, bitpos)), shift + BitPartitionSize)
+ } else {
+ oldSubNode
+ }
+
+ newSize += newSubNode.size
+ newCachedHashCode += newSubNode.cachedJavaKeySetHashCode
+
+ // if (newSubNode.size == 0) do nothing (drop it)
+ if (newSubNode.size > 1) {
+ newNodeMap |= bitpos
+ if (oldSubNode eq newSubNode) {
+ nodesToPassThroughMap |= bitpos
+ } else {
+ mapOfNewNodes |= bitpos
+ if (newNodes eq null) {
+ newNodes = mutable.Queue.empty[SetNode[A] @uncheckedCaptures]
+ }
+ newNodes += newSubNode
+ }
+ } else if (newSubNode.size == 1) {
+ newDataMap |= bitpos
+ nodeMigrateToDataTargetMap |= bitpos
+ if (nodesToMigrateToData eq null) {
+ nodesToMigrateToData = mutable.Queue.empty[SetNode[A] @uncheckedCaptures]
+ }
+ nodesToMigrateToData += newSubNode
+ }
+
+ nodeIndex += 1
+ }
+
+ i += 1
+ }
+ this.newNodeFrom(
+ newSize = newSize,
+ newDataMap = newDataMap,
+ newNodeMap = newNodeMap,
+ minimumIndex = minimumIndex,
+ oldDataPassThrough = oldDataPassThrough,
+ nodesToPassThroughMap = nodesToPassThroughMap,
+ nodeMigrateToDataTargetMap = nodeMigrateToDataTargetMap,
+ nodesToMigrateToData = nodesToMigrateToData,
+ mapOfNewNodes = mapOfNewNodes,
+ newNodes = newNodes,
+ newCachedHashCode = newCachedHashCode
+ )
+ }
+ case _: HashCollisionSetNode[A] =>
+      // this branch should never happen, because HashCollisionSetNodes and BitmapIndexedSetNodes never occur
+      // at the same depth
+ throw new RuntimeException("BitmapIndexedSetNode diff HashCollisionSetNode")
+ }
+
+ /** Utility method only for use in `diff` and `filterImpl`
+ *
+ * @param newSize the size of the new SetNode
+ * @param newDataMap the dataMap of the new SetNode
+ * @param newNodeMap the nodeMap of the new SetNode
+    * @param minimumIndex the minimum index (in the range [0, 31]) for which there are sub-nodes or data beneath the new
+ * SetNode
+ * @param oldDataPassThrough bitmap representing all the data that are just passed from `this` to the new
+ * SetNode
+ * @param nodesToPassThroughMap bitmap representing all nodes that are just passed from `this` to the new SetNode
+ * @param nodeMigrateToDataTargetMap bitmap representing all positions which will now be data in the new SetNode,
+ * but which were nodes in `this`
+ * @param nodesToMigrateToData a queue (in order of child position) of single-element nodes, which will be migrated
+ * to data, in positions in the `nodeMigrateToDataTargetMap`
+ * @param mapOfNewNodes bitmap of positions of new nodes to include in the new SetNode
+ * @param newNodes queue in order of child position, of all new nodes to include in the new SetNode
+ * @param newCachedHashCode the cached java keyset hashcode of the new SetNode
+ */
+ private[this] def newNodeFrom(
+ newSize: Int,
+ newDataMap: Int,
+ newNodeMap: Int,
+ minimumIndex: Int,
+ oldDataPassThrough: Int,
+ nodesToPassThroughMap: Int,
+ nodeMigrateToDataTargetMap: Int,
+ nodesToMigrateToData: mutable.Queue[SetNode[A]],
+ mapOfNewNodes: Int,
+ newNodes: mutable.Queue[SetNode[A]],
+ newCachedHashCode: Int): BitmapIndexedSetNode[A] = {
+ if (newSize == 0) {
+ SetNode.empty
+ } else if (newSize == size) {
+ this
+ } else {
+ val newDataSize = bitCount(newDataMap)
+ val newContentSize = newDataSize + bitCount(newNodeMap)
+ val newContent = new Array[Any](newContentSize)
+ val newOriginalHashes = new Array[Int](newDataSize)
+
+ val newAllMap = newDataMap | newNodeMap
+ val maxIndex = Node.BranchingFactor - Integer.numberOfLeadingZeros(newAllMap)
+
+      // note: We MUST start from the minimum index in the old (`this`) node, otherwise `old{Node,Data}Index`
+      // will not be incremented properly. Were it not for that, we could have started at
+      // Integer.numberOfTrailingZeros(newAllMap)
+ var i = minimumIndex
+
+ var oldDataIndex = 0
+ var oldNodeIndex = 0
+
+ var newDataIndex = 0
+ var newNodeIndex = 0
+
+ while (i < maxIndex) {
+ val bitpos = bitposFrom(i)
+
+ if ((bitpos & oldDataPassThrough) != 0) {
+ newContent(newDataIndex) = getPayload(oldDataIndex)
+ newOriginalHashes(newDataIndex) = getHash(oldDataIndex)
+ newDataIndex += 1
+ oldDataIndex += 1
+ } else if ((bitpos & nodesToPassThroughMap) != 0) {
+ newContent(newContentSize - newNodeIndex - 1) = getNode(oldNodeIndex)
+ newNodeIndex += 1
+ oldNodeIndex += 1
+ } else if ((bitpos & nodeMigrateToDataTargetMap) != 0) {
+          // we need not check for null here. If nodeMigrateToDataTargetMap != 0, then nodesToMigrateToData must not be null
+ val node = nodesToMigrateToData.dequeue()
+ newContent(newDataIndex) = node.getPayload(0)
+ newOriginalHashes(newDataIndex) = node.getHash(0)
+ newDataIndex += 1
+ oldNodeIndex += 1
+ } else if ((bitpos & mapOfNewNodes) != 0) {
+ // we need not check for null here. If mapOfNewNodes != 0, then newNodes must not be null
+ newContent(newContentSize - newNodeIndex - 1) = newNodes.dequeue()
+ newNodeIndex += 1
+ oldNodeIndex += 1
+ } else if ((bitpos & dataMap) != 0) {
+ oldDataIndex += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ oldNodeIndex += 1
+ }
+
+ i += 1
+ }
+
+ new BitmapIndexedSetNode[A](newDataMap, newNodeMap, newContent, newOriginalHashes, newSize, newCachedHashCode)
+ }
+ }
+
+
+ override def equals(that: Any): Boolean =
+ that match {
+ case node: BitmapIndexedSetNode[_] =>
+ (this eq node) ||
+ (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) &&
+ (this.nodeMap == node.nodeMap) &&
+ (this.dataMap == node.dataMap) &&
+ (this.size == node.size) &&
+ java.util.Arrays.equals(this.originalHashes, node.originalHashes) &&
+ deepContentEquality(this.content, node.content, content.length)
+ case _ => false
+ }
+
+ @`inline` private def deepContentEquality(a1: Array[Any], a2: Array[Any], length: Int): Boolean = {
+ if (a1 eq a2)
+ true
+ else {
+ var isEqual = true
+ var i = 0
+
+ while (isEqual && i < length) {
+ isEqual = a1(i) == a2(i)
+ i += 1
+ }
+
+ isEqual
+ }
+ }
+
+ override def hashCode(): Int =
+ throw new UnsupportedOperationException("Trie nodes do not support hashing.")
+
+ override def copy(): BitmapIndexedSetNode[A] = {
+ val contentClone = content.clone()
+ val contentLength = contentClone.length
+ var i = bitCount(dataMap)
+ while (i < contentLength) {
+ contentClone(i) = contentClone(i).asInstanceOf[SetNode[A]].copy()
+ i += 1
+ }
+ new BitmapIndexedSetNode[A](dataMap, nodeMap, contentClone, originalHashes.clone(), size, cachedJavaKeySetHashCode)
+ }
+
+ override def concat(that: SetNode[A], shift: Int): BitmapIndexedSetNode[A] = that match {
+ case bm: BitmapIndexedSetNode[A] =>
+ if (size == 0) return bm
+ else if (bm.size == 0 || (bm eq this)) return this
+ else if (bm.size == 1) {
+ val originalHash = bm.getHash(0)
+ return this.updated(bm.getPayload(0), originalHash, improve(originalHash), shift)
+ }
+
+ // if we go through the merge and the result does not differ from `this`, we can just return `this`, to improve sharing
+ // So, `anyChangesMadeSoFar` will be set to `true` as soon as we encounter a difference between the
+ // currently-being-computed result, and `this`
+ var anyChangesMadeSoFar = false
+
+ // bitmap containing `1` in any position that has any descendant in either left or right, either data or node
+ val allMap = dataMap | bm.dataMap | nodeMap | bm.nodeMap
+
+      // minimumBitPos is inclusive -- it is the bit position of the first index for which there is data or nodes
+ val minimumBitPos: Int = Node.bitposFrom(Integer.numberOfTrailingZeros(allMap))
+      // maximumBitPos is inclusive -- it is the bit position of the last index for which there is data or nodes;
+      // it cannot be exclusive, because in the worst case the upper bound (Node.BranchingFactor) would not be
+      // representable as an Int bit position
+ val maximumBitPos: Int = Node.bitposFrom(Node.BranchingFactor - Integer.numberOfLeadingZeros(allMap) - 1)
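+
+      // Sketch (illustrative, assumed allMap): for allMap = 0b0110,
+      //   minimumBitPos = bitposFrom(numberOfTrailingZeros(0b0110)) = bitposFrom(1) = 0b010
+      //   maximumBitPos = bitposFrom(32 - numberOfLeadingZeros(0b0110) - 1) = bitposFrom(2) = 0b100
+      // so the loops below shift bitpos from 0b010 through 0b100, inclusive.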
+
+ var leftNodeRightNode = 0
+ var leftDataRightNode = 0
+ var leftNodeRightData = 0
+ var leftDataOnly = 0
+ var rightDataOnly = 0
+ var leftNodeOnly = 0
+ var rightNodeOnly = 0
+ var leftDataRightDataMigrateToNode = 0
+ var leftDataRightDataLeftOverwrites = 0
+
+ var dataToNodeMigrationTargets = 0
+
+ {
+ var bitpos = minimumBitPos
+ var leftIdx = 0
+ var rightIdx = 0
+ var finished = false
+
+ while (!finished) {
+
+ if ((bitpos & dataMap) != 0) {
+ if ((bitpos & bm.dataMap) != 0) {
+ if (getHash(leftIdx) == bm.getHash(rightIdx) && getPayload(leftIdx) == bm.getPayload(rightIdx)) {
+ leftDataRightDataLeftOverwrites |= bitpos
+ } else {
+ leftDataRightDataMigrateToNode |= bitpos
+ dataToNodeMigrationTargets |= Node.bitposFrom(Node.maskFrom(improve(getHash(leftIdx)), shift))
+ }
+ rightIdx += 1
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ leftDataRightNode |= bitpos
+ } else {
+ leftDataOnly |= bitpos
+ }
+ leftIdx += 1
+ } else if ((bitpos & nodeMap) != 0) {
+ if ((bitpos & bm.dataMap) != 0) {
+ leftNodeRightData |= bitpos
+ rightIdx += 1
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ leftNodeRightNode |= bitpos
+ } else {
+ leftNodeOnly |= bitpos
+ }
+ } else if ((bitpos & bm.dataMap) != 0) {
+ rightDataOnly |= bitpos
+ rightIdx += 1
+ } else if ((bitpos & bm.nodeMap) != 0) {
+ rightNodeOnly |= bitpos
+ }
+
+ if (bitpos == maximumBitPos) {
+ finished = true
+ } else {
+ bitpos = bitpos << 1
+ }
+ }
+ }
+
+
+ val newDataMap = leftDataOnly | rightDataOnly | leftDataRightDataLeftOverwrites
+
+ val newNodeMap =
+ leftNodeRightNode |
+ leftDataRightNode |
+ leftNodeRightData |
+ leftNodeOnly |
+ rightNodeOnly |
+ dataToNodeMigrationTargets
+
+
+ if ((newDataMap == (leftDataOnly | leftDataRightDataLeftOverwrites)) && (newNodeMap == leftNodeOnly)) {
+ // nothing from `bm` will make it into the result -- return early
+ return this
+ }
+
+ val newDataSize = bitCount(newDataMap)
+ val newContentSize = newDataSize + bitCount(newNodeMap)
+
+ val newContent = new Array[Any](newContentSize)
+ val newOriginalHashes = new Array[Int](newDataSize)
+ var newSize = 0
+ var newCachedHashCode = 0
+
+ {
+ var leftDataIdx = 0
+ var rightDataIdx = 0
+ var leftNodeIdx = 0
+ var rightNodeIdx = 0
+
+ val nextShift = shift + Node.BitPartitionSize
+
+ var compressedDataIdx = 0
+ var compressedNodeIdx = 0
+
+ var bitpos = minimumBitPos
+ var finished = false
+
+ while (!finished) {
+
+ if ((bitpos & leftNodeRightNode) != 0) {
+ val leftNode = getNode(leftNodeIdx)
+ val newNode = leftNode.concat(bm.getNode(rightNodeIdx), nextShift)
+ if (leftNode ne newNode) {
+ anyChangesMadeSoFar = true
+ }
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ rightNodeIdx += 1
+ leftNodeIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+
+ } else if ((bitpos & leftDataRightNode) != 0) {
+ anyChangesMadeSoFar = true
+ val newNode = {
+ val n = bm.getNode(rightNodeIdx)
+ val leftPayload = getPayload(leftDataIdx)
+ val leftOriginalHash = getHash(leftDataIdx)
+ val leftImproved = improve(leftOriginalHash)
+ n.updated(leftPayload, leftOriginalHash, leftImproved, nextShift)
+ }
+
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ rightNodeIdx += 1
+ leftDataIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ }
+ else if ((bitpos & leftNodeRightData) != 0) {
+ val newNode = {
+ val rightOriginalHash = bm.getHash(rightDataIdx)
+ val leftNode = getNode(leftNodeIdx)
+ val updated = leftNode.updated(
+ element = bm.getPayload(rightDataIdx),
+ originalHash = bm.getHash(rightDataIdx),
+ hash = improve(rightOriginalHash),
+ shift = nextShift
+ )
+ if (updated ne leftNode) {
+ anyChangesMadeSoFar = true
+ }
+ updated
+ }
+
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ leftNodeIdx += 1
+ rightDataIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+
+ } else if ((bitpos & leftDataOnly) != 0) {
+ val originalHash = originalHashes(leftDataIdx)
+ newContent(compressedDataIdx) = getPayload(leftDataIdx).asInstanceOf[AnyRef]
+ newOriginalHashes(compressedDataIdx) = originalHash
+
+ compressedDataIdx += 1
+ leftDataIdx += 1
+ newSize += 1
+ newCachedHashCode += improve(originalHash)
+ } else if ((bitpos & rightDataOnly) != 0) {
+ anyChangesMadeSoFar = true
+ val originalHash = bm.originalHashes(rightDataIdx)
+ newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef]
+ newOriginalHashes(compressedDataIdx) = originalHash
+
+ compressedDataIdx += 1
+ rightDataIdx += 1
+ newSize += 1
+ newCachedHashCode += improve(originalHash)
+ } else if ((bitpos & leftNodeOnly) != 0) {
+ val newNode = getNode(leftNodeIdx)
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ leftNodeIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ } else if ((bitpos & rightNodeOnly) != 0) {
+ anyChangesMadeSoFar = true
+ val newNode = bm.getNode(rightNodeIdx)
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ rightNodeIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ } else if ((bitpos & leftDataRightDataMigrateToNode) != 0) {
+ anyChangesMadeSoFar = true
+ val newNode = {
+ val leftOriginalHash = getHash(leftDataIdx)
+ val rightOriginalHash = bm.getHash(rightDataIdx)
+
+ bm.mergeTwoKeyValPairs(
+ getPayload(leftDataIdx), leftOriginalHash, improve(leftOriginalHash),
+ bm.getPayload(rightDataIdx), rightOriginalHash, improve(rightOriginalHash),
+ nextShift
+ )
+ }
+
+ newContent(newContentSize - compressedNodeIdx - 1) = newNode
+ compressedNodeIdx += 1
+ leftDataIdx += 1
+ rightDataIdx += 1
+ newSize += newNode.size
+ newCachedHashCode += newNode.cachedJavaKeySetHashCode
+ } else if ((bitpos & leftDataRightDataLeftOverwrites) != 0) {
+ val originalHash = bm.originalHashes(rightDataIdx)
+ newContent(compressedDataIdx) = bm.getPayload(rightDataIdx).asInstanceOf[AnyRef]
+ newOriginalHashes(compressedDataIdx) = originalHash
+
+ compressedDataIdx += 1
+ rightDataIdx += 1
+ newSize += 1
+ newCachedHashCode += improve(originalHash)
+ leftDataIdx += 1
+ }
+
+ if (bitpos == maximumBitPos) {
+ finished = true
+ } else {
+ bitpos = bitpos << 1
+ }
+ }
+ }
+
+ if (anyChangesMadeSoFar)
+ new BitmapIndexedSetNode(
+ dataMap = newDataMap,
+ nodeMap = newNodeMap,
+ content = newContent,
+ originalHashes = newOriginalHashes,
+ size = newSize,
+ cachedJavaKeySetHashCode = newCachedHashCode
+ )
+ else this
+
+ case _ =>
+      // should never happen -- hash collisions are never at the same level as BitmapIndexedSetNodes
+ throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode")
+ }
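+
+  // Categorization sketch (illustrative): the first pass above assigns every occupied bit position
+  // to exactly one of the nine bitmaps, and the second pass consumes them in the same order, e.g. a
+  // position holding data on the left and a node on the right sets `leftDataRightNode`, and the
+  // second pass pushes the left payload into the right node via `updated`.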
+
+ override def foreachWithHash(f: (A, Int) => Unit): Unit = {
+ val iN = payloadArity // arity doesn't change during this operation
+ var i = 0
+ while (i < iN) {
+ f(getPayload(i), getHash(i))
+ i += 1
+ }
+
+ val jN = nodeArity // arity doesn't change during this operation
+ var j = 0
+ while (j < jN) {
+ getNode(j).foreachWithHash(f)
+ j += 1
+ }
+ }
+
+ override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = {
+ val thisPayloadArity = payloadArity
+ var pass = true
+ var i = 0
+ while (i < thisPayloadArity && pass) {
+ pass &&= f(getPayload(i), getHash(i))
+ i += 1
+ }
+
+ val thisNodeArity = nodeArity
+ var j = 0
+ while (j < thisNodeArity && pass) {
+ pass &&= getNode(j).foreachWithHashWhile(f)
+ j += 1
+ }
+ pass
+ }
+}
+
+private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int, var content: Vector[A] @uncheckedCaptures) extends SetNode[A] {
+
+ import Node._
+
+ require(content.length >= 2)
+
+ def contains(element: A, originalHash: Int, hash: Int, shift: Int): Boolean =
+ this.hash == hash && content.contains(element)
+
+ def updated(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] =
+ if (this.contains(element, originalHash, hash, shift)) {
+ this
+ } else {
+ new HashCollisionSetNode[A](originalHash, hash, content.appended(element))
+ }
+
+ /**
+ * Remove an element from the hash collision node.
+ *
+ * When after deletion only one element remains, we return a bit-mapped indexed node with a
+ * singleton element and a hash-prefix for trie level 0. This node will be then a) either become
+ * the new root, or b) unwrapped and inlined deeper in the trie.
+ */
+ def removed(element: A, originalHash: Int, hash: Int, shift: Int): SetNode[A] =
+ if (!this.contains(element, originalHash, hash, shift)) {
+ this
+ } else {
+ val updatedContent = content.filterNot(element0 => element0 == element)
+ // assert(updatedContent.size == content.size - 1)
+
+ updatedContent.size match {
+ case 1 => new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(updatedContent(0)), Array(originalHash), 1, hash)
+ case _ => new HashCollisionSetNode[A](originalHash, hash, updatedContent)
+ }
+ }
+
+ def hasNodes: Boolean = false
+
+ def nodeArity: Int = 0
+
+ def getNode(index: Int): SetNode[A] =
+ throw new IndexOutOfBoundsException("No sub-nodes present in hash-collision leaf node.")
+
+ def hasPayload: Boolean = true
+
+ def payloadArity: Int = content.length
+
+ def getPayload(index: Int): A = content(index)
+
+ override def getHash(index: Int): Int = originalHash
+
+ def size: Int = content.length
+
+ def foreach[U](f: A => U): Unit = {
+ val iter = content.iterator
+ while (iter.hasNext) {
+ f(iter.next())
+ }
+ }
+
+
+ override def cachedJavaKeySetHashCode: Int = size * hash
+
+ def subsetOf(that: SetNode[A], shift: Int): Boolean = if (this eq that) true else that match {
+ case node: HashCollisionSetNode[A] =>
+ this.payloadArity <= node.payloadArity && this.content.forall(node.content.contains)
+ case _ =>
+ false
+ }
+
+ override def filterImpl(pred: A => Boolean, flipped: Boolean): SetNode[A] = {
+ val newContent = content.filterImpl(pred, flipped)
+ val newContentLength = newContent.length
+ if (newContentLength == 0) {
+ SetNode.empty
+ } else if (newContentLength == 1) {
+ new BitmapIndexedSetNode[A](bitposFrom(maskFrom(hash, 0)), 0, Array(newContent.head), Array(originalHash), 1, hash)
+    } else if (newContentLength == content.length) this
+ else new HashCollisionSetNode(originalHash, hash, newContent)
+ }
+
+ override def diff(that: SetNode[A], shift: Int): SetNode[A] =
+ filterImpl(that.contains(_, originalHash, hash, shift), true)
+
+ override def equals(that: Any): Boolean =
+ that match {
+ case node: HashCollisionSetNode[_] =>
+ (this eq node) ||
+ (this.hash == node.hash) &&
+ (this.content.size == node.content.size) &&
+ this.content.forall(node.content.contains)
+ case _ => false
+ }
+
+ override def hashCode(): Int =
+ throw new UnsupportedOperationException("Trie nodes do not support hashing.")
+
+ override def copy() = new HashCollisionSetNode[A](originalHash, hash, content)
+
+ override def concat(that: SetNode[A], shift: Int): SetNode[A] = that match {
+ case hc: HashCollisionSetNode[A] =>
+ if (hc eq this) {
+ this
+ } else {
+ var newContent: VectorBuilder[A] = null
+ val iter = hc.content.iterator
+ while (iter.hasNext) {
+ val nextPayload = iter.next()
+ if (!content.contains(nextPayload)) {
+ if (newContent eq null) {
+ newContent = new VectorBuilder()
+ newContent.addAll(this.content)
+ }
+ newContent.addOne(nextPayload)
+ }
+ }
+ if (newContent eq null) this else new HashCollisionSetNode(originalHash, hash, newContent.result())
+ }
+ case _: BitmapIndexedSetNode[A] =>
+      // should never happen -- hash collisions are never at the same level as BitmapIndexedSetNodes
+ throw new UnsupportedOperationException("Cannot concatenate a HashCollisionSetNode with a BitmapIndexedSetNode")
+ }
+
+ override def foreachWithHash(f: (A, Int) => Unit): Unit = {
+ val iter = content.iterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ f(next.asInstanceOf[A], originalHash)
+ }
+ }
+
+ override def foreachWithHashWhile(f: (A, Int) => Boolean): Boolean = {
+ var stillGoing = true
+ val iter = content.iterator
+ while (iter.hasNext && stillGoing) {
+ val next = iter.next()
+ stillGoing &&= f(next.asInstanceOf[A], originalHash)
+ }
+ stillGoing
+ }
+}
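+
+// Collision sketch (illustrative): two distinct elements whose 32-bit improved hashes are fully
+// equal can no longer be disambiguated once shift >= HashCodeLength, so mergeTwoKeyValPairs places
+// them in a single HashCollisionSetNode, where membership checks fall back to a linear scan of `content`.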
+
+private final class SetIterator[A](rootNode: SetNode[A])
+ extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[A] {
+
+  def next(): A = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ val payload = currentValueNode.getPayload(currentValueCursor)
+ currentValueCursor += 1
+
+ payload
+ }
+
+}
+
+private final class SetReverseIterator[A](rootNode: SetNode[A])
+ extends ChampBaseReverseIterator[SetNode[A]](rootNode) with Iterator[A] {
+
+ def next(): A = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ val payload = currentValueNode.getPayload(currentValueCursor)
+ currentValueCursor -= 1
+
+ payload
+ }
+
+}
+
+private final class SetHashIterator[A](rootNode: SetNode[A])
+ extends ChampBaseIterator[SetNode[A]](rootNode) with Iterator[AnyRef] {
+ private[this] var hash = 0
+ override def hashCode(): Int = hash
+
+ def next(): AnyRef = {
+ if (!hasNext)
+ throw new NoSuchElementException
+
+ hash = currentValueNode.getHash(currentValueCursor)
+ currentValueCursor += 1
+ this
+ }
+
+}
+
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll `immutable.HashSet`
+ * @define coll immutable champ hash set
+ */
+@SerialVersionUID(3L)
+object HashSet extends IterableFactory[HashSet] {
+
+ @transient
+ private final val EmptySet = new HashSet(SetNode.empty)
+
+ def empty[A]: HashSet[A] =
+ EmptySet.asInstanceOf[HashSet[A]]
+
+ def from[A](source: collection.IterableOnce[A]^): HashSet[A] =
+ source match {
+ case hs: HashSet[A] => hs
+ case _ if source.knownSize == 0 => empty[A]
+ case _ => (newBuilder[A] ++= source).result()
+ }
+
+ /** Create a new Builder which can be reused after calling `result()` without an
+ * intermediate call to `clear()` in order to build multiple related results.
+ */
+ def newBuilder[A]: ReusableBuilder[A, HashSet[A]] = new HashSetBuilder
+}
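+
+// Usage sketch (illustrative):
+//   val s  = HashSet.from(List(1, 2, 3))
+//   val s2 = s + 4                       // persistent update: `s` is left unchanged
+//   assert(s2.contains(4) && !s.contains(4))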
+
+/** Builder for HashSet.
+ * $multipleResults
+ */
+private[collection] final class HashSetBuilder[A] extends ReusableBuilder[A, HashSet[A]] {
+ import Node._
+ import SetNode._
+
+ private def newEmptyRootNode = new BitmapIndexedSetNode[A](0, 0, Array.emptyObjectArray.asInstanceOf[Array[Any]], Array.emptyIntArray, 0, 0)
+
+  /** The HashSet last returned by `result()`, if any, otherwise null.
+   * A non-null value indicates that, on the next addition, the elements must be copied to an identical
+   * structure before mutation continues. */
+ private var aliased: HashSet[A] @uncheckedCaptures = _
+
+ private def isAliased: Boolean = aliased != null
+
+  /** The root node of the partially built hash set */
+ private var rootNode: BitmapIndexedSetNode[A] @uncheckedCaptures = newEmptyRootNode
+
+ /** Inserts element `elem` into array `as` at index `ix`, shifting right the trailing elems */
+ private def insertElement(as: Array[Int], ix: Int, elem: Int): Array[Int] = {
+ if (ix < 0) throw new ArrayIndexOutOfBoundsException
+ if (ix > as.length) throw new ArrayIndexOutOfBoundsException
+ val result = new Array[Int](as.length + 1)
+ arraycopy(as, 0, result, 0, ix)
+ result(ix) = elem
+ arraycopy(as, ix, result, ix + 1, as.length - ix)
+ result
+ }
+
+  /** Inserts the element into the BitmapIndexedSetNode. Requires that the element is not already present */
+ private def insertValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, key: A, originalHash: Int, keyHash: Int): Unit = {
+ val dataIx = bm.dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+
+ val src = bm.content
+ val dst = new Array[Any](src.length + TupleLength)
+
+ // copy 'src' and insert 2 element(s) at position 'idx'
+ arraycopy(src, 0, dst, 0, idx)
+ dst(idx) = key
+ arraycopy(src, idx, dst, idx + TupleLength, src.length - idx)
+
+ val dstHashes = insertElement(bm.originalHashes, dataIx, originalHash)
+
+ bm.dataMap = bm.dataMap | bitpos
+ bm.content = dst
+ bm.originalHashes = dstHashes
+ bm.size += 1
+ bm.cachedJavaKeySetHashCode += keyHash
+ }
+
+  /** Mutates `bm` to replace the inline element at bit position `bitpos` with the given element */
+ private def setValue[A1 >: A](bm: BitmapIndexedSetNode[A], bitpos: Int, elem: A): Unit = {
+ val dataIx = bm.dataIndex(bitpos)
+ val idx = TupleLength * dataIx
+ bm.content(idx) = elem
+ }
+
+ def update(setNode: SetNode[A], element: A, originalHash: Int, elementHash: Int, shift: Int): Unit =
+ setNode match {
+ case bm: BitmapIndexedSetNode[A] =>
+ val mask = maskFrom(elementHash, shift)
+ val bitpos = bitposFrom(mask)
+
+ if ((bm.dataMap & bitpos) != 0) {
+ val index = indexFrom(bm.dataMap, mask, bitpos)
+ val element0 = bm.getPayload(index)
+ val element0UnimprovedHash = bm.getHash(index)
+
+ if (element0UnimprovedHash == originalHash && element0 == element) {
+ setValue(bm, bitpos, element0)
+ } else {
+ val element0Hash = improve(element0UnimprovedHash)
+ val subNodeNew = bm.mergeTwoKeyValPairs(element0, element0UnimprovedHash, element0Hash, element, originalHash, elementHash, shift + BitPartitionSize)
+ bm.migrateFromInlineToNodeInPlace(bitpos, element0Hash, subNodeNew)
+ }
+ } else if ((bm.nodeMap & bitpos) != 0) {
+ val index = indexFrom(bm.nodeMap, mask, bitpos)
+ val subNode = bm.getNode(index)
+ val beforeSize = subNode.size
+ val beforeHashCode = subNode.cachedJavaKeySetHashCode
+ update(subNode, element, originalHash, elementHash, shift + BitPartitionSize)
+ bm.size += subNode.size - beforeSize
+ bm.cachedJavaKeySetHashCode += subNode.cachedJavaKeySetHashCode - beforeHashCode
+ } else {
+ insertValue(bm, bitpos, element, originalHash, elementHash)
+ }
+ case hc: HashCollisionSetNode[A] =>
+ val index = hc.content.indexOf(element)
+ if (index < 0) {
+ hc.content = hc.content.appended(element)
+ } else {
+ hc.content = hc.content.updated(index, element)
+ }
+ }
+
+ /** If currently referencing aliased structure, copy elements to new mutable structure */
+  private def ensureUnaliased(): Unit = {
+ if (isAliased) copyElems()
+ aliased = null
+ }
+
+ /** Copy elements to new mutable structure */
+ private def copyElems(): Unit = {
+ rootNode = rootNode.copy()
+ }
+
+ override def result(): HashSet[A] =
+ if (rootNode.size == 0) {
+ HashSet.empty
+ } else if (aliased != null) {
+ aliased
+ } else {
+ aliased = new HashSet(rootNode)
+ releaseFence()
+ aliased
+ }
+
+ override def addOne(elem: A): this.type = {
+ ensureUnaliased()
+ val h = elem.##
+ val im = improve(h)
+ update(rootNode, elem, h, im, 0)
+ this
+ }
+
+ override def addAll(xs: IterableOnce[A]^) = {
+ ensureUnaliased()
+ xs match {
+ case hm: HashSet[A] =>
+ new ChampBaseIterator[SetNode[A]](hm.rootNode) {
+ while(hasNext) {
+ val originalHash = currentValueNode.getHash(currentValueCursor)
+ update(
+ setNode = rootNode,
+ element = currentValueNode.getPayload(currentValueCursor),
+ originalHash = originalHash,
+ elementHash = improve(originalHash),
+ shift = 0
+ )
+ currentValueCursor += 1
+ }
+ }.asInstanceOf // !!! cc gets confused with representation of capture sets in invariant position
+ case other =>
+ val it = other.iterator
+ while(it.hasNext) addOne(it.next())
+ }
+
+ this
+ }
+
+ override def clear(): Unit = {
+ aliased = null
+ if (rootNode.size > 0) {
+      // if rootNode is empty, we will not have given it away anyway; instead we hand out the reused HashSet.empty
+ rootNode = newEmptyRootNode
+ }
+ }
+
+ private[collection] def size: Int = rootNode.size
+
+ override def knownSize: Int = rootNode.size
+}
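+
+// Builder aliasing sketch (illustrative): `result()` may be called repeatedly; once a HashSet has
+// been handed out, the next addition copies the root node before mutating (ensureUnaliased):
+//   val b = HashSet.newBuilder[Int]
+//   b.addOne(1); b.addOne(2)
+//   val s1 = b.result()   // HashSet(1, 2)
+//   b.addOne(3)           // copies before mutating, so s1 is unaffected
+//   val s2 = b.result()   // HashSet(1, 2, 3)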
diff --git a/tests/pos-special/stdlib/collection/immutable/IntMap.scala b/tests/pos-special/stdlib/collection/immutable/IntMap.scala
new file mode 100644
index 000000000000..d7077845b845
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/IntMap.scala
@@ -0,0 +1,504 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import scala.collection.generic.{BitOperations, DefaultSerializationProxy}
+import scala.collection.mutable.{Builder, ImmutableBuilder}
+import scala.annotation.tailrec
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** Utility class for integer maps.
+ */
+private[immutable] object IntMapUtils extends BitOperations.Int {
+ def branchMask(i: Int, j: Int) = highestOneBit(i ^ j)
+
+ def join[T](p1: Int, t1: IntMap[T], p2: Int, t2: IntMap[T]): IntMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
+ if (zero(p1, m)) IntMap.Bin(p, m, t1, t2)
+ else IntMap.Bin(p, m, t2, t1)
+ }
+
+ def bin[T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]): IntMap[T] = (left, right) match {
+ case (left, IntMap.Nil) => left
+ case (IntMap.Nil, right) => right
+ case (left, right) => IntMap.Bin(prefix, mask, left, right)
+ }
+}
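+
+// branchMask sketch (illustrative, assumed prefixes): branchMask isolates the highest bit in which
+// two prefixes differ, and `join` splits the subtrees on that bit:
+//   branchMask(0b0101, 0b0001) == highestOneBit(0b0100) == 0b0100
+//   zero(0b0001, 0b0100) is true, so the tree with prefix 0b0001 becomes the left child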
+
+import IntMapUtils._
+
+/** A companion object for integer maps.
+ *
+ * @define Coll `IntMap`
+ */
+object IntMap {
+  def empty[T]: IntMap[T] = IntMap.Nil
+
+ def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value)
+
+ def apply[T](elems: (Int, T)*): IntMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
+
+ def from[V](coll: IterableOnce[(Int, V)]^): IntMap[V] =
+ newBuilder[V].addAll(coll).result()
+
+ private[immutable] case object Nil extends IntMap[Nothing] {
+ // Important! Without this equals method in place, an infinite
+ // loop from Map.equals => size => pattern-match-on-Nil => equals
+ // develops. Case objects and custom equality don't mix without
+ // careful handling.
+ override def equals(that : Any) = that match {
+ case _: this.type => true
+ case _: IntMap[_] => false // The only empty IntMaps are eq Nil
+ case _ => super.equals(that)
+ }
+ }
+
+ private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{
+ def withValue[S](s: S) =
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]]
+ else IntMap.Tip(key, s)
+ }
+
+ private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] {
+ def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]]
+ else IntMap.Bin[S](prefix, mask, left, right)
+ }
+ }
+
+ def newBuilder[V]: Builder[(Int, V), IntMap[V]] =
+ new ImmutableBuilder[(Int, V), IntMap[V]](empty) {
+ def addOne(elem: (Int, V)): this.type = { elems = elems + elem; this }
+ }
+
+ implicit def toFactory[V](dummy: IntMap.type): Factory[(Int, V), IntMap[V]] = ToFactory.asInstanceOf[Factory[(Int, V), IntMap[V]]]
+
+ @SerialVersionUID(3L)
+ private[this] object ToFactory extends Factory[(Int, AnyRef), IntMap[AnyRef]] with Serializable {
+ def fromSpecific(it: IterableOnce[(Int, AnyRef)]^): IntMap[AnyRef] = IntMap.from[AnyRef](it)
+ def newBuilder: Builder[(Int, AnyRef), IntMap[AnyRef]] = IntMap.newBuilder[AnyRef]
+ }
+
+ implicit def toBuildFrom[V](factory: IntMap.type): BuildFrom[Any, (Int, V), IntMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Int, V), IntMap[V]]]
+ private[this] object ToBuildFrom extends BuildFrom[Any, (Int, AnyRef), IntMap[AnyRef]] {
+ def fromSpecific(from: Any)(it: IterableOnce[(Int, AnyRef)]^) = IntMap.from(it)
+ def newBuilder(from: Any) = IntMap.newBuilder[AnyRef]
+ }
+
+ implicit def iterableFactory[V]: Factory[(Int, V), IntMap[V]] = toFactory(this)
+ implicit def buildFromIntMap[V]: BuildFrom[IntMap[_], (Int, V), IntMap[V]] = toBuildFrom(this)
+}
+
+// Iterator over a non-empty IntMap.
+private[immutable] abstract class IntMapIterator[V, T](it: IntMap[V]) extends AbstractIterator[T] {
+
+  // Basically this uses a simple stack to emulate recursion over the tree. However,
+  // because we know that Ints are at least 32 bits, we can have at most 32 IntMap.Bins and
+  // one IntMap.Tip sitting on the stack at any point. Therefore we know the maximum stack
+  // depth is 33, and a fixed-size buffer of that length suffices.
+ var index = 0
+ var buffer = new Array[AnyRef](33)
+
+ def pop = {
+ index -= 1
+ buffer(index).asInstanceOf[IntMap[V]]
+ }
+
+ def push(x: IntMap[V]): Unit = {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
+ }
+ push(it)
+
+ /**
+ * What value do we assign to a tip?
+ */
+ def valueOf(tip: IntMap.Tip[V]): T
+
+ def hasNext = index != 0
+ @tailrec
+ final def next(): T =
+ pop match {
+ case IntMap.Bin(_,_, t@IntMap.Tip(_, _), right) => {
+ push(right)
+ valueOf(t)
+ }
+ case IntMap.Bin(_, _, left, right) => {
+ push(right)
+ push(left)
+ next()
+ }
+ case t@IntMap.Tip(_, _) => valueOf(t)
+ // This should never happen. We don't allow IntMap.Nil in subtrees of the IntMap
+ // and don't return an IntMapIterator for IntMap.Nil.
+ case IntMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees")
+ }
+}
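+
+// Traversal sketch (illustrative): `next()` pops a subtree, pushes `right` before descending into
+// `left`, and yields at a Tip; since a 32-bit key space bounds any root-to-tip path by 32 Bins plus
+// one Tip, the 33-slot buffer above can never overflow.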
+
+private[immutable] class IntMapEntryIterator[V](it: IntMap[V]) extends IntMapIterator[V, (Int, V)](it) {
+ def valueOf(tip: IntMap.Tip[V]) = (tip.key, tip.value)
+}
+
+private[immutable] class IntMapValueIterator[V](it: IntMap[V]) extends IntMapIterator[V, V](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.value
+}
+
+private[immutable] class IntMapKeyIterator[V](it: IntMap[V]) extends IntMapIterator[V, Int](it) {
+ def valueOf(tip: IntMap.Tip[V]) = tip.key
+}
+
+import IntMap._
+
+/** Specialised immutable map structure for integer keys, based on
+ * [[https://ittc.ku.edu/~andygill/papers/IntMap98.pdf Fast Mergeable Integer Maps]]
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ *
+ * '''Note:''' This class is as of 2.8 largely superseded by HashMap.
+ *
+ * @tparam T type of the values associated with integer keys.
+ *
+ * @define Coll `immutable.IntMap`
+ * @define coll immutable integer map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
+ with StrictOptimizedMapOps[Int, T, Map, IntMap[T]]
+ with Serializable {
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[(Int, T) @uncheckedVariance]^): IntMap[T] =
+ intMapFrom[T](coll)
+ protected def intMapFrom[V2](coll: scala.collection.IterableOnce[(Int, V2)]^): IntMap[V2] = {
+ val b = IntMap.newBuilder[V2]
+ b.sizeHint(coll)
+ b.addAll(coll)
+ b.result()
+ }
+ override protected def newSpecificBuilder: Builder[(Int, T), IntMap[T]] @uncheckedVariance =
+ new ImmutableBuilder[(Int, T), IntMap[T]](empty) {
+ def addOne(elem: (Int, T)): this.type = { elems = elems + elem; this }
+ }
+
+ override def empty: IntMap[T] = IntMap.Nil
+
+ override def toList = {
+ val buffer = new scala.collection.mutable.ListBuffer[(Int, T) @uncheckedCaptures]
+ foreach(buffer += _)
+ buffer.toList
+ }
+
+ /**
+ * Iterator over key, value pairs of the map in unsigned order of the keys.
+ *
+ * @return an iterator over pairs of integer keys and corresponding values.
+ */
+ def iterator: Iterator[(Int, T)] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapEntryIterator(this)
+ }
+
+ /**
+ * Loops over the key, value pairs of the map in unsigned order of the keys.
+ */
+ override final def foreach[U](f: ((Int, T)) => U): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
+ case IntMap.Tip(key, value) => f((key, value))
+ case IntMap.Nil =>
+ }
+
+  override def foreachEntry[U](f: (Int, T) => U): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) }
+ case IntMap.Tip(key, value) => f(key, value)
+ case IntMap.Nil =>
+ }
+
+ override def keysIterator: Iterator[Int] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapKeyIterator(this)
+ }
+
+ /**
+ * Loop over the keys of the map. The same as `keys.foreach(f)`, but may
+ * be more efficient.
+ *
+ * @param f The loop body
+ */
+ final def foreachKey[U](f: Int => U): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case IntMap.Tip(key, _) => f(key)
+ case IntMap.Nil =>
+ }
+
+ override def valuesIterator: Iterator[T] = this match {
+ case IntMap.Nil => Iterator.empty
+ case _ => new IntMapValueIterator(this)
+ }
+
+ /**
+ * Loop over the values of the map. The same as `values.foreach(f)`, but may
+ * be more efficient.
+ *
+ * @param f The loop body
+ */
+ final def foreachValue[U](f: T => U): Unit = this match {
+ case IntMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case IntMap.Tip(_, value) => f(value)
+ case IntMap.Nil =>
+ }
+
+ override protected[this] def className = "IntMap"
+
+ override def isEmpty = this eq IntMap.Nil
+ override def knownSize: Int = if (isEmpty) 0 else super.knownSize
+ override def filter(f: ((Int, T)) => Boolean): IntMap[T] = this match {
+ case IntMap.Bin(prefix, mask, left, right) => {
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
+ }
+ case IntMap.Tip(key, value) =>
+ if (f((key, value))) this
+ else IntMap.Nil
+ case IntMap.Nil => IntMap.Nil
+ }
+
+ override def transform[S](f: (Int, T) => S): IntMap[S] = this match {
+ case b@IntMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t@IntMap.Tip(key, value) => t.withValue(f(key, value))
+ case IntMap.Nil => IntMap.Nil
+ }
+
+ final override def size: Int = this match {
+ case IntMap.Nil => 0
+ case IntMap.Tip(_, _) => 1
+ case IntMap.Bin(_, _, left, right) => left.size + right.size
+ }
+
+ @tailrec
+ final def get(key: Int): Option[T] = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case IntMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case IntMap.Nil => None
+ }
+
+ @tailrec
+ final override def getOrElse[S >: T](key: Int, default: => S): S = this match {
+ case IntMap.Nil => default
+ case IntMap.Tip(key2, value) => if (key == key2) value else default
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
+ }
+
+ @tailrec
+ final override def apply(key: Int): T = this match {
+ case IntMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+    case IntMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("key not found")
+    case IntMap.Nil => throw new IllegalArgumentException("key not found")
+ }
+
+ override def + [S >: T] (kv: (Int, S)): IntMap[S] = updated(kv._1, kv._2)
+
+ override def updated[S >: T](key: Int, value: S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updated(key, value), right)
+ else IntMap.Bin(prefix, mask, left, right.updated(key, value))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, value)
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
+ }
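+
+  // Sketch (illustrative): updating with a key whose bits diverge from an existing Tip joins the
+  // two under the highest differing bit, e.g.
+  //   IntMap(1 -> "a").updated(2, "b")
+  //   // == Bin(prefix = 0, mask = 2, Tip(1, "a"), Tip(2, "b"))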
+
+ def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f))
+
+ def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f))
+
+ override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] =
+ super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such
+
+ override def ++ [V1 >: T](that: collection.IterableOnce[(Int, V1)]^): IntMap[V1] = concat(that)
+
+ def collect[V2](pf: PartialFunction[(Int, T), (Int, V2)]): IntMap[V2] =
+ strictOptimizedCollect(IntMap.newBuilder[V2], pf)
+
+ /**
+ * Updates the map, using the provided function to resolve conflicts if the key is already present.
+ *
+ * Equivalent to:
+ * {{{
+ * this.get(key) match {
+ * case None => this.updated(key, value)
+ * case Some(oldvalue) => this.updated(key, f(oldvalue, value))
+ * }
+ * }}}
+ *
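+ * For illustration (hypothetical values, not from the original source):
+ * {{{
+ * IntMap(1 -> 10).updateWith(1, 5, (old, nw) => old + nw)  // IntMap(1 -> 15)
+ * IntMap(1 -> 10).updateWith(2, 5, (old, nw) => old + nw)  // IntMap(1 -> 10, 2 -> 5)
+ * }}}
+ *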
+ * @tparam S The supertype of values in this `IntMap`.
+ * @param key The key to update
+ * @param value The value to use if there is no conflict
+ * @param f The function used to resolve conflicts.
+ * @return The updated map.
+ */
+ def updateWith[S >: T](key: Int, value: S, f: (T, S) => S): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, IntMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) IntMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else IntMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case IntMap.Tip(key2, value2) =>
+ if (key == key2) IntMap.Tip(key, f(value2, value))
+ else join(key, IntMap.Tip(key, value), key2, this)
+ case IntMap.Nil => IntMap.Tip(key, value)
+ }
+
+ def removed (key: Int): IntMap[T] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
+ case IntMap.Tip(key2, _) =>
+ if (key == key2) IntMap.Nil
+ else this
+ case IntMap.Nil => IntMap.Nil
+ }
+
+ /**
+ * A combined transform and filter function. Returns an `IntMap` such that,
+ * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+ * the map contains no mapping for `key`, and if `f(key, value) == Some(x)`
+ * the map contains `(key, x)`.
+ *
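+ * For illustration (hypothetical values, not from the original source):
+ * {{{
+ * IntMap(1 -> 2, 3 -> 4).modifyOrRemove((k, v) => if (k == 1) Some(v * 10) else None)
+ * // result: IntMap(1 -> 20)
+ * }}}
+ *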
+ * @tparam S The type of the values in the resulting `IntMap`.
+ * @param f The transforming function.
+ * @return The modified map.
+ */
+ def modifyOrRemove[S](f: (Int, T) => Option[S]): IntMap[S] = this match {
+ case IntMap.Bin(prefix, mask, left, right) =>
+ val newleft = left.modifyOrRemove(f)
+ val newright = right.modifyOrRemove(f)
+ if ((left eq newleft) && (right eq newright)) this.asInstanceOf[IntMap[S]]
+ else bin(prefix, mask, newleft, newright)
+ case IntMap.Tip(key, value) => f(key, value) match {
+ case None =>
+ IntMap.Nil
+ case Some(value2) =>
+ //hack to preserve sharing
+ if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap[S]]
+ else IntMap.Tip(key, value2)
+ }
+ case IntMap.Nil =>
+ IntMap.Nil
+ }
+
+ /**
+ * Forms a union map with that map, using the combining function to resolve conflicts.
+ *
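+ * For illustration (hypothetical values, not from the original source):
+ * {{{
+ * val a = IntMap(1 -> "a", 2 -> "b")
+ * val b = IntMap(2 -> "B", 3 -> "C")
+ * a.unionWith(b, (key, x, y) => x + y)  // IntMap(1 -> "a", 2 -> "bB", 3 -> "C")
+ * }}}
+ *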
+ * @tparam S The type of values in `that`, a supertype of values in `this`.
+ * @param that The map to form a union with.
+ * @param f The function used to resolve conflicts between two mappings.
+ * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
+ */
+ def unionWith[S >: T](that: IntMap[S], f: (Int, S, S) => S): IntMap[S] = (this, that) match{
+ case (IntMap.Bin(p1, m1, l1, r1), that@(IntMap.Bin(p2, m2, l2, r2))) =>
+ if (shorter(m1, m2)) {
+ if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that)
+ else if (zero(p2, m1)) IntMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else IntMap.Bin(p1, m1, l1, r1.unionWith(that, f))
+ } else if (shorter(m2, m1)){
+ if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that)
+ else if (zero(p1, m2)) IntMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else IntMap.Bin(p2, m2, l2, this.unionWith(r2, f))
+ }
+ else {
+ if (p1 == p2) IntMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join(p1, this, p2, that)
+ }
+ case (IntMap.Tip(key, value), x) => x.updateWith[S](key, value, (x, y) => f(key, y, x))
+ case (x, IntMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (IntMap.Nil, x) => x
+ case (x, IntMap.Nil) => x
+ }
+
+ /**
+ * Forms the intersection of these two maps with a combining function. The
+ * resulting map is a map that has only keys present in both maps and has
+ * values produced from the original mappings by combining them with `f`.
+ *
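+ * For illustration (hypothetical values, not from the original source):
+ * {{{
+ * val a = IntMap(1 -> 10, 2 -> 20)
+ * val b = IntMap(2 -> "b", 3 -> "c")
+ * a.intersectionWith(b, (key, x, y) => (x, y))  // IntMap(2 -> (20,b))
+ * }}}
+ *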
+ * @tparam S The type of values in `that`.
+ * @tparam R The type of values in the resulting `IntMap`.
+ * @param that The map to intersect with.
+ * @param f The combining function.
+ * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
+ */
+ def intersectionWith[S, R](that: IntMap[S], f: (Int, T, S) => R): IntMap[R] = (this, that) match {
+ case (IntMap.Bin(p1, m1, l1, r1), that@IntMap.Bin(p2, m2, l2, r2)) =>
+ if (shorter(m1, m2)) {
+ if (!hasMatch(p2, p1, m1)) IntMap.Nil
+ else if (zero(p2, m1)) l1.intersectionWith(that, f)
+ else r1.intersectionWith(that, f)
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
+ else {
+ if (!hasMatch(p1, p2, m2)) IntMap.Nil
+ else if (zero(p1, m2)) this.intersectionWith(l2, f)
+ else this.intersectionWith(r2, f)
+ }
+ case (IntMap.Tip(key, value), that) => that.get(key) match {
+ case None => IntMap.Nil
+ case Some(value2) => IntMap.Tip(key, f(key, value, value2))
+ }
+ case (_, IntMap.Tip(key, value)) => this.get(key) match {
+ case None => IntMap.Nil
+ case Some(value2) => IntMap.Tip(key, f(key, value2, value))
+ }
+ case (_, _) => IntMap.Nil
+ }
+
+ /**
+ * Left biased intersection. Returns the map that has all the same mappings
+ * as this but only for keys which are present in the other map.
+ *
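+ * For illustration: `IntMap(1 -> "a", 2 -> "b").intersection(IntMap(2 -> 0))`
+ * is `IntMap(2 -> "b")` (hypothetical values, not from the original source).
+ *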
+ * @tparam R The type of values in `that`.
+ * @param that The map to intersect with.
+ * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
+ */
+ def intersection[R](that: IntMap[R]): IntMap[T] =
+ this.intersectionWith(that, (key: Int, value: T, value2: R) => value)
+
+ def ++[S >: T](that: IntMap[S]) =
+ this.unionWith[S](that, (key, x, y) => y)
+
+ /**
+ * The lowest key of the map, considered in unsigned order.
+ */
+ @tailrec
+ final def firstKey: Int = this match {
+ case Bin(_, _, l, r) => l.firstKey
+ case Tip(k, v) => k
+ case IntMap.Nil => throw new IllegalStateException("Empty map")
+ }
+
+ /**
+ * The highest key of the map, considered in unsigned order.
+ */
+ @tailrec
+ final def lastKey: Int = this match {
+ case Bin(_, _, l, r) => r.lastKey
+ case Tip(k, v) => k
+ case IntMap.Nil => throw new IllegalStateException("Empty map")
+ }
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(IntMap.toFactory[T](IntMap), this)
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/Iterable.scala b/tests/pos-special/stdlib/collection/immutable/Iterable.scala
index 44f13d0f2895..c4f9900eea8b 100644
--- a/tests/pos-special/stdlib/collection/immutable/Iterable.scala
+++ b/tests/pos-special/stdlib/collection/immutable/Iterable.scala
@@ -32,7 +32,7 @@ trait Iterable[+A] extends collection.Iterable[A]
@SerialVersionUID(3L)
object Iterable extends IterableFactory.Delegate[Iterable](List) {
- override def from[E](it: IterableOnce[E]): Iterable[E] = it match {
+ override def from[E](it: IterableOnce[E]^): Iterable[E]^{it} = it match {
case iterable: Iterable[E] => iterable
case _ => super.from(it)
}
diff --git a/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
new file mode 100644
index 000000000000..5684130b6048
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/LazyListIterable.scala
@@ -0,0 +1,1376 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import java.io.{ObjectInputStream, ObjectOutputStream}
+import java.lang.{StringBuilder => JStringBuilder}
+
+import scala.annotation.tailrec
+import scala.collection.generic.SerializeEnd
+import scala.collection.mutable.{Builder, ReusableBuilder, StringBuilder}
+import scala.language.implicitConversions
+import scala.runtime.Statics
+import language.experimental.captureChecking
+import annotation.unchecked.uncheckedCaptures
+
+/** This class implements an immutable linked list. We call it "lazy"
+ * because it computes its elements only when they are needed.
+ *
+ * The class extends Iterable; it is a replacement for LazyList, which
+ * implemented Seq. The reason is that under capture checking, we
+ * assume that all Seqs are strict, and LazyList broke that assumption.
+ * As a consequence, we declare LazyList deprecated and unsafe for
+ * capture checking, and replace it with the current class, LazyListIterable.
+ *
+ * Elements are memoized; that is, the value of each element is computed at most once.
+ *
+ * Elements are computed in-order and are never skipped. In other words,
+ * accessing the tail causes the head to be computed first.
+ *
+ * How lazy is a `LazyListIterable`? When you have a value of type `LazyListIterable`, you
+ * don't know yet whether the list is empty or not. If you learn that it is non-empty,
+ * then you also know that the head has been computed. But the tail is itself
+ * a `LazyListIterable`, whose emptiness-or-not might remain undetermined.
+ *
+ * A `LazyListIterable` may be infinite. For example, `LazyListIterable.from(0)` contains
+ * all of the natural numbers 0, 1, 2, and so on. For infinite sequences,
+ * some methods (such as `count`, `sum`, `max` or `min`) will not terminate.
+ *
+ * Here is an example:
+ *
+ * {{{
+ * import scala.math.BigInt
+ * object Main extends App {
+ * val fibs: LazyListIterable[BigInt] =
+ * BigInt(0) #:: BigInt(1) #:: fibs.zip(fibs.tail).map{ n => n._1 + n._2 }
+ * fibs.take(5).foreach(println)
+ * }
+ *
+ * // prints
+ * //
+ * // 0
+ * // 1
+ * // 1
+ * // 2
+ * // 3
+ * }}}
+ *
+ * To illustrate, let's add some output to the definition `fibs`, so we
+ * see what's going on.
+ *
+ * {{{
+ * import scala.math.BigInt
+ * object Main extends App {
+ * val fibs: LazyListIterable[BigInt] =
+ * BigInt(0) #:: BigInt(1) #::
+ * fibs.zip(fibs.tail).map{ n =>
+ * println(s"Adding \${n._1} and \${n._2}")
+ * n._1 + n._2
+ * }
+ * fibs.take(5).foreach(println)
+ * fibs.take(6).foreach(println)
+ * }
+ *
+ * // prints
+ * //
+ * // 0
+ * // 1
+ * // Adding 0 and 1
+ * // 1
+ * // Adding 1 and 1
+ * // 2
+ * // Adding 1 and 2
+ * // 3
+ *
+ * // And then prints
+ * //
+ * // 0
+ * // 1
+ * // 1
+ * // 2
+ * // 3
+ * // Adding 2 and 3
+ * // 5
+ * }}}
+ *
+ * Note that the definition of `fibs` uses `val` not `def`. The memoization of the
+ * `LazyListIterable` requires us to have somewhere to store the information and a `val`
+ * allows us to do that.
+ *
+ * Further remarks about the semantics of `LazyListIterable`:
+ *
+ * - Though the `LazyListIterable` changes as it is accessed, this does not
+ * contradict its immutability. Once the values are memoized they do
+ * not change. Values that have yet to be memoized still "exist", they
+ * simply haven't been computed yet.
+ *
+ * - One must be cautious of memoization; it can eat up memory if you're not
+ * careful. That's because memoization of the `LazyListIterable` creates a structure much like
+ * [[scala.collection.immutable.List]]. As long as something is holding on to
+ * the head, the head holds on to the tail, and so on recursively.
+ * If, on the other hand, there is nothing holding on to the head (e.g. if we used
+ * `def` to define the `LazyListIterable`) then once it is no longer being used directly,
+ * it disappears.
+ *
+ * - Note that some operations, including [[drop]], [[dropWhile]],
+ * [[flatMap]] or [[collect]] may process a large number of intermediate
+ * elements before returning.
+ *
+ * Here's another example. Let's start with the natural numbers and iterate
+ * over them.
+ *
+ * {{{
+ * // We'll start with a silly iteration
+ * def loop(s: String, i: Int, iter: Iterator[Int]): Unit = {
+ * // Stop after 200,000
+ * if (i < 200001) {
+ * if (i % 50000 == 0) println(s + i)
+ * loop(s, iter.next(), iter)
+ * }
+ * }
+ *
+ * // Our first LazyListIterable definition will be a val definition
+ * val lazylist1: LazyListIterable[Int] = {
+ * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1)
+ * loop(0)
+ * }
+ *
+ * // Because lazylist1 is a val, everything that the iterator produces is held
+ * // by virtue of the fact that the head of the LazyListIterable is held in lazylist1
+ * val it1 = lazylist1.iterator
+ * loop("Iterator1: ", it1.next(), it1)
+ *
+ * // We can redefine this LazyListIterable such that all we have is the Iterator left
+ * // and allow the LazyListIterable to be garbage collected as required. Using a def
+ * // to provide the LazyListIterable ensures that no val is holding onto the head as
+ * // is the case with lazylist1
+ * def lazylist2: LazyListIterable[Int] = {
+ * def loop(v: Int): LazyListIterable[Int] = v #:: loop(v + 1)
+ * loop(0)
+ * }
+ * val it2 = lazylist2.iterator
+ * loop("Iterator2: ", it2.next(), it2)
+ *
+ * // And, of course, we don't actually need a LazyListIterable at all for such a simple
+ * // problem. There's no reason to use a LazyListIterable if you don't actually need
+ * // one.
+ * val it3 = new Iterator[Int] {
+ * var i = -1
+ * def hasNext = true
+ * def next(): Int = { i += 1; i }
+ * }
+ * loop("Iterator3: ", it3.next(), it3)
+ * }}}
+ *
+ * - In the `fibs` example earlier, the fact that `tail` works at all is of interest.
+ * `fibs` has an initial `(0, 1, LazyListIterable(...))`, so `tail` is deterministic.
+ * If we defined `fibs` such that only `0` were concretely known, then the act
+ * of determining `tail` would require the evaluation of `tail`, so the
+ * computation would be unable to progress, as in this code:
+ * {{{
+ * // The first time we try to access the tail we're going to need more
+ * // information which will require us to recurse, which will require us to
+ * // recurse, which...
+ * lazy val sov: LazyListIterable[Vector[Int]] = Vector(0) #:: sov.zip(sov.tail).map { n => n._1 ++ n._2 }
+ * }}}
+ *
+ * The definition of `fibs` above creates a larger number of objects than
+ * necessary depending on how you might want to implement it. The following
+ * implementation provides a more "cost effective" implementation due to the
+ * fact that it has a more direct route to the numbers themselves:
+ *
+ * {{{
+ * lazy val fib: LazyListIterable[Int] = {
+ * def loop(h: Int, n: Int): LazyListIterable[Int] = h #:: loop(n, h + n)
+ * loop(1, 1)
+ * }
+ * }}}
+ *
+ * The head, the tail and whether the list is empty or not can be initially unknown.
+ * Once any of those are evaluated, they are all known, though if the tail is
+ * built with `#::` or `#:::`, its content still isn't evaluated. Instead, evaluating
+ * the tail's content is deferred until the tail's empty status, head or tail is
+ * evaluated.
+ *
+ * Delaying the evaluation of whether a LazyListIterable is empty or not until it's needed
+ * allows LazyListIterable to not eagerly evaluate any elements on a call to `filter`.
+ *
+ * Only when it is further evaluated (which may be never!) are any of the
+ * elements forced.
+ *
+ * For example:
+ *
+ * {{{
+ * def tailWithSideEffect: LazyListIterable[Nothing] = {
+ * println("getting empty LazyListIterable")
+ * LazyListIterable.empty
+ * }
+ *
+ * val emptyTail = tailWithSideEffect // prints "getting empty LazyListIterable"
+ *
+ * val suspended = 1 #:: tailWithSideEffect // doesn't print anything
+ * val tail = suspended.tail // although the tail is evaluated, *still* nothing is yet printed
+ * val filtered = tail.filter(_ => false) // still nothing is printed
+ * filtered.isEmpty // prints "getting empty LazyListIterable"
+ * }}}
+ *
+ * @tparam A the type of the elements contained in this lazy list.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#lazylists "Scala's Collection Library overview"]]
+ * section on `LazyLists` for more information.
+ * @define Coll `LazyListIterable`
+ * @define coll lazy list
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define appendStackSafety Note: Repeated chaining of calls to append methods (`appended`,
+ * `appendedAll`, `lazyAppendedAll`) without forcing any of the
+ * intermediate resulting lazy lists may overflow the stack when
+ * the final result is forced.
+ * @define preservesLaziness This method preserves laziness; elements are only evaluated
+ * individually as needed.
+ * @define initiallyLazy This method does not evaluate anything until an operation is performed
+ * on the result (e.g. calling `head` or `tail`, or checking if it is empty).
+ * @define evaluatesAllElements This method evaluates all elements of the collection.
+ */
+@SerialVersionUID(3L)
+final class LazyListIterable[+A] private(private[this] var lazyState: () => LazyListIterable.State[A]^)
+ extends AbstractIterable[A]
+ with Iterable[A]
+ with IterableOps[A, LazyListIterable, LazyListIterable[A]]
+ with IterableFactoryDefaults[A, LazyListIterable]
+ with Serializable {
+ this: LazyListIterable[A]^ =>
+ import LazyListIterable._
+
+ @volatile private[this] var stateEvaluated: Boolean = false
+ @inline private def stateDefined: Boolean = stateEvaluated
+ private[this] var midEvaluation = false
+
+ private lazy val state: State[A]^ = {
+ // if it's already mid-evaluation, we're stuck in an infinite
+ // self-referential loop (also it's empty)
+ if (midEvaluation) {
+ throw new RuntimeException("self-referential LazyListIterable or a derivation thereof has no more elements")
+ }
+ midEvaluation = true
+ val res = try lazyState() finally midEvaluation = false
+ // if we set it to `true` before evaluating, we may infinite loop
+ // if something expects `state` to already be evaluated
+ stateEvaluated = true
+ lazyState = null // allow GC
+ res
+ }
+
+ override def iterableFactory: IterableFactory[LazyListIterable] = LazyListIterable
+
+ override def isEmpty: Boolean = state eq State.Empty
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def knownSize: Int = if (knownIsEmpty) 0 else -1
+
+ override def head: A = state.head
+
+ override def tail: LazyListIterable[A]^{this} = state.tail
+
+ @inline private[this] def knownIsEmpty: Boolean = stateEvaluated && (isEmpty: @inline)
+ @inline private def knownNonEmpty: Boolean = stateEvaluated && !(isEmpty: @inline)
+
+ /** Evaluates all undefined elements of the lazy list.
+ *
+ * This method detects cycles in lazy lists, and terminates after all
+ * elements of the cycle are evaluated. For example:
+ *
+ * {{{
+ * val ring: LazyListIterable[Int] = 1 #:: 2 #:: 3 #:: ring
+ * ring.force
+ * ring.toString
+ *
+ * // prints
+ * //
+ * // LazyListIterable(1, 2, 3, ...)
+ * }}}
+ *
+ * This method will *not* terminate for non-cyclic infinite-sized collections.
+ *
+ * @return this
+ */
+ def force: this.type = {
+ // Use standard 2x 1x iterator trick for cycle detection ("those" is the slow one)
+ var these, those: LazyListIterable[A]^{this} = this
+ if (!these.isEmpty) {
+ these = these.tail
+ }
+ while (those ne these) {
+ if (these.isEmpty) return this
+ these = these.tail
+ if (these.isEmpty) return this
+ these = these.tail
+ if (these eq those) return this
+ those = those.tail
+ }
+ this
+ }
+
+ /** @inheritdoc
+ *
+ * The iterator returned by this method preserves laziness; elements are
+ * only evaluated individually as needed.
+ */
+ override def iterator: Iterator[A]^{this} =
+ if (knownIsEmpty) Iterator.empty
+ else new LazyIterator(this)
+
+ /** Apply the given function `f` to each element of this linear sequence
+ * (while respecting the order of the elements).
+ *
+ * @param f The treatment to apply to each element.
+ * @note Overridden here as final to trigger tail-call optimization, which
+ * replaces 'this' with 'tail' at each iteration. This is absolutely
+ * necessary for allowing the GC to collect the underlying LazyListIterable as elements
+ * are consumed.
+ * @note This function will force the realization of the entire LazyListIterable
+ * unless the `f` throws an exception.
+ */
+ @tailrec
+ override def foreach[U](f: A => U): Unit = {
+ if (!isEmpty) {
+ f(head)
+ tail.foreach(f)
+ }
+ }
+
+ /** LazyListIterable specialization of foldLeft which allows GC to collect along the
+ * way.
+ *
+ * @tparam B The type of value being accumulated.
+ * @param z The initial value seeded into the function `op`.
+ * @param op The operation to perform on successive elements of the `LazyListIterable`.
+ * @return The accumulated value from successive applications of `op`.
+ */
+ @tailrec
+ override def foldLeft[B](z: B)(op: (B, A) => B): B =
+ if (isEmpty) z
+ else tail.foldLeft(op(z, head))(op)
+
+ // State.Empty doesn't use the SerializationProxy
+ protected[this] def writeReplace(): AnyRef^{this} =
+ if (knownNonEmpty) new LazyListIterable.SerializationProxy[A](this) else this
+
+ override protected[this] def className = "LazyListIterable"
+
+ /** The lazy list resulting from the concatenation of this lazy list with the argument lazy list.
+ *
+ * $preservesLaziness
+ *
+ * $appendStackSafety
+ *
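+ * For illustration (a hypothetical use, not from the original source):
+ * {{{
+ * val xs = LazyListIterable(1, 2).lazyAppendedAll(LazyListIterable.from(3))
+ * xs.take(4).toList  // List(1, 2, 3, 4); the infinite suffix is forced only as needed
+ * }}}
+ *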
+ * @param suffix The collection that gets appended to this lazy list
+ * @return The lazy list containing elements of this lazy list and the iterable object.
+ */
+ def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]^): LazyListIterable[B]^{this, suffix} =
+ newLL {
+ if (isEmpty) suffix match {
+ case lazyList: LazyListIterable[B] => lazyList.state // don't recompute the LazyListIterable
+ case coll if coll.knownSize == 0 => State.Empty
+ case coll => stateFromIterator(coll.iterator)
+ }
+ else sCons(head, tail lazyAppendedAll suffix)
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ *
+ * $appendStackSafety
+ */
+ def appendedAll[B >: A](suffix: IterableOnce[B]^): LazyListIterable[B]^{this, suffix} =
+ if (knownIsEmpty) LazyListIterable.from(suffix)
+ else lazyAppendedAll(suffix)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ *
+ * $appendStackSafety
+ */
+ def appended[B >: A](elem: B): LazyListIterable[B]^{this} =
+ if (knownIsEmpty) newLL(sCons(elem, LazyListIterable.empty))
+ else lazyAppendedAll(Iterator.single(elem))
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def scanLeft[B](z: B)(op: (B, A) => B): LazyListIterable[B]^{this, op} =
+ if (knownIsEmpty) newLL(sCons(z, LazyListIterable.empty))
+ else newLL(scanLeftState(z)(op))
+
+ private def scanLeftState[B](z: B)(op: (B, A) => B): State[B]^{this, op} =
+ sCons(
+ z,
+ newLL {
+ if (isEmpty) State.Empty
+ else tail.scanLeftState(op(z, head))(op)
+ }
+ )
+
+ /** LazyListIterable specialization of reduceLeft which allows GC to collect
+ * along the way.
+ *
+ * @tparam B The type of value being accumulated.
+ * @param f The operation to perform on successive elements of the `LazyListIterable`.
+ * @return The accumulated value from successive applications of `f`.
+ */
+ override def reduceLeft[B >: A](f: (B, A) => B): B = {
+ if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
+ else {
+ var reducedRes: B = this.head
+ var left: LazyListIterable[A]^{this} = this.tail
+ while (!left.isEmpty) {
+ reducedRes = f(reducedRes, left.head)
+ left = left.tail
+ }
+ reducedRes
+ }
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def partition(p: A => Boolean): (LazyListIterable[A]^{this, p}, LazyListIterable[A]^{this, p}) = (filter(p), filterNot(p))
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def partitionMap[A1, A2](f: A => Either[A1, A2]): (LazyListIterable[A1]^{this, f}, LazyListIterable[A2]^{this, f}) = {
+ val (left, right) = map(f).partition(_.isLeft)
+ (left.map(_.asInstanceOf[Left[A1, _]].value), right.map(_.asInstanceOf[Right[_, A2]].value))
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def filter(pred: A => Boolean): LazyListIterable[A]^{this, pred} =
+ if (knownIsEmpty) LazyListIterable.empty
+ else LazyListIterable.filterImpl(this, pred, isFlipped = false)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def filterNot(pred: A => Boolean): LazyListIterable[A]^{this, pred} =
+ if (knownIsEmpty) LazyListIterable.empty
+ else LazyListIterable.filterImpl(this, pred, isFlipped = true)
+
+ /** A `collection.WithFilter` which allows GC of the head of lazy list during processing.
+ *
+ * This method is not particularly useful for a lazy list, as [[filter]] already preserves
+ * laziness.
+ *
+ * The `collection.WithFilter` returned by this method preserves laziness; elements are
+ * only evaluated individually as needed.
+ */
+ override def withFilter(p: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, p} =
+ new LazyListIterable.WithFilter(coll, p)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ def prepended[B >: A](elem: B): LazyListIterable[B] = newLL(sCons(elem, this))
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): LazyListIterable[B]^{this, prefix} =
+ if (knownIsEmpty) LazyListIterable.from(prefix)
+ else if (prefix.knownSize == 0) this
+ else newLL(stateFromIteratorConcatSuffix(prefix.iterator)(state))
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def map[B](f: A => B): LazyListIterable[B]^{this, f} =
+ if (knownIsEmpty) LazyListIterable.empty
+ else (mapImpl(f): @inline)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def tapEach[U](f: A => U): LazyListIterable[A]^{this, f} = map { a => f(a); a }
+
+ private def mapImpl[B](f: A => B): LazyListIterable[B]^{this, f} =
+ newLL {
+ if (isEmpty) State.Empty
+ else sCons(f(head), tail.mapImpl(f))
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def collect[B](pf: PartialFunction[A, B]^): LazyListIterable[B]^{this, pf} =
+ if (knownIsEmpty) LazyListIterable.empty
+ else LazyListIterable.collectImpl(this, pf)
+
+ /** @inheritdoc
+ *
+ * This method does not evaluate any elements further than
+ * the first element for which the partial function is defined.
+ */
+ @tailrec
+ override def collectFirst[B](pf: PartialFunction[A, B]): Option[B] =
+ if (isEmpty) None
+ else {
+ val res = pf.applyOrElse(head, LazyListIterable.anyToMarker.asInstanceOf[A => B])
+ if (res.asInstanceOf[AnyRef] eq Statics.pfMarker) tail.collectFirst(pf)
+ else Some(res)
+ }
+
+ /** @inheritdoc
+ *
+ * This method does not evaluate any elements further than
+ * the first element matching the predicate.
+ */
+ @tailrec
+ override def find(p: A => Boolean): Option[A] =
+ if (isEmpty) None
+ else {
+ val elem = head
+ if (p(elem)) Some(elem)
+ else tail.find(p)
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ // optimisations are not for speed, but for functionality
+ // see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala)
+ override def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} =
+ if (knownIsEmpty) LazyListIterable.empty
+ else LazyListIterable.flatMapImpl(this, f)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def flatten[B](implicit asIterable: A -> IterableOnce[B]): LazyListIterable[B]^{this} = flatMap(asIterable)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def zip[B](that: collection.IterableOnce[B]^): LazyListIterable[(A, B)]^{this, that} =
+ if (this.knownIsEmpty || that.knownSize == 0) LazyListIterable.empty
+ else newLL(zipState(that.iterator))
+
+ private def zipState[B](it: Iterator[B]^): State[(A, B)]^{this, it} =
+ if (this.isEmpty || !it.hasNext) State.Empty
+ else sCons((head, it.next()), newLL { tail zipState it })
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def zipWithIndex: LazyListIterable[(A, Int)]^{this} = this zip LazyListIterable.from(0)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def zipAll[A1 >: A, B](that: collection.Iterable[B]^, thisElem: A1, thatElem: B): LazyListIterable[(A1, B)]^{this, that} = {
+ if (this.knownIsEmpty) {
+ if (that.knownSize == 0) LazyListIterable.empty
+ else LazyListIterable.continually(thisElem) zip that
+ } else {
+ if (that.knownSize == 0) zip(LazyListIterable.continually(thatElem))
+ else newLL(zipAllState(that.iterator, thisElem, thatElem))
+ }
+ }
+
+ private def zipAllState[A1 >: A, B](it: Iterator[B]^, thisElem: A1, thatElem: B): State[(A1, B)]^{this, it} = {
+ if (it.hasNext) {
+ if (this.isEmpty) sCons((thisElem, it.next()), newLL { LazyListIterable.continually(thisElem) zipState it })
+ else sCons((this.head, it.next()), newLL { this.tail.zipAllState(it, thisElem, thatElem) })
+ } else {
+ if (this.isEmpty) State.Empty
+ else sCons((this.head, thatElem), this.tail zip LazyListIterable.continually(thatElem))
+ }
+ }
+
+ /** @inheritdoc
+ *
+ * This method is not particularly useful for a lazy list, as [[zip]] already preserves
+ * laziness.
+ *
+ * The `collection.LazyZip2` returned by this method preserves laziness; elements are
+ * only evaluated individually as needed.
+ */
+ // just in case it can be meaningfully overridden at some point
+ override def lazyZip[B](that: collection.Iterable[B]^): LazyZip2[A, B, LazyListIterable.this.type]^{this, that} =
+ super.lazyZip(that)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def unzip[A1, A2](implicit asPair: A -> (A1, A2)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}) =
+ (map(asPair(_)._1), map(asPair(_)._2))
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def unzip3[A1, A2, A3](implicit asTriple: A -> (A1, A2, A3)): (LazyListIterable[A1]^{this}, LazyListIterable[A2]^{this}, LazyListIterable[A3]^{this}) =
+ (map(asTriple(_)._1), map(asTriple(_)._2), map(asTriple(_)._3))
+
+ /** @inheritdoc
+ *
+ * $initiallyLazy
+ * Additionally, it preserves laziness for all except the first `n` elements.
+ */
+ override def drop(n: Int): LazyListIterable[A]^{this} =
+ if (n <= 0) this
+ else if (knownIsEmpty) LazyListIterable.empty
+ else LazyListIterable.dropImpl(this, n)
+
+ /** @inheritdoc
+ *
+ * $initiallyLazy
+ * Additionally, it preserves laziness for all elements after the predicate returns `false`.
+ */
+ override def dropWhile(p: A => Boolean): LazyListIterable[A]^{this, p} =
+ if (knownIsEmpty) LazyListIterable.empty
+ else LazyListIterable.dropWhileImpl(this, p)
+
+ /** @inheritdoc
+ *
+ * $initiallyLazy
+ */
+ override def dropRight(n: Int): LazyListIterable[A]^{this} = {
+ if (n <= 0) this
+ else if (knownIsEmpty) LazyListIterable.empty
+ else newLL {
+ var scout = this
+ var remaining = n
+ // advance scout n elements ahead (or until empty)
+ while (remaining > 0 && !scout.isEmpty) {
+ remaining -= 1
+ scout = scout.tail
+ }
+ dropRightState(scout)
+ }
+ }
+
+ private def dropRightState(scout: LazyListIterable[_]^): State[A]^{this, scout} =
+ if (scout.isEmpty) State.Empty
+ else sCons(head, newLL(tail.dropRightState(scout.tail)))
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def take(n: Int): LazyListIterable[A] =
+ if (knownIsEmpty) LazyListIterable.empty
+ else (takeImpl(n): @inline)
+
+ private def takeImpl(n: Int): LazyListIterable[A] = {
+ if (n <= 0) LazyListIterable.empty
+ else newLL {
+ if (isEmpty) State.Empty
+ else sCons(head, tail.takeImpl(n - 1))
+ }
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ override def takeWhile(p: A => Boolean): LazyListIterable[A]^{this, p} =
+ if (knownIsEmpty) LazyListIterable.empty
+ else (takeWhileImpl(p): @inline)
+
+ private def takeWhileImpl(p: A => Boolean): LazyListIterable[A]^{this, p} =
+ newLL {
+ if (isEmpty || !p(head)) State.Empty
+ else sCons(head, tail.takeWhileImpl(p))
+ }
+
+ /** @inheritdoc
+ *
+ * $initiallyLazy
+ */
+ override def takeRight(n: Int): LazyListIterable[A]^{this} =
+ if (n <= 0 || knownIsEmpty) LazyListIterable.empty
+ else LazyListIterable.takeRightImpl(this, n)
+
+ /** @inheritdoc
+ *
+ * $initiallyLazy
+ * Additionally, it preserves laziness for all but the first `from` elements.
+ */
+ override def slice(from: Int, until: Int): LazyListIterable[A]^{this} = take(until).drop(from)
+
+ /** @inheritdoc
+ *
+ * $evaluatesAllElements
+ */
+ def reverse: LazyListIterable[A] = reverseOnto(LazyListIterable.empty)
+
+ // need contravariant type B to make the compiler happy - still returns LazyListIterable[A]
+ @tailrec
+ private def reverseOnto[B >: A](tl: LazyListIterable[B]): LazyListIterable[B] =
+ if (isEmpty) tl
+ else tail.reverseOnto(newLL(sCons(head, tl)))
+
+ @tailrec
+ private def lengthGt(len: Int): Boolean =
+ if (len < 0) true
+ else if (isEmpty) false
+ else tail.lengthGt(len - 1)
+
+ /** @inheritdoc
+ *
+ * The iterator returned by this method mostly preserves laziness;
+ * a single element ahead of the iterator is evaluated.
+ */
+ override def grouped(size: Int): Iterator[LazyListIterable[A]] = {
+ require(size > 0, "size must be positive, but was " + size)
+ slidingImpl(size = size, step = size)
+ }
+
+ /** @inheritdoc
+ *
+ * The iterator returned by this method mostly preserves laziness;
+ * `size - step max 1` elements ahead of the iterator are evaluated.
+ */
+ override def sliding(size: Int, step: Int): Iterator[LazyListIterable[A]] = {
+ require(size > 0 && step > 0, s"size=$size and step=$step, but both must be positive")
+ slidingImpl(size = size, step = step)
+ }
+
+ @inline private def slidingImpl(size: Int, step: Int): Iterator[LazyListIterable[A]] =
+ if (knownIsEmpty) Iterator.empty
+ else new SlidingIterator[A](this, size = size, step = step)
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ def padTo[B >: A](len: Int, elem: B): LazyListIterable[B]^{this} = {
+ if (len <= 0) this
+ else newLL {
+ if (isEmpty) LazyListIterable.fill(len)(elem).state
+ else sCons(head, tail.padTo(len - 1, elem))
+ }
+ }
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} =
+ if (knownIsEmpty) LazyListIterable from other
+ else patchImpl(from, other, replaced)
+
+ private def patchImpl[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): LazyListIterable[B]^{this, other} =
+ newLL {
+ if (from <= 0) stateFromIteratorConcatSuffix(other.iterator)(LazyListIterable.dropImpl(this, replaced).state)
+ else if (isEmpty) stateFromIterator(other.iterator)
+ else sCons(head, tail.patchImpl(from - 1, other, replaced))
+ }
+
+ /** @inheritdoc
+ *
+ * $evaluatesAllElements
+ */
+ // overridden just in case a lazy implementation is developed at some point
+ override def transpose[B](implicit asIterable: A -> collection.Iterable[B]): LazyListIterable[LazyListIterable[B]]^{this} = super.transpose
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ def updated[B >: A](index: Int, elem: B): LazyListIterable[B]^{this} =
+ if (index < 0) throw new IndexOutOfBoundsException(s"$index")
+ else updatedImpl(index, elem, index)
+
+ private def updatedImpl[B >: A](index: Int, elem: B, startIndex: Int): LazyListIterable[B]^{this} = {
+ newLL {
+ if (index <= 0) sCons(elem, tail)
+ else if (tail.isEmpty) throw new IndexOutOfBoundsException(startIndex.toString)
+ else sCons(head, tail.updatedImpl(index - 1, elem, startIndex))
+ }
+ }
+
+ /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
+ * The written text begins with the string `start` and ends with the string `end`.
+ * Inside, the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * An undefined state is represented with `"<not computed>"` and cycles are represented with `"<cycle>"`.
+ *
+ * $evaluatesAllElements
+ *
+ * @param sb the string builder to which elements are appended.
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return the string builder `b` to which elements were appended.
+ */
+ override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = {
+ force
+ addStringNoForce(sb.underlying, start, sep, end)
+ sb
+ }
+
+ private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = {
+ b.append(start)
+ if (!stateDefined) b.append("")
+ else if (!isEmpty) {
+ b.append(head)
+ var cursor = this
+ inline def appendCursorElement(): Unit = b.append(sep).append(cursor.head)
+ var scout = tail
+ inline def scoutNonEmpty: Boolean = scout.stateDefined && !scout.isEmpty
+ if ((cursor ne scout) && (!scout.stateDefined || (cursor.state ne scout.state))) {
+ cursor = scout
+ if (scoutNonEmpty) {
+ scout = scout.tail
+ // Use 2x 1x iterator trick for cycle detection; slow iterator can add strings
+ while ((cursor ne scout) && scoutNonEmpty && (cursor.state ne scout.state)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ scout = scout.tail
+ if (scoutNonEmpty) scout = scout.tail
+ }
+ }
+ }
+ if (!scoutNonEmpty) { // Not a cycle, scout hit an end
+ while (cursor ne scout) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ // if cursor (eq scout) has state defined, it is empty; else unknown state
+ if (!cursor.stateDefined) b.append(sep).append("")
+ } else {
+ @inline def same(a: LazyListIterable[A]^, b: LazyListIterable[A]^): Boolean = (a eq b) || (a.state eq b.state)
+ // Cycle.
+ // If we have a prefix of length P followed by a cycle of length C,
+ // the scout will be at position (P%C) in the cycle when the cursor
+ // enters it at P. They'll then collide when the scout advances another
+ // C - (P%C) ahead of the cursor.
+ // If we run the scout P farther, then it will be at the start of
+ // the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
+ // starts at the beginning of the prefix, they'll collide exactly at
+ // the start of the loop.
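+ // (Illustrative check, not in the original: with P = 2 and C = 3 the collision
+ // happens at (-P) mod C == 1 step into the cycle; a runner started at the head
+ // then meets the scout after exactly P steps, at the cycle's start.)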
+ var runner = this
+ var k = 0
+ while (!same(runner, scout)) {
+ runner = runner.tail
+ scout = scout.tail
+ k += 1
+ }
+ // Now runner and scout are at the beginning of the cycle. Advance
+ // cursor, adding to string, until it hits; then we'll have covered
+ // everything once. If cursor is already at beginning, we'd better
+ // advance one first unless runner didn't go anywhere (in which case
+ // we've already looped once).
+ if (same(cursor, scout) && (k > 0)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ while (!same(cursor, scout)) {
+ appendCursorElement()
+ cursor = cursor.tail
+ }
+ b.append(sep).append("")
+ }
+ }
+ b.append(end)
+ }
+
+ /** $preservesLaziness
+ *
+ * @return a string representation of this collection. An undefined state is
+ * represented with `"<not computed>"` and cycles are represented with `"<cycle>"`
+ *
+ * Examples:
+ *
+ * - `"LazyListIterable(4, <not computed>)"`, a non-empty lazy list ;
+ * - `"LazyListIterable(1, 2, 3, <not computed>)"`, a lazy list with at least three elements ;
+ * - `"LazyListIterable(1, 2, 3, <cycle>)"`, an infinite lazy list that contains
+ * a cycle at the fourth element.
+ */
+ override def toString(): String = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString
+
+ /** @inheritdoc
+ *
+ * $preservesLaziness
+ */
+ @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0")
+ override def hasDefiniteSize: Boolean = {
+ if (!stateDefined) false
+ else if (isEmpty) true
+ else {
+ // Two-iterator trick (2x & 1x speed) for cycle detection.
+ var those = this
+ var these = tail
+ while (those ne these) {
+ if (!these.stateDefined) return false
+ else if (these.isEmpty) return true
+ these = these.tail
+ if (!these.stateDefined) return false
+ else if (these.isEmpty) return true
+ these = these.tail
+ if (those eq these) return false
+ those = those.tail
+ }
+ false // Cycle detected
+ }
+ }
+}
+
+/**
+ * $factoryInfo
+ * @define coll lazy list
+ * @define Coll `LazyListIterable`
+ */
+@SerialVersionUID(3L)
+object LazyListIterable extends IterableFactory[LazyListIterable] {
+ // Eagerly evaluate cached empty instance
+ private[this] val _empty = newLL(State.Empty).force
+
+ private sealed trait State[+A] extends Serializable {
+ this: State[A]^ =>
+ def head: A
+ def tail: LazyListIterable[A]^
+ }
+
+ private object State {
+ @SerialVersionUID(3L)
+ object Empty extends State[Nothing] {
+ def head: Nothing = throw new NoSuchElementException("head of empty lazy list")
+ def tail: LazyListIterable[Nothing] = throw new UnsupportedOperationException("tail of empty lazy list")
+ }
+
+ @SerialVersionUID(3L)
+ final class Cons[A](val head: A, val tail: LazyListIterable[A]^) extends State[A]
+ }
+
+ /** Creates a new LazyListIterable. */
+ @inline private def newLL[A](state: => State[A]^): LazyListIterable[A]^{state} = new LazyListIterable[A](() => state)
+
+ /** Creates a new State.Cons. */
+ @inline private def sCons[A](hd: A, tl: LazyListIterable[A]^): State[A]^{tl} = new State.Cons[A](hd, tl)
+
+ private val anyToMarker: Any => Any = _ => Statics.pfMarker
+
+ /* All of the following `Impl` methods are carefully written so as not to
+ * leak the beginning of the `LazyListIterable`. They copy the initial `LazyListIterable` (`ll`) into
+ * `var rest`, which gets closed over as a `scala.runtime.ObjectRef`, thus not permanently
+ * leaking the head of the `LazyListIterable`. Additionally, the methods are written so that, should
+ * an exception be thrown by the evaluation of the `LazyListIterable` or any supplied function, they
+ * can continue their execution where they left off.
+ */
+
+ private def filterImpl[A](ll: LazyListIterable[A]^, p: A => Boolean, isFlipped: Boolean): LazyListIterable[A]^{ll, p} = {
+ // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD
+ var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[filterImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric
+ newLL {
+ var elem: A = null.asInstanceOf[A]
+ var found = false
+ var rest = restRef // var rest = restRef.elem
+ while (!found && !rest.isEmpty) {
+ elem = rest.head
+ found = p(elem) != isFlipped
+ rest = rest.tail
+ restRef = rest // restRef.elem = rest
+ }
+ if (found) sCons(elem, filterImpl(rest, p, isFlipped)) else State.Empty
+ }
+ }
+
+ private def collectImpl[A, B](ll: LazyListIterable[A]^, pf: PartialFunction[A, B]^): LazyListIterable[B]^{ll, pf} = {
+ // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD
+ var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[collectImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric
+ newLL {
+ val marker = Statics.pfMarker
+ val toMarker = anyToMarker.asInstanceOf[A => B] // safe because Function1 is erased
+
+ var res: B = marker.asInstanceOf[B] // safe because B is unbounded
+ var rest = restRef // var rest = restRef.elem
+ while((res.asInstanceOf[AnyRef] eq marker) && !rest.isEmpty) {
+ res = pf.applyOrElse(rest.head, toMarker)
+ rest = rest.tail
+ restRef = rest // restRef.elem = rest
+ }
+ if (res.asInstanceOf[AnyRef] eq marker) State.Empty
+ else sCons(res, collectImpl(rest, pf))
+ }
+ }
+
+ private def flatMapImpl[A, B](ll: LazyListIterable[A]^, f: A => IterableOnce[B]^): LazyListIterable[B]^{ll, f} = {
+ // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD
+ var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[flatMapImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric
+ newLL {
+ var it: Iterator[B @uncheckedCaptures]^{ll, f} = null
+ var itHasNext = false
+ var rest = restRef // var rest = restRef.elem
+ while (!itHasNext && !rest.isEmpty) {
+ it = f(rest.head).iterator
+ itHasNext = it.hasNext
+ if (!itHasNext) { // wait to advance `rest` because `it.next()` can throw
+ rest = rest.tail
+ restRef = rest // restRef.elem = rest
+ }
+ }
+ if (itHasNext) {
+ val head = it.next()
+ rest = rest.tail
+ restRef = rest // restRef.elem = rest
+ sCons(head, newLL(stateFromIteratorConcatSuffix(it)(flatMapImpl(rest, f).state)))
+ } else State.Empty
+ }
+ }
+
+ private def dropImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = {
+ // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD
+ var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric
+ var iRef = n // val iRef = new IntRef(n)
+ newLL {
+ var rest = restRef // var rest = restRef.elem
+ var i = iRef // var i = iRef.elem
+ while (i > 0 && !rest.isEmpty) {
+ rest = rest.tail
+ restRef = rest // restRef.elem = rest
+ i -= 1
+ iRef = i // iRef.elem = i
+ }
+ rest.state
+ }
+ }
+
+ private def dropWhileImpl[A](ll: LazyListIterable[A]^, p: A => Boolean): LazyListIterable[A]^{ll, p} = {
+ // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD
+ var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[dropWhileImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric
+ newLL {
+ var rest = restRef // var rest = restRef.elem
+ while (!rest.isEmpty && p(rest.head)) {
+ rest = rest.tail
+ restRef = rest // restRef.elem = rest
+ }
+ rest.state
+ }
+ }
+
+ private def takeRightImpl[A](ll: LazyListIterable[A]^, n: Int): LazyListIterable[A]^{ll} = {
+ // DO NOT REFERENCE `ll` ANYWHERE ELSE, OR IT WILL LEAK THE HEAD
+ var restRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // restRef is captured by closure arg to newLL, so A is not recognized as parametric
+ var scoutRef: LazyListIterable[A @uncheckedCaptures]^{cap[takeRightImpl]} = ll // same situation
+ var remainingRef = n // val remainingRef = new IntRef(n)
+ newLL {
+ var scout = scoutRef // var scout = scoutRef.elem
+ var remaining = remainingRef // var remaining = remainingRef.elem
+ // advance `scout` `n` elements ahead (or until empty)
+ while (remaining > 0 && !scout.isEmpty) {
+ scout = scout.tail
+ scoutRef = scout // scoutRef.elem = scout
+ remaining -= 1
+ remainingRef = remaining // remainingRef.elem = remaining
+ }
+ var rest = restRef // var rest = restRef.elem
+ // advance `rest` and `scout` in tandem until `scout` reaches the end
+ while(!scout.isEmpty) {
+ scout = scout.tail
+ scoutRef = scout // scoutRef.elem = scout
+ rest = rest.tail // can't throw an exception as `scout` has already evaluated its tail
+ restRef = rest // restRef.elem = rest
+ }
+ // `rest` is the last `n` elements (or all of them)
+ rest.state
+ }
+ }
+
+ /** An alternative way of building and matching lazy lists using LazyListIterable.cons(hd, tl).
+ */
+ object cons {
+ /** A lazy list consisting of a given first element and remaining elements
+ * @param hd The first element of the result lazy list
+ * @param tl The remaining elements of the result lazy list
+ */
+ def apply[A](hd: => A, tl: => LazyListIterable[A]^): LazyListIterable[A]^{hd, tl} = newLL(sCons(hd, newLL(tl.state)))
+
+ /** Maps a lazy list to its head and tail */
+ def unapply[A](xs: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{xs})] = #::.unapply(xs)
+ }
+
+ extension [A](l: => LazyListIterable[A])
+ /** Construct a LazyListIterable consisting of a given first element followed by elements
+ * from another LazyListIterable.
+ */
+ def #:: [B >: A](elem: => B): LazyListIterable[B]^{elem, l} = newLL(sCons(elem, newLL(l.state)))
+
+ /** Construct a LazyListIterable consisting of the concatenation of the given LazyListIterable and
+ * another LazyListIterable.
+ */
+ def #:::[B >: A](prefix: LazyListIterable[B]^): LazyListIterable[B]^{prefix, l} = prefix lazyAppendedAll l
+
+ object #:: {
+ def unapply[A](s: LazyListIterable[A]^): Option[(A, LazyListIterable[A]^{s})] =
+ if (!s.isEmpty) Some((s.head, s.tail)) else None
+ }
+
+ def from[A](coll: collection.IterableOnce[A]^): LazyListIterable[A]^{coll} = coll match {
+ case lazyList: LazyListIterable[A] => lazyList
+ case _ if coll.knownSize == 0 => empty[A]
+ case _ => newLL(stateFromIterator(coll.iterator))
+ }
+
+ def empty[A]: LazyListIterable[A] = _empty
+
+ /** Creates a State from an Iterator, with another State appended after the Iterator
+ * is empty.
+ */
+ private def stateFromIteratorConcatSuffix[A](it: Iterator[A]^)(suffix: => State[A]^): State[A]^{it, suffix} =
+ if (it.hasNext) sCons(it.next(), newLL(stateFromIteratorConcatSuffix(it)(suffix)))
+ else suffix
+
+ /** Creates a State from an IterableOnce. */
+ private def stateFromIterator[A](it: Iterator[A]^): State[A]^{it} =
+ if (it.hasNext) sCons(it.next(), newLL(stateFromIterator(it)))
+ else State.Empty
+
+ override def concat[A](xss: collection.Iterable[A]*): LazyListIterable[A] =
+ if (xss.knownSize == 0) empty
+ else newLL(concatIterator(xss.iterator))
+
+ private def concatIterator[A](it: Iterator[collection.Iterable[A]]^): State[A]^{it} =
+ if (!it.hasNext) State.Empty
+ else stateFromIteratorConcatSuffix(it.next().iterator)(concatIterator(it))
+
+ /** An infinite LazyListIterable that repeatedly applies a given function to a start value.
+ *
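+ * For illustration: `LazyListIterable.iterate(1)(_ * 2)` yields `1, 2, 4, 8, ...`.
+ *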
+ * @param start the start value of the LazyListIterable
+ * @param f the function that's repeatedly applied
+ * @return the LazyListIterable returning the infinite sequence of values `start, f(start), f(f(start)), ...`
+ */
+ def iterate[A](start: => A)(f: A => A): LazyListIterable[A]^{start, f} =
+ newLL {
+ val head = start
+ sCons(head, iterate(f(head))(f))
+ }
+
+ /**
+ * Create an infinite LazyListIterable starting at `start` and incrementing by
+ * step `step`.
+ *
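+ * For illustration: `LazyListIterable.from(0, 2)` yields `0, 2, 4, ...`.
+ *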
+ * @param start the start value of the LazyListIterable
+ * @param step the increment value of the LazyListIterable
+ * @return the LazyListIterable starting at value `start`.
+ */
+ def from(start: Int, step: Int): LazyListIterable[Int] =
+ newLL(sCons(start, from(start + step, step)))
+
+ /**
+ * Create an infinite LazyListIterable starting at `start` and incrementing by `1`.
+ *
+ * @param start the start value of the LazyListIterable
+ * @return the LazyListIterable starting at value `start`.
+ */
+ def from(start: Int): LazyListIterable[Int] = from(start, 1)
+
+ /**
+ * Create an infinite LazyListIterable containing the given element expression (which
+ * is computed for each occurrence).
+ *
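+ * For illustration: `LazyListIterable.continually(0)` yields `0, 0, 0, ...`; since
+ * `elem` is by-name, an effectful expression would be re-evaluated per element.
+ *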
+ * @param elem the element composing the resulting LazyListIterable
+ * @return the LazyListIterable containing an infinite number of elem
+ */
+ def continually[A](elem: => A): LazyListIterable[A]^{elem} = newLL(sCons(elem, continually(elem)))
+
+ override def fill[A](n: Int)(elem: => A): LazyListIterable[A]^{elem} =
+ if (n > 0) newLL(sCons(elem, fill(n - 1)(elem))) else empty
+
+ override def tabulate[A](n: Int)(f: Int => A): LazyListIterable[A]^{f} = {
+ def at(index: Int): LazyListIterable[A]^{f} =
+ if (index < n) newLL(sCons(f(index), at(index + 1))) else empty
+
+ at(0)
+ }
+
+ // significantly simpler than the iterator returned by Iterator.unfold
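+ // For illustration (hypothetical): unfold(3)(s => if (s > 0) Some((s, s - 1)) else None) yields 3, 2, 1.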
+ override def unfold[A, S](init: S)(f: S => Option[(A, S)]): LazyListIterable[A]^{f} =
+ newLL {
+ f(init) match {
+ case Some((elem, state)) => sCons(elem, unfold(state)(f))
+ case None => State.Empty
+ }
+ }
+
+ /** The builder returned by this method only evaluates elements
+ * of collections added to it as needed.
+ *
+ * @tparam A the type of the ${coll}’s elements
+ * @return A builder for $Coll objects.
+ */
+ def newBuilder[A]: Builder[A, LazyListIterable[A]] = new LazyBuilder[A]
+
+ private class LazyIterator[+A](private[this] var lazyList: LazyListIterable[A]^) extends AbstractIterator[A] {
+ override def hasNext: Boolean = !lazyList.isEmpty
+
+ override def next(): A =
+ if (lazyList.isEmpty) Iterator.empty.next()
+ else {
+ val res = lazyList.head
+ lazyList = lazyList.tail
+ res
+ }
+ }
+
+ private class SlidingIterator[A](private[this] var lazyList: LazyListIterable[A]^, size: Int, step: Int)
+ extends AbstractIterator[LazyListIterable[A]] {
+ this: SlidingIterator[A]^ =>
+ private val minLen = size - step max 0
+ private var first = true
+
+ def hasNext: Boolean =
+ if (first) !lazyList.isEmpty
+ else lazyList.lengthGt(minLen)
+
+ def next(): LazyListIterable[A] = {
+ if (!hasNext) Iterator.empty.next()
+ else {
+ first = false
+ val list = lazyList
+ lazyList = list.drop(step)
+ list.take(size)
+ }
+ }
+ }
+
+ private final class WithFilter[A] private[LazyListIterable](lazyList: LazyListIterable[A]^, p: A => Boolean)
+ extends collection.WithFilter[A, LazyListIterable] {
+ this: WithFilter[A]^ =>
+ private[this] val filtered = lazyList.filter(p)
+ def map[B](f: A => B): LazyListIterable[B]^{this, f} = filtered.map(f)
+ def flatMap[B](f: A => IterableOnce[B]^): LazyListIterable[B]^{this, f} = filtered.flatMap(f)
+ def foreach[U](f: A => U): Unit = filtered.foreach(f)
+ def withFilter(q: A => Boolean): collection.WithFilter[A, LazyListIterable]^{this, q} = new WithFilter(filtered, q)
+ }
+
+ private final class LazyBuilder[A] extends ReusableBuilder[A, LazyListIterable[A]] {
+ import LazyBuilder._
+
+ private[this] var next: DeferredState[A @uncheckedCaptures] = _
+ private[this] var list: LazyListIterable[A @uncheckedCaptures] = _
+
+ clear()
+
+ override def clear(): Unit = {
+ val deferred = new DeferredState[A]
+ list = newLL(deferred.eval())
+ next = deferred
+ }
+
+ override def result(): LazyListIterable[A] = {
+ next init State.Empty
+ list
+ }
+
+ override def addOne(elem: A): this.type = {
+ val deferred = new DeferredState[A]
+ next init sCons(elem, newLL(deferred.eval()))
+ next = deferred
+ this
+ }
+
+ // lazy implementation which doesn't evaluate the collection being added
+ override def addAll(xs: IterableOnce[A]^): this.type = {
+ if (xs.knownSize != 0) {
+ val deferred = new DeferredState[A]
+ next.init(stateFromIteratorConcatSuffix(xs.iterator)(deferred.eval()))
+ next = deferred
+ }
+ this
+ }
+ }
+
+ private object LazyBuilder {
+ final class DeferredState[A] {
+ this: DeferredState[A]^ =>
+ private[this] var _state: (() => State[A]^) @uncheckedCaptures = _
+
+ def eval(): State[A]^ = {
+ val state = _state
+ if (state == null) throw new IllegalStateException("uninitialized")
+ state()
+ }
+
+ // racy
+ def init(state: => State[A]^): Unit = {
+ if (_state != null) throw new IllegalStateException("already initialized")
+ _state = () => state
+ }
+ }
+ }
+
+ /** This serialization proxy is used for LazyLists which start with a sequence of evaluated cons cells.
+ * The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses
+ * standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization
+ * of long evaluated lazy lists without exhausting the stack through recursive serialization of cons cells.
+ */
+ @SerialVersionUID(3L)
+ final class SerializationProxy[A](@transient protected var coll: LazyListIterable[A]^) extends Serializable {
+
+ private[this] def writeObject(out: ObjectOutputStream): Unit = {
+ out.defaultWriteObject()
+ var these = coll
+ while (these.knownNonEmpty) {
+ out.writeObject(these.head)
+ these = these.tail
+ }
+ out.writeObject(SerializeEnd)
+ out.writeObject(these)
+ }
+
+ private[this] def readObject(in: ObjectInputStream): Unit = {
+ in.defaultReadObject()
+ val init = new mutable.ListBuffer[A @uncheckedCaptures]
+ var initRead = false
+ while (!initRead) in.readObject match {
+ case SerializeEnd => initRead = true
+ case a => init += a.asInstanceOf[A]
+ }
+ val tail = in.readObject().asInstanceOf[LazyListIterable[A]]
+ // scala/scala#10118: caution that no code path can evaluate `tail.state`
+ // before the resulting LazyListIterable is returned
+ val it = init.toList.iterator
+ coll = newLL(stateFromIteratorConcatSuffix(it)(tail.state))
+ }
+
+ private[this] def readResolve(): Any = coll
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/ListMap.scala b/tests/pos-special/stdlib/collection/immutable/ListMap.scala
new file mode 100644
index 000000000000..c5000d785144
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/ListMap.scala
@@ -0,0 +1,373 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.tailrec
+import scala.collection.mutable.ReusableBuilder
+import scala.collection.generic.DefaultSerializable
+import scala.runtime.Statics.releaseFence
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/**
+ * This class implements immutable maps using a list-based data structure. List map iterators and
+ * traversal methods visit key-value pairs in the order they were first inserted.
+ *
+ * Entries are stored internally in reversed insertion order, which means the newest key is at the
+ * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init`
+ * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes
+ * this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListMap` represent empty maps; they can be created either by calling the
+ * constructor directly or by applying the function `ListMap.empty`.
+ *
+ * @tparam K the type of the keys contained in this list map
+ * @tparam V the type of the values associated with the keys
+ *
+ * @define Coll ListMap
+ * @define coll list map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed class ListMap[K, +V]
+ extends AbstractMap[K, V]
+ with SeqMap[K, V]
+ with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]]
+ with MapFactoryDefaults[K, V, ListMap, Iterable]
+ with DefaultSerializable {
+
+ override def mapFactory: MapFactory[ListMap] = ListMap
+
+ override def size: Int = 0
+
+ override def isEmpty: Boolean = true
+
+ override def knownSize: Int = 0
+ def get(key: K): Option[V] = None
+
+ def updated[V1 >: V](key: K, value: V1): ListMap[K, V1] = new ListMap.Node[K, V1](key, value, this)
+
+ def removed(key: K): ListMap[K, V] = this
+
+ def iterator: Iterator[(K, V)] = {
+ var curr: ListMap[K, V] = this
+ var res: List[(K, V)] = Nil
+ while (curr.nonEmpty) {
+ res = (curr.key, curr.value) :: res
+ curr = curr.next
+ }
+ res.iterator
+ }
+
+ override def keys: Iterable[K] = {
+ var curr: ListMap[K, V] = this
+ var res: List[K] = Nil
+ while (curr.nonEmpty) {
+ res = curr.key :: res
+ curr = curr.next
+ }
+ res
+ }
+
+ override def hashCode(): Int = {
+ if (isEmpty) MurmurHash3.emptyMapHash
+ else {
+      // Can't efficiently override foreachEntry directly in ListMap because it would need to preserve iteration
+      // order by reversing the list first. But mapHash is symmetric, so the reversed order is fine here.
+ val _reversed = new immutable.AbstractMap[K, V] {
+ override def isEmpty: Boolean = ListMap.this.isEmpty
+ override def removed(key: K): Map[K, V] = ListMap.this.removed(key)
+ override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = ListMap.this.updated(key, value)
+ override def get(key: K): Option[V] = ListMap.this.get(key)
+ override def iterator: Iterator[(K, V)] = ListMap.this.iterator
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ var curr: ListMap[K, V] = ListMap.this
+ while (curr.nonEmpty) {
+ f(curr.key, curr.value)
+ curr = curr.next
+ }
+ }
+ }
+ MurmurHash3.mapHash(_reversed)
+ }
+ }
+
+ private[immutable] def key: K = throw new NoSuchElementException("key of empty map")
+ private[immutable] def value: V = throw new NoSuchElementException("value of empty map")
+ private[immutable] def next: ListMap[K, V] = throw new NoSuchElementException("next of empty map")
+
+ override def foldRight[Z](z: Z)(op: ((K, V), Z) => Z): Z = ListMap.foldRightInternal(this, z, op)
+ override protected[this] def className = "ListMap"
+
+}
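+
+// A small usage sketch (illustrative, not part of the original source):
+// traversal follows insertion order even though entries are stored reversed.
+//   val m = ListMap("a" -> 1, "b" -> 2).updated("c", 3)
+//   m.iterator.toList   // List(("a", 1), ("b", 2), ("c", 3))
+//   m.last              // ("c", 3) -- O(1), the newest entry is at the head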
+
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list map with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]]
+ * section on `List Maps` for more information.
+ * @define Coll ListMap
+ * @define coll list map
+ */
+@SerialVersionUID(3L)
+object ListMap extends MapFactory[ListMap] {
+ /**
+ * Represents an entry in the `ListMap`.
+ */
+ private[immutable] final class Node[K, V](
+ override private[immutable] val key: K,
+ private[immutable] var _value: V @uncheckedCaptures,
+ private[immutable] var _init: ListMap[K, V] @uncheckedCaptures
+ ) extends ListMap[K, V] {
+ releaseFence()
+
+ override private[immutable] def value: V = _value
+
+ override def size: Int = sizeInternal(this, 0)
+
+ @tailrec private[this] def sizeInternal(cur: ListMap[K, V], acc: Int): Int =
+ if (cur.isEmpty) acc
+ else sizeInternal(cur.next, acc + 1)
+
+ override def isEmpty: Boolean = false
+
+ override def knownSize: Int = -1
+
+ @throws[NoSuchElementException]
+ override def apply(k: K): V = applyInternal(this, k)
+
+ @tailrec private[this] def applyInternal(cur: ListMap[K, V], k: K): V =
+ if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k)
+ else if (k == cur.key) cur.value
+ else applyInternal(cur.next, k)
+
+ override def get(k: K): Option[V] = getInternal(this, k)
+
+ @tailrec private[this] def getInternal(cur: ListMap[K, V], k: K): Option[V] =
+ if (cur.isEmpty) None
+ else if (k == cur.key) Some(cur.value)
+ else getInternal(cur.next, k)
+
+ override def contains(k: K): Boolean = containsInternal(this, k)
+
+ @tailrec private[this] def containsInternal(cur: ListMap[K, V], k: K): Boolean =
+ if (cur.isEmpty) false
+ else if (k == cur.key) true
+ else containsInternal(cur.next, k)
+
+ override def updated[V1 >: V](k: K, v: V1): ListMap[K, V1] = {
+
+ var index = -1 // the index (in reverse) where the key to update exists, if it is found
+    var found = false // true if the key is found in the map
+ var isDifferent = false // true if the key was found and the values are different
+
+ {
+ var curr: ListMap[K, V] = this
+
+ while (curr.nonEmpty && !found) {
+ if (k == curr.key) {
+ found = true
+ isDifferent = v.asInstanceOf[AnyRef] ne curr.value.asInstanceOf[AnyRef]
+ }
+ index += 1
+ curr = curr.init
+ }
+ }
+
+ if (found) {
+ if (isDifferent) {
+ var newHead: ListMap.Node[K, V1] = null
+ var prev: ListMap.Node[K, V1] = null
+ var curr: ListMap[K, V1] = this
+ var i = 0
+ while (i < index) {
+ val temp = new ListMap.Node(curr.key, curr.value, null)
+ if (prev ne null) {
+ prev._init = temp
+ }
+ prev = temp
+ curr = curr.init
+ if (newHead eq null) {
+ newHead = prev
+ }
+ i += 1
+ }
+ val newNode = new ListMap.Node(curr.key, v, curr.init)
+ if (prev ne null) {
+ prev._init = newNode
+ }
+ releaseFence()
+ if (newHead eq null) newNode else newHead
+ } else {
+ this
+ }
+ } else {
+ new ListMap.Node(k, v, this)
+ }
+ }
+
+ @tailrec private[this] def removeInternal(k: K, cur: ListMap[K, V], acc: List[ListMap[K, V]]): ListMap[K, V] =
+ if (cur.isEmpty) acc.last
+ else if (k == cur.key) acc.foldLeft(cur.next) { (t, h) => new Node(h.key, h.value, t) }
+ else removeInternal(k, cur.next, cur :: acc)
+
+ override def removed(k: K): ListMap[K, V] = removeInternal(k, this, Nil)
+
+ override private[immutable] def next: ListMap[K, V] = _init
+
+ override def last: (K, V) = (key, value)
+ override def init: ListMap[K, V] = next
+
+ }
+
+ def empty[K, V]: ListMap[K, V] = EmptyListMap.asInstanceOf[ListMap[K, V]]
+
+ private object EmptyListMap extends ListMap[Any, Nothing]
+
+ def from[K, V](it: collection.IterableOnce[(K, V)]^): ListMap[K, V] =
+ it match {
+ case lm: ListMap[K, V] => lm
+ case lhm: collection.mutable.LinkedHashMap[K, V] =>
+        // by iterating directly through the LinkedHashMap entries, we avoid creating
+        // an intermediate tuple for each key-value pair
+ var current: ListMap[K, V] = empty[K, V]
+ var firstEntry = lhm._firstEntry
+ while (firstEntry ne null) {
+ current = new Node(firstEntry.key, firstEntry.value, current)
+ firstEntry = firstEntry.later
+ }
+ current
+ case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
+ // when creating from a map, we need not handle duplicate keys, so we can just append each key-value to the end
+ var current: ListMap[K, V] = empty[K, V]
+ val iter = it.iterator
+ while (iter.hasNext) {
+ val (k, v) = iter.next()
+ current = new Node(k, v, current)
+ }
+ current
+
+ case _ => (newBuilder[K, V] ++= it).result()
+ }
+
+ /** Returns a new ListMap builder
+ *
+ * The implementation safely handles additions after `result()` without calling `clear()`
+ *
+ * @tparam K the map key type
+ * @tparam V the map value type
+ */
+ def newBuilder[K, V]: ReusableBuilder[(K, V), ListMap[K, V]] = new ListMapBuilder[K, V]
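+
+  // Reuse semantics sketch (illustrative): after result(), additions switch to
+  // persistent updated calls, so previously returned maps are unaffected:
+  //   val b = ListMap.newBuilder[String, Int]
+  //   b.addOne("a" -> 1)
+  //   val m1 = b.result()   // ListMap(a -> 1)
+  //   b.addOne("b" -> 2)
+  //   val m2 = b.result()   // ListMap(a -> 1, b -> 2); m1 is unchanged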
+
+ @tailrec private def foldRightInternal[K, V, Z](map: ListMap[K, V], prevValue: Z, op: ((K, V), Z) => Z): Z = {
+ if (map.isEmpty) prevValue
+ else foldRightInternal(map.init, op(map.last, prevValue), op)
+ }
+}
+
+/** Builder for ListMap.
+ * $multipleResults
+ */
+private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuilder[(K, V), ListMap[K, V]] {
+ private[this] var isAliased: Boolean = false
+ private[this] var underlying: ListMap[K, V] @uncheckedCaptures = ListMap.empty
+
+ override def clear(): Unit = {
+ underlying = ListMap.empty
+ isAliased = false
+ }
+
+ override def result(): ListMap[K, V] = {
+ isAliased = true
+ releaseFence()
+ underlying
+ }
+
+ override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2)
+
+ @tailrec
+ private[this] def insertValueAtKeyReturnFound(m: ListMap[K, V], key: K, value: V): Boolean = m match {
+ case n: ListMap.Node[K, V] =>
+ if (n.key == key) {
+ n._value = value
+ true
+ } else {
+ insertValueAtKeyReturnFound(n.init, key, value)
+ }
+ case _ => false
+ }
+
+ def addOne(key: K, value: V): this.type = {
+ if (isAliased) {
+ underlying = underlying.updated(key, value)
+ } else {
+ if (!insertValueAtKeyReturnFound(underlying, key, value)) {
+ underlying = new ListMap.Node(key, value, underlying)
+ }
+ }
+ this
+ }
+ override def addAll(xs: IterableOnce[(K, V)]^): this.type = {
+ if (isAliased) {
+ super.addAll(xs)
+ } else if (underlying.nonEmpty) {
+ xs match {
+ case m: collection.Map[K, V] =>
+          // If it is a map, then its keys will not collide with themselves;
+          // therefore we only need to check the already-existing elements for
+          // collisions, not the entire list.
+
+ val iter = m.iterator
+ var newUnderlying = underlying
+ while (iter.hasNext) {
+ val next = iter.next()
+ if (!insertValueAtKeyReturnFound(underlying, next._1, next._2)) {
+ newUnderlying = new ListMap.Node[K, V](next._1, next._2, newUnderlying)
+ }
+ }
+ underlying = newUnderlying
+ this
+
+ case _ =>
+ super.addAll(xs)
+ }
+ } else xs match {
+ case lhm: collection.mutable.LinkedHashMap[K, V] =>
+        // special-casing LinkedHashMap avoids creating an Iterator and a tuple for each key-value pair
+ var firstEntry = lhm._firstEntry
+ while (firstEntry ne null) {
+ underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying)
+ firstEntry = firstEntry.later
+ }
+ this
+
+ case _: collection.Map[K, V] | _: collection.MapView[K, V] =>
+ val iter = xs.iterator
+ while (iter.hasNext) {
+ val (k, v) = iter.next()
+ underlying = new ListMap.Node(k, v, underlying)
+ }
+
+ this
+ case _ =>
+ super.addAll(xs)
+ }
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/ListSet.scala b/tests/pos-special/stdlib/collection/immutable/ListSet.scala
new file mode 100644
index 000000000000..719abd78e1e6
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/ListSet.scala
@@ -0,0 +1,140 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import mutable.{Builder, ImmutableBuilder}
+import scala.annotation.tailrec
+import scala.collection.generic.DefaultSerializable
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/**
+ * This class implements immutable sets using a list-based data structure. List set iterators and
+ * traversal methods visit elements in the order they were first inserted.
+ *
+ * Elements are stored internally in reversed insertion order, which means the newest element is at
+ * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and
+ * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which
+ * makes this collection suitable only for a small number of elements.
+ *
+ * Instances of `ListSet` represent empty sets; they can be created either by calling the
+ * constructor directly or by applying the function `ListSet.empty`.
+ *
+ * @tparam A the type of the elements contained in this list set
+ *
+ * @define Coll ListSet
+ * @define coll list set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed class ListSet[A]
+ extends AbstractSet[A]
+ with StrictOptimizedSetOps[A, ListSet, ListSet[A]]
+ with IterableFactoryDefaults[A, ListSet]
+ with DefaultSerializable {
+
+ override protected[this] def className: String = "ListSet"
+
+ override def size: Int = 0
+ override def knownSize: Int = 0
+ override def isEmpty: Boolean = true
+
+ def contains(elem: A): Boolean = false
+
+ def incl(elem: A): ListSet[A] = new Node(elem)
+ def excl(elem: A): ListSet[A] = this
+
+ def iterator: scala.collection.Iterator[A] = {
+ var curr: ListSet[A] = this
+ var res: List[A] = Nil
+ while (!curr.isEmpty) {
+ res = curr.elem :: res
+ curr = curr.next
+ }
+ res.iterator
+ }
+
+ protected def elem: A = throw new NoSuchElementException("elem of empty set")
+ protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set")
+
+ override def iterableFactory: IterableFactory[ListSet] = ListSet
+
+ /**
+ * Represents an entry in the `ListSet`.
+ */
+ protected class Node(override protected val elem: A) extends ListSet[A] {
+
+ override def size = sizeInternal(this, 0)
+ override def knownSize: Int = -1
+ @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int =
+ if (n.isEmpty) acc
+ else sizeInternal(n.next, acc + 1)
+
+ override def isEmpty: Boolean = false
+
+ override def contains(e: A): Boolean = containsInternal(this, e)
+
+ @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean =
+ !n.isEmpty && (n.elem == e || containsInternal(n.next, e))
+
+ override def incl(e: A): ListSet[A] = if (contains(e)) this else new Node(e)
+
+ override def excl(e: A): ListSet[A] = removeInternal(e, this, Nil)
+
+ @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] =
+ if (cur.isEmpty) acc.last
+ else if (k == cur.elem) acc.foldLeft(cur.next)((t, h) => new t.Node(h.elem))
+ else removeInternal(k, cur.next, cur :: acc)
+
+ override protected def next: ListSet[A] = ListSet.this
+
+ override def last: A = elem
+
+ override def init: ListSet[A] = next
+ }
+}
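+
+// A small usage sketch (illustrative, not part of the original source):
+//   val s = ListSet(1, 2, 3) + 2 + 4   // adding an existing element is a no-op
+//   s.iterator.toList                  // List(1, 2, 3, 4), insertion order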
+
+/**
+ * $factoryInfo
+ *
+ * Note that each element insertion takes O(n) time, which means that creating a list set with
+ * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of
+ * elements.
+ *
+ * @define Coll ListSet
+ * @define coll list set
+ */
+@SerialVersionUID(3L)
+object ListSet extends IterableFactory[ListSet] {
+
+ def from[E](it: scala.collection.IterableOnce[E]^): ListSet[E] =
+ it match {
+ case ls: ListSet[E] => ls
+ case _ if it.knownSize == 0 => empty[E]
+ case _ => (newBuilder[E] ++= it).result()
+ }
+
+ private object EmptyListSet extends ListSet[Any] {
+ override def knownSize: Int = 0
+ }
+ private[collection] def emptyInstance: ListSet[Any] = EmptyListSet
+
+ def empty[A]: ListSet[A] = EmptyListSet.asInstanceOf[ListSet[A]]
+
+ def newBuilder[A]: Builder[A, ListSet[A]] =
+ new ImmutableBuilder[A, ListSet[A]](empty) {
+ def addOne(elem: A): this.type = { elems = elems + elem; this }
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/LongMap.scala b/tests/pos-special/stdlib/collection/immutable/LongMap.scala
new file mode 100644
index 000000000000..4abf433273f2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/LongMap.scala
@@ -0,0 +1,492 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import java.lang.IllegalStateException
+
+import scala.collection.generic.{BitOperations, DefaultSerializationProxy}
+import scala.collection.mutable.{Builder, ImmutableBuilder, ListBuffer}
+import scala.annotation.tailrec
+import scala.annotation.unchecked.uncheckedVariance
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** Utility class for long maps.
+ */
+private[immutable] object LongMapUtils extends BitOperations.Long {
+ def branchMask(i: Long, j: Long) = highestOneBit(i ^ j)
+
+ def join[T](p1: Long, t1: LongMap[T], p2: Long, t2: LongMap[T]): LongMap[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
+ if (zero(p1, m)) LongMap.Bin(p, m, t1, t2)
+ else LongMap.Bin(p, m, t2, t1)
+ }
+
+ def bin[T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]): LongMap[T] = (left, right) match {
+ case (left, LongMap.Nil) => left
+ case (LongMap.Nil, right) => right
+ case (left, right) => LongMap.Bin(prefix, mask, left, right)
+ }
+}
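+
+// Worked example (illustrative): joining the keys 4 (0b100) and 5 (0b101) gives
+// branchMask(4, 5) = highestOneBit(4 ^ 5) = 1, i.e. the trie branches on bit 0;
+// the resulting Bin has prefix 4 (the shared upper bits), with the Tip for key 4
+// on the zero side and the Tip for key 5 on the one side.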
+
+import LongMapUtils._
+
+/** A companion object for long maps.
+ *
+ * @define Coll `LongMap`
+ */
+object LongMap {
+ def empty[T]: LongMap[T] = LongMap.Nil
+ def singleton[T](key: Long, value: T): LongMap[T] = LongMap.Tip(key, value)
+ def apply[T](elems: (Long, T)*): LongMap[T] =
+ elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
+
+ def from[V](coll: IterableOnce[(Long, V)]^): LongMap[V] =
+ newBuilder[V].addAll(coll).result()
+
+ def newBuilder[V]: Builder[(Long, V), LongMap[V]] =
+ new ImmutableBuilder[(Long, V), LongMap[V]](empty) {
+ def addOne(elem: (Long, V)): this.type = { elems = elems + elem; this }
+ }
+
+ private[immutable] case object Nil extends LongMap[Nothing] {
+ // Important, don't remove this! See IntMap for explanation.
+    override def equals(that: Any) = that match {
+ case _: this.type => true
+ case _: LongMap[_] => false // The only empty LongMaps are eq Nil
+ case _ => super.equals(that)
+ }
+ }
+
+ private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] {
+ def withValue[S](s: S) =
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]]
+ else LongMap.Tip(key, s)
+ }
+
+ private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] {
+ def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]]
+ else LongMap.Bin[S](prefix, mask, left, right)
+ }
+ }
+
+ implicit def toFactory[V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]]
+
+ @SerialVersionUID(3L)
+ private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable {
+ def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it)
+ def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef]
+ }
+
+ implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]]
+ private[this] object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] {
+ def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it)
+ def newBuilder(from: Any) = LongMap.newBuilder[AnyRef]
+ }
+
+ implicit def iterableFactory[V]: Factory[(Long, V), LongMap[V]] = toFactory(this)
+ implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this)
+}
+
+// Iterator over a non-empty LongMap.
+private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
+
+  // Basically this uses a simple stack to emulate traversal of the tree. However,
+  // because we know that Longs are only 64 bits, we can have at most 64 LongMap.Bins and
+  // one LongMap.Tip sitting on the tree at any point. Therefore we know the maximum stack
+  // depth is 65.
+ var index = 0
+ var buffer = new Array[AnyRef](65)
+
+ def pop() = {
+ index -= 1
+ buffer(index).asInstanceOf[LongMap[V]]
+ }
+
+ def push(x: LongMap[V]): Unit = {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
+ }
+ push(it)
+
+ /**
+ * What value do we assign to a tip?
+ */
+ def valueOf(tip: LongMap.Tip[V]): T
+
+ def hasNext = index != 0
+ @tailrec
+ final def next(): T =
+ pop() match {
+ case LongMap.Bin(_,_, t@LongMap.Tip(_, _), right) => {
+ push(right)
+ valueOf(t)
+ }
+ case LongMap.Bin(_, _, left, right) => {
+ push(right)
+ push(left)
+ next()
+ }
+ case t@LongMap.Tip(_, _) => valueOf(t)
+      // This should never happen. We don't allow LongMap.Nil in subtrees of the LongMap
+      // and don't return a LongMapIterator for LongMap.Nil.
+ case LongMap.Nil => throw new IllegalStateException("Empty maps not allowed as subtrees")
+ }
+}
+
+private[immutable] class LongMapEntryIterator[V](it: LongMap[V]) extends LongMapIterator[V, (Long, V)](it){
+ def valueOf(tip: LongMap.Tip[V]) = (tip.key, tip.value)
+}
+
+private[immutable] class LongMapValueIterator[V](it: LongMap[V]) extends LongMapIterator[V, V](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.value
+}
+
+private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIterator[V, Long](it){
+ def valueOf(tip: LongMap.Tip[V]) = tip.key
+}
+
+/**
+ * Specialised immutable map structure for long keys, based on
+ * [[https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Integer Maps]]
+ * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
+ *
+ * Note: as of 2.8, this class is largely superseded by HashMap.
+ *
+ * @tparam T type of the values associated with the long keys.
+ *
+ * @define Coll `immutable.LongMap`
+ * @define coll immutable long integer map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed abstract class LongMap[+T] extends AbstractMap[Long, T]
+ with StrictOptimizedMapOps[Long, T, Map, LongMap[T]]
+ with Serializable {
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, T) @uncheckedVariance]^): LongMap[T] = {
+ //TODO should this be the default implementation of this method in StrictOptimizedIterableOps?
+ val b = newSpecificBuilder
+ b.sizeHint(coll)
+ b.addAll(coll)
+ b.result()
+ }
+ override protected def newSpecificBuilder: Builder[(Long, T), LongMap[T]] @uncheckedVariance =
+ new ImmutableBuilder[(Long, T), LongMap[T]](empty) {
+ def addOne(elem: (Long, T)): this.type = { elems = elems + elem; this }
+ }
+
+ override def empty: LongMap[T] = LongMap.Nil
+
+ override def toList = {
+ val buffer = new ListBuffer[(Long, T) @uncheckedCaptures]
+ foreach(buffer += _)
+ buffer.toList
+ }
+
+ /**
+ * Iterator over key, value pairs of the map in unsigned order of the keys.
+ *
+ * @return an iterator over pairs of long keys and corresponding values.
+ */
+ def iterator: Iterator[(Long, T)] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapEntryIterator(this)
+ }
+
+ /**
+ * Loops over the key, value pairs of the map in unsigned order of the keys.
+ */
+ override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
+ case LongMap.Tip(key, value) => f((key, value))
+ case LongMap.Nil =>
+ }
+
+ override final def foreachEntry[U](f: (Long, T) => U): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachEntry(f); right.foreachEntry(f) }
+ case LongMap.Tip(key, value) => f(key, value)
+ case LongMap.Nil =>
+ }
+
+ override def keysIterator: Iterator[Long] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapKeyIterator(this)
+ }
+
+ /**
+ * Loop over the keys of the map. The same as keys.foreach(f), but may
+ * be more efficient.
+ *
+ * @param f The loop body
+ */
+ final def foreachKey[U](f: Long => U): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachKey(f); right.foreachKey(f) }
+ case LongMap.Tip(key, _) => f(key)
+ case LongMap.Nil =>
+ }
+
+ override def valuesIterator: Iterator[T] = this match {
+ case LongMap.Nil => Iterator.empty
+ case _ => new LongMapValueIterator(this)
+ }
+
+ /**
+ * Loop over the values of the map. The same as values.foreach(f), but may
+ * be more efficient.
+ *
+ * @param f The loop body
+ */
+ final def foreachValue[U](f: T => U): Unit = this match {
+ case LongMap.Bin(_, _, left, right) => { left.foreachValue(f); right.foreachValue(f) }
+ case LongMap.Tip(_, value) => f(value)
+ case LongMap.Nil =>
+ }
+
+ override protected[this] def className = "LongMap"
+
+ override def isEmpty = this eq LongMap.Nil
+ override def knownSize: Int = if (isEmpty) 0 else super.knownSize
+ override def filter(f: ((Long, T)) => Boolean): LongMap[T] = this match {
+ case LongMap.Bin(prefix, mask, left, right) => {
+ val (newleft, newright) = (left.filter(f), right.filter(f))
+ if ((left eq newleft) && (right eq newright)) this
+ else bin(prefix, mask, newleft, newright)
+ }
+ case LongMap.Tip(key, value) =>
+ if (f((key, value))) this
+ else LongMap.Nil
+ case LongMap.Nil => LongMap.Nil
+ }
+
+ override def transform[S](f: (Long, T) => S): LongMap[S] = this match {
+ case b@LongMap.Bin(prefix, mask, left, right) => b.bin(left.transform(f), right.transform(f))
+ case t@LongMap.Tip(key, value) => t.withValue(f(key, value))
+ case LongMap.Nil => LongMap.Nil
+ }
+
+ final override def size: Int = this match {
+ case LongMap.Nil => 0
+ case LongMap.Tip(_, _) => 1
+ case LongMap.Bin(_, _, left, right) => left.size + right.size
+ }
+
+ @tailrec
+ final def get(key: Long): Option[T] = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left.get(key) else right.get(key)
+ case LongMap.Tip(key2, value) => if (key == key2) Some(value) else None
+ case LongMap.Nil => None
+ }
+
+ @tailrec
+ final override def getOrElse[S >: T](key: Long, default: => S): S = this match {
+ case LongMap.Nil => default
+ case LongMap.Tip(key2, value) => if (key == key2) value else default
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (zero(key, mask)) left.getOrElse(key, default) else right.getOrElse(key, default)
+ }
+
+ @tailrec
+ final override def apply(key: Long): T = this match {
+ case LongMap.Bin(prefix, mask, left, right) => if (zero(key, mask)) left(key) else right(key)
+    case LongMap.Tip(key2, value) => if (key == key2) value else throw new IllegalArgumentException("key not found")
+    case LongMap.Nil => throw new IllegalArgumentException("key not found")
+ }
+
+ override def + [S >: T] (kv: (Long, S)): LongMap[S] = updated(kv._1, kv._2)
+
+ override def updated[S >: T](key: Long, value: S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updated(key, value), right)
+ else LongMap.Bin(prefix, mask, left, right.updated(key, value))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, value)
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
+ }
+
+ /**
+ * Updates the map, using the provided function to resolve conflicts if the key is already present.
+ *
+ * Equivalent to
+ * {{{
+ * this.get(key) match {
+ * case None => this.update(key, value)
+   *   case Some(oldvalue) => this.update(key, f(oldvalue, value))
+ * }
+ * }}}
+ *
+ * @tparam S The supertype of values in this `LongMap`.
+ * @param key The key to update.
+ * @param value The value to use if there is no conflict.
+ * @param f The function used to resolve conflicts.
+ * @return The updated map.
+ */
+ def updateWith[S >: T](key: Long, value: S, f: (T, S) => S): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) join(key, LongMap.Tip(key, value), prefix, this)
+ else if (zero(key, mask)) LongMap.Bin(prefix, mask, left.updateWith(key, value, f), right)
+ else LongMap.Bin(prefix, mask, left, right.updateWith(key, value, f))
+ case LongMap.Tip(key2, value2) =>
+ if (key == key2) LongMap.Tip(key, f(value2, value))
+ else join(key, LongMap.Tip(key, value), key2, this)
+ case LongMap.Nil => LongMap.Tip(key, value)
+ }
+
+ def removed(key: Long): LongMap[T] = this match {
+ case LongMap.Bin(prefix, mask, left, right) =>
+ if (!hasMatch(key, prefix, mask)) this
+ else if (zero(key, mask)) bin(prefix, mask, left - key, right)
+ else bin(prefix, mask, left, right - key)
+ case LongMap.Tip(key2, _) =>
+ if (key == key2) LongMap.Nil
+ else this
+ case LongMap.Nil => LongMap.Nil
+ }
+
+ /**
+   * A combined transform and filter function. Returns a `LongMap` such that
+   * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+   * the map contains no mapping for `key`, and if `f(key, value) == Some(x)`
+   * the map contains `(key, x)`.
+ *
+ * @tparam S The type of the values in the resulting `LongMap`.
+ * @param f The transforming function.
+ * @return The modified map.
+ */
+ def modifyOrRemove[S](f: (Long, T) => Option[S]): LongMap[S] = this match {
+ case LongMap.Bin(prefix, mask, left, right) => {
+ val newleft = left.modifyOrRemove(f)
+ val newright = right.modifyOrRemove(f)
+ if ((left eq newleft) && (right eq newright)) this.asInstanceOf[LongMap[S]]
+ else bin(prefix, mask, newleft, newright)
+ }
+ case LongMap.Tip(key, value) => f(key, value) match {
+ case None => LongMap.Nil
+ case Some(value2) =>
+ //hack to preserve sharing
+ if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap[S]]
+ else LongMap.Tip(key, value2)
+ }
+ case LongMap.Nil => LongMap.Nil
+ }
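+
+  // Illustrative, for an assumed m: LongMap[Int] -- keep only even keys, doubling
+  // their values:
+  //   m.modifyOrRemove((k, v) => if (k % 2 == 0) Some(2 * v) else None)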
+
+ /**
+ * Forms a union map with that map, using the combining function to resolve conflicts.
+ *
+ * @tparam S The type of values in `that`, a supertype of values in `this`.
+ * @param that The map to form a union with.
+ * @param f The function used to resolve conflicts between two mappings.
+ * @return Union of `this` and `that`, with identical key conflicts resolved using the function `f`.
+ */
+ def unionWith[S >: T](that: LongMap[S], f: (Long, S, S) => S): LongMap[S] = (this, that) match{
+ case (LongMap.Bin(p1, m1, l1, r1), that@(LongMap.Bin(p2, m2, l2, r2))) =>
+ if (shorter(m1, m2)) {
+ if (!hasMatch(p2, p1, m1)) join(p1, this, p2, that)
+ else if (zero(p2, m1)) LongMap.Bin(p1, m1, l1.unionWith(that, f), r1)
+ else LongMap.Bin(p1, m1, l1, r1.unionWith(that, f))
+ } else if (shorter(m2, m1)){
+ if (!hasMatch(p1, p2, m2)) join(p1, this, p2, that)
+ else if (zero(p1, m2)) LongMap.Bin(p2, m2, this.unionWith(l2, f), r2)
+ else LongMap.Bin(p2, m2, l2, this.unionWith(r2, f))
+ }
+ else {
+ if (p1 == p2) LongMap.Bin(p1, m1, l1.unionWith(l2,f), r1.unionWith(r2, f))
+ else join(p1, this, p2, that)
+ }
+ case (LongMap.Tip(key, value), x) => x.updateWith(key, value, (x, y) => f(key, y, x))
+ case (x, LongMap.Tip(key, value)) => x.updateWith[S](key, value, (x, y) => f(key, x, y))
+ case (LongMap.Nil, x) => x
+ case (x, LongMap.Nil) => x
+ }
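+
+  // Illustrative: conflicts are resolved by f, non-conflicting keys pass through:
+  //   LongMap(1L -> "a").unionWith(LongMap(1L -> "b", 2L -> "c"), (_, x, y) => x + y)
+  //   // == LongMap(1L -> "ab", 2L -> "c")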
+
+ /**
+  /**
+   * Forms the intersection of these two maps with a combining function. The
+   * resulting map contains only keys present in both maps; its values are
+   * produced from the original mappings by combining them with `f`.
+ *
+ * @tparam S The type of values in `that`.
+ * @tparam R The type of values in the resulting `LongMap`.
+ * @param that The map to intersect with.
+ * @param f The combining function.
+ * @return Intersection of `this` and `that`, with values for identical keys produced by function `f`.
+ */
+ def intersectionWith[S, R](that: LongMap[S], f: (Long, T, S) => R): LongMap[R] = (this, that) match {
+ case (LongMap.Bin(p1, m1, l1, r1), that@LongMap.Bin(p2, m2, l2, r2)) =>
+ if (shorter(m1, m2)) {
+ if (!hasMatch(p2, p1, m1)) LongMap.Nil
+ else if (zero(p2, m1)) l1.intersectionWith(that, f)
+ else r1.intersectionWith(that, f)
+ } else if (m1 == m2) bin(p1, m1, l1.intersectionWith(l2, f), r1.intersectionWith(r2, f))
+ else {
+ if (!hasMatch(p1, p2, m2)) LongMap.Nil
+ else if (zero(p1, m2)) this.intersectionWith(l2, f)
+ else this.intersectionWith(r2, f)
+ }
+ case (LongMap.Tip(key, value), that) => that.get(key) match {
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value, value2))
+ }
+ case (_, LongMap.Tip(key, value)) => this.get(key) match {
+ case None => LongMap.Nil
+ case Some(value2) => LongMap.Tip(key, f(key, value2, value))
+ }
+ case (_, _) => LongMap.Nil
+ }
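+
+  // Illustrative: only the shared key survives, combined by f:
+  //   LongMap(1L -> 10, 2L -> 20).intersectionWith(LongMap(2L -> 3), (_, v, w) => v * w)
+  //   // == LongMap(2L -> 60)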
+
+ /**
+   * Left-biased intersection. Returns the map that has all the same mappings as this but only for keys
+ * which are present in the other map.
+ *
+ * @tparam R The type of values in `that`.
+ * @param that The map to intersect with.
+ * @return A map with all the keys both in `this` and `that`, mapped to corresponding values from `this`.
+ */
+ def intersection[R](that: LongMap[R]): LongMap[T] =
+ this.intersectionWith(that, (key: Long, value: T, value2: R) => value)
+
+ def ++[S >: T](that: LongMap[S]) =
+ this.unionWith[S](that, (key, x, y) => y)
+
+ @tailrec
+ final def firstKey: Long = this match {
+ case LongMap.Bin(_, _, l, r) => l.firstKey
+ case LongMap.Tip(k, v) => k
+ case LongMap.Nil => throw new IllegalStateException("Empty set")
+ }
+
+ @tailrec
+ final def lastKey: Long = this match {
+ case LongMap.Bin(_, _, l, r) => r.lastKey
+ case LongMap.Tip(k , v) => k
+ case LongMap.Nil => throw new IllegalStateException("Empty set")
+ }
+
+ def map[V2](f: ((Long, T)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f))
+
+ def flatMap[V2](f: ((Long, T)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f))
+
+ override def concat[V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] =
+ super.concat(that).asInstanceOf[LongMap[V1]] // Already has correct type but not declared as such
+
+ override def ++ [V1 >: T](that: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(that)
+
+ def collect[V2](pf: PartialFunction[(Long, T), (Long, V2)]): LongMap[V2] =
+ strictOptimizedCollect(LongMap.newBuilder[V2], pf)
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[T](LongMap), this)
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/Map.scala b/tests/pos-special/stdlib/collection/immutable/Map.scala
new file mode 100644
index 000000000000..6daad829bf55
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Map.scala
@@ -0,0 +1,694 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.generic.DefaultSerializable
+import scala.collection.immutable.Map.Map4
+import scala.collection.mutable.{Builder, ReusableBuilder}
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** Base type of immutable Maps */
+trait Map[K, +V]
+ extends Iterable[(K, V)]
+ with collection.Map[K, V]
+ with MapOps[K, V, Map, Map[K, V]]
+ with MapFactoryDefaults[K, V, Map, Iterable] {
+
+ override def mapFactory: scala.collection.MapFactory[Map] = Map
+
+ override final def toMap[K2, V2](implicit ev: (K, V) <:< (K2, V2)): Map[K2, V2] = this.asInstanceOf[Map[K2, V2]]
+
+ /** The same map with a given default function.
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+ * are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefault[V1 >: V](d: K -> V1): Map[K, V1] = new Map.WithDefault[K, V1](this, d)
+
+ /** The same map with a given default value.
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+ * are not affected by `withDefaultValue`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d default value used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefaultValue[V1 >: V](d: V1): Map[K, V1] = new Map.WithDefault[K, V1](this, _ => d)
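+
+  // Illustrative: only apply consults the default; get still reports absence:
+  //   val m = Map("a" -> 1).withDefaultValue(0)
+  //   m("b")      // 0
+  //   m.get("b")  // None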
+}
+
+/** Base trait of immutable Maps implementations
+ *
+ * @define coll immutable map
+ * @define Coll `immutable.Map`
+ */
+trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]]
+ extends IterableOps[(K, V), Iterable, C]
+ with collection.MapOps[K, V, CC, C] {
+
+ protected def coll: C with CC[K, V]
+
+ /** Removes a key from this map, returning a new map.
+ *
+ * @param key the key to be removed
+ * @return a new map without a binding for ''key''
+ */
+ def removed(key: K): C
+
+ /** Alias for `removed` */
+ @`inline` final def - (key: K): C = removed(key)
+
+ @deprecated("Use -- with an explicit collection", "2.13.0")
+ def - (key1: K, key2: K, keys: K*): C = removed(key1).removed(key2).removedAll(keys)
+
+ /** Creates a new $coll from this $coll by removing all elements of another
+ * collection.
+ *
+ * $willForceEvaluation
+ *
+   * @param keys the collection containing the keys to remove.
+   * @return a new $coll that contains all bindings of the current $coll
+   * except those whose key is contained in `keys`.
+ */
+ def removedAll(keys: IterableOnce[K]^): C = keys.iterator.foldLeft[C](coll)(_ - _)
+
+ /** Alias for `removedAll` */
+ @`inline` final override def -- (keys: IterableOnce[K]^): C = removedAll(keys)
+
+ /** Creates a new map obtained by updating this map with a given key/value pair.
+ * @param key the key
+ * @param value the value
+ * @tparam V1 the type of the added value
+ * @return A new map with the new key/value mapping added to this map.
+ */
+ def updated[V1 >: V](key: K, value: V1): CC[K, V1]
+
+ /**
+ * Update a mapping for the specified key and its current optionally-mapped value
+   * (`Some` if there is a current mapping, `None` if not).
+ *
+ * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`.
+ * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent).
+ * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged.
+ *
+ * @param key the key value
+   * @param remappingFunction a partial function that receives the current optionally-mapped value and returns a new mapping
+ * @return A new map with the updated mapping with the key
+ */
+ def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K,V1] = {
+ val previousValue = this.get(key)
+ remappingFunction(previousValue) match {
+ case None => previousValue.fold(coll)(_ => this.removed(key).coll)
+ case Some(nextValue) =>
+ if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll
+ else coll.updated(key, nextValue)
+ }
+ }
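+
+  // Illustrative:
+  //   Map("a" -> 1).updatedWith("a")(_.map(_ + 1))   // Map("a" -> 2)
+  //   Map("a" -> 1).updatedWith("a")(_ => None)      // Map()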
+
+ /**
+ * Alias for `updated`
+ *
+ * @param kv the key/value pair.
+ * @tparam V1 the type of the value in the key/value pair.
+ * @return A new map with the new binding added to this map.
+ */
+ override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2)
+
+ /** This function transforms all the values of mappings contained
+ * in this map with function `f`.
+ *
+ * @param f A function over keys and values
+ * @return the updated map
+ */
+ def transform[W](f: (K, V) => W): CC[K, W] = map { case (k, v) => (k, f(k, v)) }
+
+ override def keySet: Set[K] = new ImmutableKeySet
+
+ /** The implementation class of the set returned by `keySet` */
+ protected class ImmutableKeySet extends AbstractSet[K] with GenKeySet with DefaultSerializable {
+ def incl(elem: K): Set[K] = if (this(elem)) this else empty ++ this + elem
+ def excl(elem: K): Set[K] = if (this(elem)) empty ++ this - elem else this
+ }
+
+}
+
+trait StrictOptimizedMapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]]
+ extends MapOps[K, V, CC, C]
+ with collection.StrictOptimizedMapOps[K, V, CC, C]
+ with StrictOptimizedIterableOps[(K, V), Iterable, C] {
+
+ override def concat [V1 >: V](that: collection.IterableOnce[(K, V1)]^): CC[K, V1] = {
+ var result: CC[K, V1] = coll
+ val it = that.iterator
+ while (it.hasNext) result = result + it.next()
+ result
+ }
+}
+
+
+/**
+ * $factoryInfo
+ * @define coll immutable map
+ * @define Coll `immutable.Map`
+ */
+@SerialVersionUID(3L)
+object Map extends MapFactory[Map] {
+
+ @SerialVersionUID(3L)
+ class WithDefault[K, +V](val underlying: Map[K, V], val defaultValue: K -> V)
+ extends AbstractMap[K, V]
+ with MapOps[K, V, Map, WithDefault[K, V]] with Serializable {
+
+ def get(key: K): Option[V] = underlying.get(key)
+
+ override def default(key: K): V = defaultValue(key)
+
+ override def iterableFactory: IterableFactory[Iterable] = underlying.iterableFactory
+
+ def iterator: Iterator[(K, V)] = underlying.iterator
+
+ override def isEmpty: Boolean = underlying.isEmpty
+
+ override def mapFactory: MapFactory[Map] = underlying.mapFactory
+
+ override def concat [V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] =
+ new WithDefault(underlying.concat(xs), defaultValue)
+
+ def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue)
+
+ def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] =
+ new WithDefault[K, V1](underlying.updated(key, value), defaultValue)
+
+ override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue)
+
+ override protected def fromSpecific(coll: collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] =
+ new WithDefault[K, V](mapFactory.from(coll), defaultValue)
+
+ override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance =
+ Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue))
+ }
+
+ def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]]
+
+ def from[K, V](it: collection.IterableOnce[(K, V)]^): Map[K, V] =
+ it match {
+ case it: Iterable[_] if it.isEmpty => empty[K, V]
+ case m: Map[K, V] => m
+ case _ => (newBuilder[K, V] ++= it).result()
+ }
+
+ def newBuilder[K, V]: Builder[(K, V), Map[K, V]] = new MapBuilderImpl
+
+ @SerialVersionUID(3L)
+ private object EmptyMap extends AbstractMap[Any, Nothing] with Serializable {
+ override def size: Int = 0
+ override def knownSize: Int = 0
+ override def isEmpty: Boolean = true
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
+ def get(key: Any): Option[Nothing] = None
+ override def getOrElse [V1](key: Any, default: => V1): V1 = default
+ def iterator: Iterator[(Any, Nothing)] = Iterator.empty
+ override def keysIterator: Iterator[Any] = Iterator.empty
+ override def valuesIterator: Iterator[Nothing] = Iterator.empty
+ def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value)
+ def removed(key: Any): Map[Any, Nothing] = this
+ override def concat[V2 >: Nothing](suffix: IterableOnce[(Any, V2)]^): Map[Any, V2] = suffix match {
+ case m: immutable.Map[Any, V2] => m
+ case _ => super.concat(suffix)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable {
+ override def size: Int = 1
+ override def knownSize: Int = 1
+ override def isEmpty: Boolean = false
+ override def apply(key: K): V = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K): Boolean = key == key1
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1) else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1 else default
+ def iterator: Iterator[(K, V)] = Iterator.single((key1, value1))
+ override def keysIterator: Iterator[K] = Iterator.single(key1)
+ override def valuesIterator: Iterator[V] = Iterator.single(value1)
+ def updated[V1 >: V](key: K, value: V1): Map[K, V1] =
+ if (key == key1) new Map1(key1, value)
+ else new Map2(key1, value1, key, value)
+ def removed(key: K): Map[K, V] =
+ if (key == key1) Map.empty else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1))
+ }
+ override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1))
+ override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1))
+ override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] =
+ if (pred((key1, value1)) != isFlipped) this else Map.empty
+ override def transform[W](f: (K, V) => W): Map[K, W] = {
+ val walue1 = f(key1, value1)
+ if (walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) this.asInstanceOf[Map[K, W]]
+ else new Map1(key1, walue1)
+ }
+ override def hashCode(): Int = {
+ import scala.util.hashing.MurmurHash3
+ var a, b = 0
+ val N = 1
+ var c = 1
+
+ var h = MurmurHash3.tuple2Hash(key1, value1)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.mapSeed
+ h = MurmurHash3.mix(h, a)
+ h = MurmurHash3.mix(h, b)
+ h = MurmurHash3.mixLast(h, c)
+ MurmurHash3.finalizeHash(h, N)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable {
+ override def size: Int = 2
+ override def knownSize: Int = 2
+ override def isEmpty: Boolean = false
+ override def apply(key: K): V =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K): Boolean = (key == key1) || (key == key2)
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1)
+ else if (key == key2) Some(value2)
+ else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else default
+ def iterator: Iterator[(K, V)] = new Map2Iterator[(K, V)] {
+ override protected def nextResult(k: K, v: V): (K, V) = (k, v)
+ }
+ override def keysIterator: Iterator[K] = new Map2Iterator[K] {
+ override protected def nextResult(k: K, v: V): K = k
+ }
+ override def valuesIterator: Iterator[V] = new Map2Iterator[V] {
+ override protected def nextResult(k: K, v: V): V = v
+ }
+
+ private abstract class Map2Iterator[A] extends AbstractIterator[A], Pure {
+ private[this] var i = 0
+ override def hasNext: Boolean = i < 2
+ override def next(): A = {
+ val result = i match {
+ case 0 => nextResult(key1, value1)
+ case 1 => nextResult(key2, value2)
+ case _ => Iterator.empty.next()
+ }
+ i += 1
+ result
+ }
+ override def drop(n: Int): Iterator[A] = { i += n; this }
+ protected def nextResult(k: K, v: V @uncheckedVariance): A
+ }
+ def updated[V1 >: V](key: K, value: V1): Map[K, V1] =
+ if (key == key1) new Map2(key1, value, key2, value2)
+ else if (key == key2) new Map2(key1, value1, key2, value)
+ else new Map3(key1, value1, key2, value2, key, value)
+ def removed(key: K): Map[K, V] =
+ if (key == key1) new Map1(key2, value2)
+ else if (key == key2) new Map1(key1, value1)
+ else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1)); f((key2, value2))
+ }
+ override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2))
+ override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2))
+ override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = {
+ var k1 = null.asInstanceOf[K]
+ var v1 = null.asInstanceOf[V]
+ var n = 0
+      if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1}
+      if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 }; n += 1}
+
+ n match {
+ case 0 => Map.empty
+ case 1 => new Map1(k1, v1)
+ case 2 => this
+ }
+ }
+ override def transform[W](f: (K, V) => W): Map[K, W] = {
+ val walue1 = f(key1, value1)
+ val walue2 = f(key2, value2)
+ if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) &&
+ (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]]
+ else new Map2(key1, walue1, key2, walue2)
+ }
+ override def hashCode(): Int = {
+ import scala.util.hashing.MurmurHash3
+ var a, b = 0
+ val N = 2
+ var c = 1
+
+ var h = MurmurHash3.tuple2Hash(key1, value1)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.tuple2Hash(key2, value2)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.mapSeed
+ h = MurmurHash3.mix(h, a)
+ h = MurmurHash3.mix(h, b)
+ h = MurmurHash3.mixLast(h, c)
+ MurmurHash3.finalizeHash(h, N)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable {
+ override def size: Int = 3
+ override def knownSize: Int = 3
+ override def isEmpty: Boolean = false
+ override def apply(key: K): V =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3)
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1)
+ else if (key == key2) Some(value2)
+ else if (key == key3) Some(value3)
+ else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else default
+ def iterator: Iterator[(K, V)] = new Map3Iterator[(K, V)] {
+ override protected def nextResult(k: K, v: V): (K, V) = (k, v)
+ }
+ override def keysIterator: Iterator[K] = new Map3Iterator[K] {
+ override protected def nextResult(k: K, v: V): K = k
+ }
+ override def valuesIterator: Iterator[V] = new Map3Iterator[V] {
+ override protected def nextResult(k: K, v: V): V = v
+ }
+
+ private abstract class Map3Iterator[A] extends AbstractIterator[A], Pure {
+ private[this] var i = 0
+ override def hasNext: Boolean = i < 3
+ override def next(): A = {
+ val result = i match {
+ case 0 => nextResult(key1, value1)
+ case 1 => nextResult(key2, value2)
+ case 2 => nextResult(key3, value3)
+ case _ => Iterator.empty.next()
+ }
+ i += 1
+ result
+ }
+ override def drop(n: Int): Iterator[A] = { i += n; this }
+ protected def nextResult(k: K, v: V @uncheckedVariance): A
+ }
+ def updated[V1 >: V](key: K, value: V1): Map[K, V1] =
+ if (key == key1) new Map3(key1, value, key2, value2, key3, value3)
+ else if (key == key2) new Map3(key1, value1, key2, value, key3, value3)
+ else if (key == key3) new Map3(key1, value1, key2, value2, key3, value)
+ else new Map4(key1, value1, key2, value2, key3, value3, key, value)
+ def removed(key: K): Map[K, V] =
+ if (key == key1) new Map2(key2, value2, key3, value3)
+ else if (key == key2) new Map2(key1, value1, key3, value3)
+ else if (key == key3) new Map2(key1, value1, key2, value2)
+ else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1)); f((key2, value2)); f((key3, value3))
+ }
+ override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3))
+ override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3))
+ override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = {
+ var k1, k2 = null.asInstanceOf[K]
+ var v1, v2 = null.asInstanceOf[V]
+ var n = 0
+ if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1}
+ if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1}
+ if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 }; n += 1}
+
+ n match {
+ case 0 => Map.empty
+ case 1 => new Map1(k1, v1)
+ case 2 => new Map2(k1, v1, k2, v2)
+ case 3 => this
+ }
+ }
+ override def transform[W](f: (K, V) => W): Map[K, W] = {
+ val walue1 = f(key1, value1)
+ val walue2 = f(key2, value2)
+ val walue3 = f(key3, value3)
+ if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) &&
+ (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) &&
+ (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]]
+ else new Map3(key1, walue1, key2, walue2, key3, walue3)
+ }
+ override def hashCode(): Int = {
+ import scala.util.hashing.MurmurHash3
+ var a, b = 0
+ val N = 3
+ var c = 1
+
+ var h = MurmurHash3.tuple2Hash(key1, value1)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.tuple2Hash(key2, value2)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.tuple2Hash(key3, value3)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.mapSeed
+ h = MurmurHash3.mix(h, a)
+ h = MurmurHash3.mix(h, b)
+ h = MurmurHash3.mixLast(h, c)
+ MurmurHash3.finalizeHash(h, N)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V)
+ extends AbstractMap[K, V] with StrictOptimizedIterableOps[(K, V), Iterable, Map[K, V]] with Serializable {
+
+ override def size: Int = 4
+ override def knownSize: Int = 4
+ override def isEmpty: Boolean = false
+ override def apply(key: K): V =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K): Boolean = (key == key1) || (key == key2) || (key == key3) || (key == key4)
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1)
+ else if (key == key2) Some(value2)
+ else if (key == key3) Some(value3)
+ else if (key == key4) Some(value4)
+ else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else default
+ def iterator: Iterator[(K, V)] = new Map4Iterator[(K, V)] {
+ override protected def nextResult(k: K, v: V): (K, V) = (k, v)
+ }
+ override def keysIterator: Iterator[K] = new Map4Iterator[K] {
+ override protected def nextResult(k: K, v: V): K = k
+ }
+ override def valuesIterator: Iterator[V] = new Map4Iterator[V] {
+ override protected def nextResult(k: K, v: V): V = v
+ }
+
+ private abstract class Map4Iterator[A] extends AbstractIterator[A], Pure {
+ private[this] var i = 0
+ override def hasNext: Boolean = i < 4
+ override def next(): A = {
+ val result = i match {
+ case 0 => nextResult(key1, value1)
+ case 1 => nextResult(key2, value2)
+ case 2 => nextResult(key3, value3)
+ case 3 => nextResult(key4, value4)
+ case _ => Iterator.empty.next()
+ }
+ i += 1
+ result
+ }
+ override def drop(n: Int): Iterator[A] = { i += n; this }
+ protected def nextResult(k: K, v: V @uncheckedVariance): A
+ }
+ def updated[V1 >: V](key: K, value: V1): Map[K, V1] =
+ if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4)
+ else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4)
+ else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4)
+ else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value)
+ else HashMap.empty[K, V1].updated(key1,value1).updated(key2, value2).updated(key3, value3).updated(key4, value4).updated(key, value)
+ def removed(key: K): Map[K, V] =
+ if (key == key1) new Map3(key2, value2, key3, value3, key4, value4)
+ else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4)
+ else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4)
+ else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3)
+ else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4))
+ }
+ override def exists(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) || p((key2, value2)) || p((key3, value3)) || p((key4, value4))
+ override def forall(p: ((K, V)) => Boolean): Boolean = p((key1, value1)) && p((key2, value2)) && p((key3, value3)) && p((key4, value4))
+ override protected[collection] def filterImpl(pred: ((K, V)) => Boolean, isFlipped: Boolean): Map[K, V] = {
+ var k1, k2, k3 = null.asInstanceOf[K]
+ var v1, v2, v3 = null.asInstanceOf[V]
+ var n = 0
+ if (pred((key1, value1)) != isFlipped) { { k1 = key1; v1 = value1 }; n += 1}
+ if (pred((key2, value2)) != isFlipped) { if (n == 0) { k1 = key2; v1 = value2 } else { k2 = key2; v2 = value2 }; n += 1}
+ if (pred((key3, value3)) != isFlipped) { if (n == 0) { k1 = key3; v1 = value3 } else if (n == 1) { k2 = key3; v2 = value3 } else { k3 = key3; v3 = value3}; n += 1}
+ if (pred((key4, value4)) != isFlipped) { if (n == 0) { k1 = key4; v1 = value4 } else if (n == 1) { k2 = key4; v2 = value4 } else if (n == 2) { k3 = key4; v3 = value4 }; n += 1}
+
+ n match {
+ case 0 => Map.empty
+ case 1 => new Map1(k1, v1)
+ case 2 => new Map2(k1, v1, k2, v2)
+ case 3 => new Map3(k1, v1, k2, v2, k3, v3)
+ case 4 => this
+ }
+ }
+ override def transform[W](f: (K, V) => W): Map[K, W] = {
+ val walue1 = f(key1, value1)
+ val walue2 = f(key2, value2)
+ val walue3 = f(key3, value3)
+ val walue4 = f(key4, value4)
+ if ((walue1.asInstanceOf[AnyRef] eq value1.asInstanceOf[AnyRef]) &&
+ (walue2.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) &&
+ (walue3.asInstanceOf[AnyRef] eq value3.asInstanceOf[AnyRef]) &&
+ (walue4.asInstanceOf[AnyRef] eq value4.asInstanceOf[AnyRef])) this.asInstanceOf[Map[K, W]]
+ else new Map4(key1, walue1, key2, walue2, key3, walue3, key4, walue4)
+ }
+ private[immutable] def buildTo[V1 >: V](builder: HashMapBuilder[K, V1]): builder.type =
+ builder.addOne(key1, value1).addOne(key2, value2).addOne(key3, value3).addOne(key4, value4)
+ override def hashCode(): Int = {
+ import scala.util.hashing.MurmurHash3
+ var a, b = 0
+ val N = 4
+ var c = 1
+
+ var h = MurmurHash3.tuple2Hash(key1, value1)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.tuple2Hash(key2, value2)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.tuple2Hash(key3, value3)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.tuple2Hash(key4, value4)
+ a += h
+ b ^= h
+ c *= h | 1
+
+ h = MurmurHash3.mapSeed
+ h = MurmurHash3.mix(h, a)
+ h = MurmurHash3.mix(h, b)
+ h = MurmurHash3.mixLast(h, c)
+ MurmurHash3.finalizeHash(h, N)
+ }
+ }
+}
+
+/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
+abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V]
+
+private[immutable] final class MapBuilderImpl[K, V] extends ReusableBuilder[(K, V), Map[K, V]] {
+ private[this] var elems: Map[K, V] @uncheckedCaptures = Map.empty
+ private[this] var switchedToHashMapBuilder: Boolean = false
+ private[this] var hashMapBuilder: HashMapBuilder[K, V] @uncheckedCaptures = _
+
+ private[immutable] def getOrElse[V0 >: V](key: K, value: V0): V0 =
+ if (hashMapBuilder ne null) hashMapBuilder.getOrElse(key, value)
+ else elems.getOrElse(key, value)
+
+ override def clear(): Unit = {
+ elems = Map.empty
+ if (hashMapBuilder != null) {
+ hashMapBuilder.clear()
+ }
+ switchedToHashMapBuilder = false
+ }
+
+ override def result(): Map[K, V] =
+ if (switchedToHashMapBuilder) hashMapBuilder.result() else elems
+
+ def addOne(key: K, value: V): this.type = {
+ if (switchedToHashMapBuilder) {
+ hashMapBuilder.addOne(key, value)
+ } else if (elems.size < 4) {
+ elems = elems.updated(key, value)
+ } else {
+ // assert(elems.size == 4)
+ if (elems.contains(key)) {
+ elems = elems.updated(key, value)
+ } else {
+ switchedToHashMapBuilder = true
+ if (hashMapBuilder == null) {
+ hashMapBuilder = new HashMapBuilder
+ }
+ elems.asInstanceOf[Map4[K, V]].buildTo(hashMapBuilder)
+ hashMapBuilder.addOne(key, value)
+ }
+ }
+
+ this
+ }
+
+ def addOne(elem: (K, V)) = addOne(elem._1, elem._2)
+
+ override def addAll(xs: IterableOnce[(K, V)]^): this.type =
+ if (switchedToHashMapBuilder) {
+ hashMapBuilder.addAll(xs)
+ this
+ } else {
+ super.addAll(xs)
+ }
+}
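
A brief usage sketch of the builder behavior above: `MapBuilderImpl` stays on the size-specialized small maps up to four entries and switches to a `HashMapBuilder` on the fifth distinct key. The `MapBuilderDemo` object is hypothetical and uses only the public `immutable.Map` API; the class names are internal, so they are observed here via reflection (typical output on a 2.13-compatible stdlib shown in comments).

    import scala.collection.immutable.Map

    object MapBuilderDemo {
      def main(args: Array[String]): Unit = {
        val small = Map.newBuilder[Int, String]
        (1 to 4).foreach(i => small += (i -> i.toString))
        println(small.result().getClass.getSimpleName) // typically Map4: still size-specialized

        val large = Map.newBuilder[Int, String]
        (1 to 5).foreach(i => large += (i -> i.toString))
        println(large.result().getClass.getSimpleName) // typically HashMap: builder has switched
      }
    }
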
diff --git a/tests/pos-special/stdlib/collection/immutable/NumericRange.scala b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala
new file mode 100644
index 000000000000..f26d9728e5ad
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/NumericRange.scala
@@ -0,0 +1,509 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.immutable
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape}
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** `NumericRange` is a more generic version of the
+ * `Range` class which works with arbitrary types.
+ * It must be supplied with an `Integral` implementation of the
+ * range type.
+ *
+ * Factories for likely types include `Range.BigInt`, `Range.Long`,
+ * and `Range.BigDecimal`. `Range.Int` exists for completeness, but
+ * the `Int`-based `scala.Range` should be more performant.
+ *
+ * {{{
+ * val r1 = Range(0, 100, 1)
+ * val veryBig = Int.MaxValue.toLong + 1
+ * val r2 = Range.Long(veryBig, veryBig + 100, 1)
+ * assert(r1 sameElements r2.map(_ - veryBig))
+ * }}}
+ *
+ * @define Coll `NumericRange`
+ * @define coll numeric range
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@SerialVersionUID(3L)
+sealed class NumericRange[T](
+ val start: T,
+ val end: T,
+ val step: T,
+ val isInclusive: Boolean
+)(implicit
+ num: Integral[T]
+)
+ extends AbstractSeq[T]
+ with IndexedSeq[T]
+ with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]]
+ with StrictOptimizedSeqOps[T, IndexedSeq, IndexedSeq[T]]
+ with IterableFactoryDefaults[T, IndexedSeq]
+ with Serializable { self =>
+
+ override def iterator: Iterator[T] = new NumericRange.NumericRangeIterator(this, num)
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = {
+ import scala.collection.convert._
+ import impl._
+ val s = shape.shape match {
+ case StepperShape.IntShape => new IntNumericRangeStepper (this.asInstanceOf[NumericRange[Int]], 0, length)
+ case StepperShape.LongShape => new LongNumericRangeStepper (this.asInstanceOf[NumericRange[Long]], 0, length)
+ case _ => shape.parUnbox(new AnyNumericRangeStepper[T](this, 0, length).asInstanceOf[AnyStepper[T] with EfficientSplit])
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+
+ /** Note that NumericRange must be invariant so that constructs
+ * such as "1L to 10 by 5" do not infer the range type as AnyVal.
+ */
+ import num._
+
+ // See comment in Range for why this must be lazy.
+ override lazy val length: Int = NumericRange.count(start, end, step, isInclusive)
+ override lazy val isEmpty: Boolean = (
+ (num.gt(start, end) && num.gt(step, num.zero))
+ || (num.lt(start, end) && num.lt(step, num.zero))
+ || (num.equiv(start, end) && !isInclusive)
+ )
+ override def last: T =
+ if (isEmpty) Nil.head
+ else locationAfterN(length - 1)
+ override def init: NumericRange[T] =
+ if (isEmpty) Nil.init
+ else new NumericRange(start, end - step, step, isInclusive)
+
+ override def head: T = if (isEmpty) Nil.head else start
+ override def tail: NumericRange[T] =
+ if (isEmpty) Nil.tail
+ else if(isInclusive) new NumericRange.Inclusive(start + step, end, step)
+ else new NumericRange.Exclusive(start + step, end, step)
+
+ /** Create a new range with the start and end values of this range and
+ * a new `step`.
+ */
+ def by(newStep: T): NumericRange[T] = copy(start, end, newStep)
+
+
+ /** Create a copy of this range.
+ */
+ def copy(start: T, end: T, step: T): NumericRange[T] =
+ new NumericRange(start, end, step, isInclusive)
+
+ @throws[IndexOutOfBoundsException]
+ def apply(idx: Int): T = {
+ if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${length - 1})")
+ else locationAfterN(idx)
+ }
+
+ override def foreach[@specialized(Specializable.Unit) U](f: T => U): Unit = {
+ var count = 0
+ var current = start
+ while (count < length) {
+ f(current)
+ current += step
+ count += 1
+ }
+ }
+
+ // TODO: these private methods are straight copies from Range, duplicated
+ // to guard against any (most likely illusory) performance drop. They should
+ // be eliminated one way or another.
+
+ // Tests whether a number is within the endpoints, without testing
+  // whether it is a member of the sequence (i.e., when step > 1).
+ private def isWithinBoundaries(elem: T) = !isEmpty && (
+ (step > zero && start <= elem && elem <= last ) ||
+ (step < zero && last <= elem && elem <= start)
+ )
+ // Methods like apply throw exceptions on invalid n, but methods like take/drop
+ // are forgiving: therefore the checks are with the methods.
+ private def locationAfterN(n: Int): T = start + (step * fromInt(n))
+
+ private def crossesTheEndAfterN(n: Int): Boolean = {
+ // if we're sure that subtraction in the context of T won't overflow, we use this function
+ // to calculate the length of the range
+ def unsafeRangeLength(r: NumericRange[T]): T = {
+ val diff = num.minus(r.end, r.start)
+ val quotient = num.quot(diff, r.step)
+ val remainder = num.rem(diff, r.step)
+ if (!r.isInclusive && num.equiv(remainder, num.zero))
+ num.max(quotient, num.zero)
+ else
+ num.max(num.plus(quotient, num.one), num.zero)
+ }
+
+    // detects whether value survives a round trip to, and back from, Int.
+ def fitsInInteger(value: T): Boolean = num.equiv(num.fromInt(num.toInt(value)), value)
+
+ val stepIsInTheSameDirectionAsStartToEndVector =
+ (num.gt(end, start) && num.gt(step, num.zero)) || (num.lt(end, start) && num.sign(step) == -num.one)
+
+ if (num.equiv(start, end) || n <= 0 || !stepIsInTheSameDirectionAsStartToEndVector) return n >= 1
+
+ val sameSign = num.equiv(num.sign(start), num.sign(end))
+
+ if (sameSign) { // subtraction is safe
+ val len = unsafeRangeLength(this)
+ if (fitsInInteger(len)) n >= num.toInt(len) else num.gteq(num.fromInt(n), len)
+ } else {
+      // split into two ranges, around zero, in each of which subtraction is safe
+ val stepsRemainderToZero = num.rem(start, step)
+ val walksOnZero = num.equiv(stepsRemainderToZero, num.zero)
+ val closestToZero = if (walksOnZero) -step else stepsRemainderToZero
+
+      /*
+      When splitting into two ranges, we must be careful about one of the sides hitting MinValue of T,
+      so we take two steps away from zero to ensure unsafeRangeLength won't overflow (taking one step
+      may overflow, depending on the step). The same happens for MaxValue when stepping away from zero,
+      so we take one step further there as well to keep unsafeRangeLength safe. After this split, some
+      elements remain in between, around zero; their count is represented by carry.
+      */
+ val (l: NumericRange[T], r: NumericRange[T], carry: Int) =
+ if (num.lt(start, num.zero)) {
+ if (walksOnZero) {
+ val twoStepsAfterLargestNegativeNumber = num.plus(closestToZero, num.times(step, num.fromInt(2)))
+ (NumericRange(start, closestToZero, step), copy(twoStepsAfterLargestNegativeNumber, end, step), 2)
+ } else {
+ (NumericRange(start, closestToZero, step), copy(num.plus(closestToZero, step), end, step), 1)
+ }
+ } else {
+ if (walksOnZero) {
+ val twoStepsAfterZero = num.times(step, num.fromInt(2))
+ (copy(twoStepsAfterZero, end, step), NumericRange.inclusive(start, -step, step), 2)
+ } else {
+ val twoStepsAfterSmallestPositiveNumber = num.plus(closestToZero, num.times(step, num.fromInt(2)))
+ (copy(twoStepsAfterSmallestPositiveNumber, end, step), NumericRange.inclusive(start, closestToZero, step), 2)
+ }
+ }
+
+ val leftLength = unsafeRangeLength(l)
+ val rightLength = unsafeRangeLength(r)
+
+      // instead of `n >= rightLength + leftLength + carry`, which may overflow on addition,
+      // we can use `(n - leftLength - carry) >= rightLength` (in Int or in T, depending on whether the lengths fit in an Int)
+ if (fitsInInteger(leftLength) && fitsInInteger(rightLength))
+ n - num.toInt(leftLength) - carry >= num.toInt(rightLength)
+ else
+ num.gteq(num.minus(num.minus(num.fromInt(n), leftLength), num.fromInt(carry)), rightLength)
+ }
+ }
+
+ // When one drops everything. Can't ever have unchecked operations
+ // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue }
+ // will overflow. This creates an exclusive range where start == end
+ // based on the given value.
+ private def newEmptyRange(value: T) = NumericRange(value, value, step)
+
+ override def take(n: Int): NumericRange[T] = {
+ if (n <= 0 || isEmpty) newEmptyRange(start)
+ else if (crossesTheEndAfterN(n)) this
+ else new NumericRange.Inclusive(start, locationAfterN(n - 1), step)
+ }
+
+ override def drop(n: Int): NumericRange[T] = {
+ if (n <= 0 || isEmpty) this
+ else if (crossesTheEndAfterN(n)) newEmptyRange(end)
+ else copy(locationAfterN(n), end, step)
+ }
+
+ override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n))
+
+ override def reverse: NumericRange[T] =
+ if (isEmpty) this
+ else {
+ val newStep = -step
+ if (num.sign(newStep) == num.sign(step)) {
+ throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step")
+ } else new NumericRange.Inclusive(last, start, newStep)
+ }
+
+ import NumericRange.defaultOrdering
+
+ override def min[T1 >: T](implicit ord: Ordering[T1]): T =
+ // We can take the fast path:
+ // - If the Integral of this NumericRange is also the requested Ordering
+ // (Integral <: Ordering). This can happen for custom Integral types.
+ // - The Ordering is the default Ordering of a well-known Integral type.
+ if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) {
+ if (num.sign(step) > zero) head
+ else last
+ } else super.min(ord)
+
+ override def max[T1 >: T](implicit ord: Ordering[T1]): T =
+ // See comment for fast path in min().
+ if ((ord eq num) || defaultOrdering.get(num).exists(ord eq _)) {
+ if (num.sign(step) > zero) last
+ else head
+ } else super.max(ord)
+
+ // a well-typed contains method.
+ def containsTyped(x: T): Boolean =
+ isWithinBoundaries(x) && (((x - start) % step) == zero)
+
+ override def contains[A1 >: T](x: A1): Boolean =
+ try containsTyped(x.asInstanceOf[T])
+ catch { case _: ClassCastException => false }
+
+ override def sum[B >: T](implicit num: Numeric[B]): B = {
+ if (isEmpty) num.zero
+ else if (size == 1) head
+ else {
+ // If there is no overflow, use arithmetic series formula
+ // a + ... (n terms total) ... + b = n*(a+b)/2
+ if ((num eq scala.math.Numeric.IntIsIntegral)||
+ (num eq scala.math.Numeric.ShortIsIntegral)||
+ (num eq scala.math.Numeric.ByteIsIntegral)||
+ (num eq scala.math.Numeric.CharIsIntegral)) {
+ // We can do math with no overflow in a Long--easy
+ val exact = (size * ((num toLong head) + (num toInt last))) / 2
+ num fromInt exact.toInt
+ }
+ else if (num eq scala.math.Numeric.LongIsIntegral) {
+ // Uh-oh, might be overflow, so we have to divide before we overflow.
+ // Either numRangeElements or (head + last) must be even, so divide the even one before multiplying
+ val a = head.toLong
+ val b = last.toLong
+ val ans =
+ if ((size & 1) == 0) (size / 2) * (a + b)
+ else size * {
+ // Sum is even, but we might overflow it, so divide in pieces and add back remainder
+ val ha = a/2
+ val hb = b/2
+ ha + hb + ((a - 2*ha) + (b - 2*hb)) / 2
+ }
+ ans.asInstanceOf[B]
+ }
+ else if ((num eq scala.math.Numeric.BigIntIsIntegral) ||
+ (num eq scala.math.Numeric.BigDecimalAsIfIntegral)) {
+ // No overflow, so we can use arithmetic series formula directly
+ // (not going to worry about running out of memory)
+ val numAsIntegral = num.asInstanceOf[Integral[B]]
+ import numAsIntegral._
+ ((num fromInt size) * (head + last)) / (num fromInt 2)
+ }
+ else {
+ // User provided custom Numeric, so we cannot rely on arithmetic series formula (e.g. won't work on something like Z_6)
+ if (isEmpty) num.zero
+ else {
+ var acc = num.zero
+ var i = head
+ var idx = 0
+ while(idx < length) {
+ acc = num.plus(acc, i)
+ i = i + step
+ idx = idx + 1
+ }
+ acc
+ }
+ }
+ }
+ }
+
+ override lazy val hashCode: Int = super.hashCode()
+ override protected final def applyPreferredMaxLength: Int = Int.MaxValue
+
+ override def equals(other: Any): Boolean = other match {
+ case x: NumericRange[_] =>
+ (x canEqual this) && (length == x.length) && (
+ (isEmpty) || // all empty sequences are equal
+ (start == x.start && last == x.last) // same length and same endpoints implies equality
+ )
+ case _ =>
+ super.equals(other)
+ }
+
+ override def toString: String = {
+ val empty = if (isEmpty) "empty " else ""
+ val preposition = if (isInclusive) "to" else "until"
+ val stepped = if (step == 1) "" else s" by $step"
+ s"${empty}NumericRange $start $preposition $end$stepped"
+ }
+
+ override protected[this] def className = "NumericRange"
+}
+
+/** A companion object for numeric ranges.
+ * @define Coll `NumericRange`
+ * @define coll numeric range
+ */
+object NumericRange {
+ private def bigDecimalCheckUnderflow[T](start: T, end: T, step: T)(implicit num: Integral[T]): Unit = {
+ def FAIL(boundary: T, step: T): Unit = {
+ val msg = boundary match {
+ case bd: BigDecimal => s"Precision ${bd.mc.getPrecision}"
+ case _ => "Precision"
+ }
+ throw new IllegalArgumentException(
+ s"$msg inadequate to represent steps of size $step near $boundary"
+ )
+ }
+ if (num.minus(num.plus(start, step), start) != step) FAIL(start, step)
+ if (num.minus(end, num.minus(end, step)) != step) FAIL(end, step)
+ }
+
+ /** Calculates the number of elements in a range given start, end, step, and
+ * whether or not it is inclusive. Throws an exception if step == 0 or
+ * the number of elements exceeds the maximum Int.
+ */
+ def count[T](start: T, end: T, step: T, isInclusive: Boolean)(implicit num: Integral[T]): Int = {
+ val zero = num.zero
+ val upward = num.lt(start, end)
+ val posStep = num.gt(step, zero)
+
+ if (step == zero) throw new IllegalArgumentException("step cannot be 0.")
+ else if (start == end) if (isInclusive) 1 else 0
+ else if (upward != posStep) 0
+ else {
+ /* We have to be frightfully paranoid about running out of range.
+ * We also can't assume that the numbers will fit in a Long.
+ * We will assume that if a > 0, -a can be represented, and if
+ * a < 0, -a+1 can be represented. We also assume that if we
+ * can't fit in Int, we can represent 2*Int.MaxValue+3 (at least).
+ * And we assume that numbers wrap rather than cap when they overflow.
+ */
+ // Check whether we can short-circuit by deferring to Int range.
+ val startint = num.toInt(start)
+ if (start == num.fromInt(startint)) {
+ val endint = num.toInt(end)
+ if (end == num.fromInt(endint)) {
+ val stepint = num.toInt(step)
+ if (step == num.fromInt(stepint)) {
+ return {
+ if (isInclusive) Range.inclusive(startint, endint, stepint).length
+ else Range (startint, endint, stepint).length
+ }
+ }
+ }
+ }
+ // If we reach this point, deferring to Int failed.
+ // Numbers may be big.
+ if (num.isInstanceOf[Numeric.BigDecimalAsIfIntegral]) {
+ bigDecimalCheckUnderflow(start, end, step) // Throw exception if math is inaccurate (including no progress at all)
+ }
+ val one = num.one
+ val limit = num.fromInt(Int.MaxValue)
+ def check(t: T): T =
+ if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.")
+ else t
+ // If the range crosses zero, it might overflow when subtracted
+ val startside = num.sign(start)
+ val endside = num.sign(end)
+ num.toInt{
+ if (num.gteq(num.times(startside, endside), zero)) {
+ // We're sure we can subtract these numbers.
+ // Note that we do not use .rem because of different conventions for Long and BigInt
+ val diff = num.minus(end, start)
+ val quotient = check(num.quot(diff, step))
+ val remainder = num.minus(diff, num.times(quotient, step))
+ if (!isInclusive && zero == remainder) quotient else check(num.plus(quotient, one))
+ }
+ else {
+ // We might not even be able to subtract these numbers.
+ // Jump in three pieces:
+ // * start to -1 or 1, whichever is closer (waypointA)
+ // * one step, which will take us at least to 0 (ends at waypointB)
+ // * (except with really small numbers)
+ // * there to the end
+ val negone = num.fromInt(-1)
+ val startlim = if (posStep) negone else one
+        // Use the start value if it is closer to zero than startlim,
+        // e.g. 0.5 is closer to zero than 1, and -0.5 is closer to zero than -1
+ val startdiff = {
+ if ((posStep && num.lt(startlim, start)) || (!posStep && num.gt(startlim, start)))
+ start
+ else
+ num.minus(startlim, start)
+ }
+ val startq = check(num.quot(startdiff, step))
+ val waypointA = if (startq == zero) start else num.plus(start, num.times(startq, step))
+ val waypointB = num.plus(waypointA, step)
+ check {
+ if (num.lt(waypointB, end) != upward) {
+ // No last piece
+ if (isInclusive && waypointB == end) num.plus(startq, num.fromInt(2))
+ else num.plus(startq, one)
+ }
+ else {
+ // There is a last piece
+ val enddiff = num.minus(end,waypointB)
+ val endq = check(num.quot(enddiff, step))
+ val last = if (endq == zero) waypointB else num.plus(waypointB, num.times(endq, step))
+ // Now we have to tally up all the pieces
+ // 1 for the initial value
+ // startq steps to waypointA
+ // 1 step to waypointB
+ // endq steps to the end (one less if !isInclusive and last==end)
+ num.plus(startq, num.plus(endq, if (!isInclusive && last==end) one else num.fromInt(2)))
+ }
+ }
+ }
+ }
+ }
+ }
+
+ @SerialVersionUID(3L)
+ class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T])
+ extends NumericRange(start, end, step, true) {
+ override def copy(start: T, end: T, step: T): Inclusive[T] =
+ NumericRange.inclusive(start, end, step)
+
+ def exclusive: Exclusive[T] = NumericRange(start, end, step)
+ }
+
+ @SerialVersionUID(3L)
+ class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T])
+ extends NumericRange(start, end, step, false) {
+ override def copy(start: T, end: T, step: T): Exclusive[T] =
+ NumericRange(start, end, step)
+
+ def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step)
+ }
+
+ def apply[T](start: T, end: T, step: T)(implicit num: Integral[T]): Exclusive[T] =
+ new Exclusive(start, end, step)
+ def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] =
+ new Inclusive(start, end, step)
+
+ private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]](
+ Numeric.BigIntIsIntegral -> Ordering.BigInt,
+ Numeric.IntIsIntegral -> Ordering.Int,
+ Numeric.ShortIsIntegral -> Ordering.Short,
+ Numeric.ByteIsIntegral -> Ordering.Byte,
+ Numeric.CharIsIntegral -> Ordering.Char,
+ Numeric.LongIsIntegral -> Ordering.Long,
+ Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal
+ )
+
+ @SerialVersionUID(3L)
+ private final class NumericRangeIterator[T](self: NumericRange[T], num: Integral[T]) extends AbstractIterator[T] with Serializable {
+ import num.mkNumericOps
+
+ private[this] var _hasNext = !self.isEmpty
+ private[this] var _next: T @uncheckedCaptures = self.start
+ private[this] val lastElement: T = if (_hasNext) self.last else self.start
+ override def knownSize: Int = if (_hasNext) num.toInt((lastElement - _next) / self.step) + 1 else 0
+ def hasNext: Boolean = _hasNext
+ def next(): T = {
+ if (!_hasNext) Iterator.empty.next()
+ val value = _next
+ _hasNext = value != lastElement
+ _next = num.plus(value, self.step)
+ value
+ }
+ }
+}
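
As the scaladoc above notes, `NumericRange` generalizes ranges to non-`Int` element types. A small sketch, assuming the `Range.Long` factory shown in the companion of `Range`, exercising `length`, `take`, `contains`, and the overflow-safe `Long` branch of `sum`; the `NumericRangeDemo` object is hypothetical.

    object NumericRangeDemo {
      def main(args: Array[String]): Unit = {
        val big = Int.MaxValue.toLong + 1
        val r = Range.Long(big, big + 10, 2)   // a NumericRange.Exclusive[Long]
        println(r.length)                      // 5
        println(r.take(2).toList)              // List(2147483648, 2147483650)
        println(r.contains(big + 4))           // true: within bounds and on a step
        println(r.sum)                         // 10737418260, computed without Long overflow
      }
    }
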
diff --git a/tests/pos-special/stdlib/collection/immutable/Queue.scala b/tests/pos-special/stdlib/collection/immutable/Queue.scala
new file mode 100644
index 000000000000..929c79ce588a
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Queue.scala
@@ -0,0 +1,218 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.{Builder, ListBuffer}
+import language.experimental.captureChecking
+
+/** `Queue` objects implement data structures that allow elements to be
+  * inserted and retrieved in a first-in-first-out (FIFO) manner.
+ *
+ * `Queue` is implemented as a pair of `List`s, one containing the ''in'' elements and the other the ''out'' elements.
+ * Elements are added to the ''in'' list and removed from the ''out'' list. When the ''out'' list runs dry, the
+ * queue is pivoted by replacing the ''out'' list by ''in.reverse'', and ''in'' by ''Nil''.
+ *
+  * Adding items to the queue always has cost `O(1)`. Removing items has cost `O(1)` as well, except when
+  * a pivot is required, in which case a cost of `O(n)` is incurred, where `n` is the number of elements in the queue.
+  * When this happens, `n` remove operations with `O(1)` cost are guaranteed, so removing an item is `O(1)` amortized.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]]
+ * section on `Immutable Queues` for more information.
+ *
+ * @define Coll `immutable.Queue`
+ * @define coll immutable queue
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+
+sealed class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
+ extends AbstractSeq[A]
+ with LinearSeq[A]
+ with LinearSeqOps[A, Queue, Queue[A]]
+ with StrictOptimizedLinearSeqOps[A, Queue, Queue[A]]
+ with StrictOptimizedSeqOps[A, Queue, Queue[A]]
+ with IterableFactoryDefaults[A, Queue]
+ with DefaultSerializable {
+
+ override def iterableFactory: SeqFactory[Queue] = Queue
+
+ /** Returns the `n`-th element of this queue.
+ * The first element is at position `0`.
+ *
+ * @param n index of the element to return
+ * @return the element at position `n` in this queue.
+    * @throws IndexOutOfBoundsException if the queue is too short (`n` is out of range).
+ */
+ override def apply(n: Int): A = {
+ def indexOutOfRange(): Nothing = throw new IndexOutOfBoundsException(n.toString)
+
+ var index = 0
+ var curr = out
+
+ while (index < n && curr.nonEmpty) {
+ index += 1
+ curr = curr.tail
+ }
+
+ if (index == n) {
+ if (curr.nonEmpty) curr.head
+ else if (in.nonEmpty) in.last
+ else indexOutOfRange()
+ } else {
+ val indexFromBack = n - index
+ val inLength = in.length
+ if (indexFromBack >= inLength) indexOutOfRange()
+ else in(inLength - indexFromBack - 1)
+ }
+ }
+
+  /** Returns the elements of this queue as an iterator.
+ */
+ override def iterator: Iterator[A] = out.iterator.concat(in.reverse)
+
+ /** Checks if the queue is empty.
+ *
+    * @return `true` iff there is no element in the queue.
+ */
+ override def isEmpty: Boolean = in.isEmpty && out.isEmpty
+
+ override def head: A =
+ if (out.nonEmpty) out.head
+ else if (in.nonEmpty) in.last
+ else throw new NoSuchElementException("head on empty queue")
+
+ override def tail: Queue[A] =
+ if (out.nonEmpty) new Queue(in, out.tail)
+ else if (in.nonEmpty) new Queue(Nil, in.reverse.tail)
+ else throw new NoSuchElementException("tail on empty queue")
+
+ override def last: A =
+ if (in.nonEmpty) in.head
+ else if (out.nonEmpty) out.last
+ else throw new NoSuchElementException("last on empty queue")
+
+  /* Overridden to avoid the inefficient default implementation via iterator. */
+ override def forall(p: A => Boolean): Boolean =
+ in.forall(p) && out.forall(p)
+
+  /* Overridden to avoid the inefficient default implementation via iterator. */
+ override def exists(p: A => Boolean): Boolean =
+ in.exists(p) || out.exists(p)
+
+ override protected[this] def className = "Queue"
+
+ /** Returns the length of the queue. */
+ override def length: Int = in.length + out.length
+
+ override def prepended[B >: A](elem: B): Queue[B] = new Queue(in, elem :: out)
+
+ override def appended[B >: A](elem: B): Queue[B] = enqueue(elem)
+
+ override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]^): Queue[B] = {
+ val newIn = that match {
+ case that: Queue[B] => that.in ++ (that.out reverse_::: this.in)
+ case that: List[B] => that reverse_::: this.in
+ case _ =>
+ var result: List[B] = this.in
+ val iter = that.iterator
+ while (iter.hasNext) {
+ result = iter.next() :: result
+ }
+ result
+ }
+ if (newIn eq this.in) this else new Queue[B](newIn, this.out)
+ }
+
+  /** Creates a new queue with the given element added at the end
+ * of the old queue.
+ *
+ * @param elem the element to insert
+ */
+ def enqueue[B >: A](elem: B): Queue[B] = new Queue(elem :: in, out)
+
+ /** Creates a new queue with all elements provided by an `Iterable` object
+ * added at the end of the old queue.
+ *
+ * The elements are appended in the order they are given out by the
+ * iterator.
+ *
+ * @param iter an iterable object
+ */
+ @deprecated("Use `enqueueAll` instead of `enqueue` to enqueue a collection of elements", "2.13.0")
+ @`inline` final def enqueue[B >: A](iter: scala.collection.Iterable[B]) = enqueueAll(iter)
+
+ /** Creates a new queue with all elements provided by an `Iterable` object
+ * added at the end of the old queue.
+ *
+ * The elements are appended in the order they are given out by the
+ * iterator.
+ *
+ * @param iter an iterable object
+ */
+ def enqueueAll[B >: A](iter: scala.collection.Iterable[B]): Queue[B] = appendedAll(iter)
+
+ /** Returns a tuple with the first element in the queue,
+ * and a new queue with this element removed.
+ *
+    * @throws NoSuchElementException if the queue is empty
+    * @return the first element of the queue, paired with the remaining queue.
+ */
+ def dequeue: (A, Queue[A]) = out match {
+ case Nil if !in.isEmpty => val rev = in.reverse ; (rev.head, new Queue(Nil, rev.tail))
+ case x :: xs => (x, new Queue(in, xs))
+ case _ => throw new NoSuchElementException("dequeue on empty queue")
+ }
+
+ /** Optionally retrieves the first element and a queue of the remaining elements.
+ *
+ * @return A tuple of the first element of the queue, and a new queue with this element removed.
+ * If the queue is empty, `None` is returned.
+ */
+ def dequeueOption: Option[(A, Queue[A])] = if(isEmpty) None else Some(dequeue)
+
+ /** Returns the first element in the queue, or throws an error if there
+ * is no element contained in the queue.
+ *
+    * @throws NoSuchElementException if the queue is empty
+ * @return the first element.
+ */
+ def front: A = head
+
+ /** Returns a string representation of this queue.
+ */
+ override def toString(): String = mkString("Queue(", ", ", ")")
+}
+
+/** $factoryInfo
+ * @define Coll `immutable.Queue`
+ * @define coll immutable queue
+ */
+@SerialVersionUID(3L)
+object Queue extends StrictOptimizedSeqFactory[Queue] {
+ def newBuilder[sealed A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x))
+
+ def from[A](source: IterableOnce[A]^): Queue[A] = source match {
+ case q: Queue[A] => q
+ case _ =>
+ val list = List.from(source)
+ if (list.isEmpty) empty
+ else new Queue(Nil, list)
+ }
+
+ def empty[A]: Queue[A] = EmptyQueue
+ override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList)
+
+ private object EmptyQueue extends Queue[Nothing](Nil, Nil) { }
+}
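
A short sketch of the FIFO behavior documented above: elements are enqueued onto the in list and dequeued from the out list in `O(1)`, with the pivot to `in.reverse` happening lazily once the out list runs dry. The `QueueDemo` object is hypothetical.

    import scala.collection.immutable.Queue

    object QueueDemo {
      def main(args: Array[String]): Unit = {
        val q = Queue(1, 2).enqueue(3)  // out = List(1, 2), in = List(3)
        val (first, rest) = q.dequeue   // O(1): takes the head of the out list
        println(first)                  // 1
        println(rest.toList)            // List(2, 3); a later dequeue pivots in.reverse
      }
    }
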
diff --git a/tests/pos-special/stdlib/collection/immutable/Range.scala b/tests/pos-special/stdlib/collection/immutable/Range.scala
new file mode 100644
index 000000000000..459591d1a9cb
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Range.scala
@@ -0,0 +1,673 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.immutable
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.convert.impl.RangeStepper
+import scala.collection.{AbstractIterator, AnyStepper, IterableFactoryDefaults, Iterator, Stepper, StepperShape}
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+
+/** The `Range` class represents integer values in range
+ * ''[start;end)'' with non-zero step value `step`.
+ * It's a special case of an indexed sequence.
+ * For example:
+ *
+ * {{{
+ * val r1 = 0 until 10
+ * val r2 = r1.start until r1.end by r1.step + 1
+ * println(r2.length) // = 5
+ * }}}
+ *
+ * Ranges that contain more than `Int.MaxValue` elements can be created, but
+ * these overfull ranges have only limited capabilities. Any method that
+ * could require a collection of over `Int.MaxValue` length to be created, or
+ * could be asked to index beyond `Int.MaxValue` elements will throw an
+ * exception. Overfull ranges can safely be reduced in size by changing
+ * the step size (e.g. `by 3`) or taking/dropping elements. `contains`,
+ * `equals`, and access to the ends of the range (`head`, `last`, `tail`,
+ * `init`) are also permitted on overfull ranges.
+ *
+ * @param start the start of this range.
+ * @param end the end of the range. For exclusive ranges, e.g.
+ * `Range(0,3)` or `(0 until 3)`, this is one
+ * step past the last one in the range. For inclusive
+ * ranges, e.g. `Range.inclusive(0,3)` or `(0 to 3)`,
+ * it may be in the range if it is not skipped by the step size.
+ * To find the last element inside a non-empty range,
+ * use `last` instead.
+ * @param step the step for the range.
+ *
+ * @define coll range
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define doesNotUseBuilders
+ * '''Note:''' this method does not use builders to construct a new range,
+ * and its complexity is O(1).
+ */
+@SerialVersionUID(3L)
+sealed abstract class Range(
+ val start: Int,
+ val end: Int,
+ val step: Int
+)
+ extends AbstractSeq[Int]
+ with IndexedSeq[Int]
+ with IndexedSeqOps[Int, IndexedSeq, IndexedSeq[Int]]
+ with StrictOptimizedSeqOps[Int, IndexedSeq, IndexedSeq[Int]]
+ with IterableFactoryDefaults[Int, IndexedSeq]
+ with Serializable { range =>
+
+ final override def iterator: Iterator[Int] = new RangeIterator(start, step, lastElement, isEmpty)
+
+ override final def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = {
+ val st = new RangeStepper(start, step, 0, length)
+ val r =
+ if (shape.shape == StepperShape.IntShape) st
+ else {
+ assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape")
+ AnyStepper.ofParIntStepper(st)
+ }
+ r.asInstanceOf[S with EfficientSplit]
+ }
+
+ private[this] def gap = end.toLong - start.toLong
+ private[this] def isExact = gap % step == 0
+ private[this] def hasStub = isInclusive || !isExact
+ private[this] def longLength = gap / step + ( if (hasStub) 1 else 0 )
+
+ def isInclusive: Boolean
+
+ final override val isEmpty: Boolean = (
+ (start > end && step > 0)
+ || (start < end && step < 0)
+ || (start == end && !isInclusive)
+ )
+
+ private[this] val numRangeElements: Int = {
+ if (step == 0) throw new IllegalArgumentException("step cannot be 0.")
+ else if (isEmpty) 0
+ else {
+ val len = longLength
+ if (len > scala.Int.MaxValue) -1
+ else len.toInt
+ }
+ }
+
+ final def length = if (numRangeElements < 0) fail() else numRangeElements
+
+ // This field has a sensible value only for non-empty ranges
+ private[this] val lastElement = step match {
+ case 1 => if (isInclusive) end else end-1
+ case -1 => if (isInclusive) end else end+1
+ case _ =>
+ val remainder = (gap % step).toInt
+ if (remainder != 0) end - remainder
+ else if (isInclusive) end
+ else end - step
+ }
+
+ /** The last element of this range. This method will return the correct value
+ * even if there are too many elements to iterate over.
+ */
+ final override def last: Int =
+ if (isEmpty) throw Range.emptyRangeError("last") else lastElement
+ final override def head: Int =
+ if (isEmpty) throw Range.emptyRangeError("head") else start
+
+ /** Creates a new range containing all the elements of this range except the last one.
+ *
+ * $doesNotUseBuilders
+ *
+ * @return a new range consisting of all the elements of this range except the last one.
+ */
+ final override def init: Range =
+ if (isEmpty) throw Range.emptyRangeError("init") else dropRight(1)
+
+ /** Creates a new range containing all the elements of this range except the first one.
+ *
+ * $doesNotUseBuilders
+ *
+ * @return a new range consisting of all the elements of this range except the first one.
+ */
+ final override def tail: Range = {
+ if (isEmpty) throw Range.emptyRangeError("tail")
+ if (numRangeElements == 1) newEmptyRange(end)
+ else if(isInclusive) new Range.Inclusive(start + step, end, step)
+ else new Range.Exclusive(start + step, end, step)
+ }
+
+ override def map[B](f: Int => B): IndexedSeq[B] = {
+ validateMaxLength()
+ super.map(f)
+ }
+
+ final protected def copy(start: Int = start, end: Int = end, step: Int = step, isInclusive: Boolean = isInclusive): Range =
+ if(isInclusive) new Range.Inclusive(start, end, step) else new Range.Exclusive(start, end, step)
+
+ /** Create a new range with the `start` and `end` values of this range and
+ * a new `step`.
+ *
+ * @return a new range with a different step
+ */
+ final def by(step: Int): Range = copy(start, end, step)
+
+ // Check cannot be evaluated eagerly because we have a pattern where
+  // ranges are constructed like "x to y by z". The "x to y" piece
+ // should not trigger an exception. So the calculation is delayed,
+ // which means it will not fail fast for those cases where failing was
+ // correct.
+ private[this] def validateMaxLength(): Unit = {
+ if (numRangeElements < 0)
+ fail()
+ }
+ private[this] def description = "%d %s %d by %s".format(start, if (isInclusive) "to" else "until", end, step)
+ private[this] def fail() = throw new IllegalArgumentException(description + ": seqs cannot contain more than Int.MaxValue elements.")
+
+ @throws[IndexOutOfBoundsException]
+ final def apply(idx: Int): Int = {
+ validateMaxLength()
+ if (idx < 0 || idx >= numRangeElements) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${numRangeElements-1})")
+ else start + (step * idx)
+ }
+
+ /*@`inline`*/ final override def foreach[@specialized(Unit) U](f: Int => U): Unit = {
+ // Implementation chosen on the basis of favorable microbenchmarks
+    // Note: initialization catches step == 0, so we don't need to check here
+ if (!isEmpty) {
+ var i = start
+ while (true) {
+ f(i)
+ if (i == lastElement) return
+ i += step
+ }
+ }
+ }
+
+ override final def indexOf[@specialized(Int) B >: Int](elem: B, from: Int = 0): Int =
+ elem match {
+ case i: Int =>
+ val pos = posOf(i)
+ if (pos >= from) pos else -1
+ case _ => super.indexOf(elem, from)
+ }
+
+ override final def lastIndexOf[@specialized(Int) B >: Int](elem: B, end: Int = length - 1): Int =
+ elem match {
+ case i: Int =>
+ val pos = posOf(i)
+ if (pos <= end) pos else -1
+ case _ => super.lastIndexOf(elem, end)
+ }
+
+ private[this] def posOf(i: Int): Int =
+ if (contains(i)) (i - start) / step else -1
+
+ override def sameElements[B >: Int](that: IterableOnce[B]^): Boolean = that match {
+ case other: Range =>
+ (this.length : @annotation.switch) match {
+ case 0 => other.isEmpty
+ case 1 => other.length == 1 && this.start == other.start
+ case n => other.length == n && (
+ (this.start == other.start)
+ && (this.step == other.step)
+ )
+ }
+ case _ => super.sameElements(that)
+ }
+
+ /** Creates a new range containing the first `n` elements of this range.
+ *
+ * @param n the number of elements to take.
+ * @return a new range consisting of `n` first elements.
+ */
+ final override def take(n: Int): Range =
+ if (n <= 0 || isEmpty) newEmptyRange(start)
+ else if (n >= numRangeElements && numRangeElements >= 0) this
+ else {
+ // May have more than Int.MaxValue elements in range (numRangeElements < 0)
+ // but the logic is the same either way: take the first n
+ new Range.Inclusive(start, locationAfterN(n - 1), step)
+ }
+
+ /** Creates a new range containing all the elements of this range except the first `n` elements.
+ *
+ * @param n the number of elements to drop.
+ * @return a new range consisting of all the elements of this range except `n` first elements.
+ */
+ final override def drop(n: Int): Range =
+ if (n <= 0 || isEmpty) this
+ else if (n >= numRangeElements && numRangeElements >= 0) newEmptyRange(end)
+ else {
+ // May have more than Int.MaxValue elements (numRangeElements < 0)
+ // but the logic is the same either way: go forwards n steps, keep the rest
+ copy(locationAfterN(n), end, step)
+ }
+
+ /** Creates a new range consisting of the last `n` elements of the range.
+ *
+ * $doesNotUseBuilders
+ */
+ final override def takeRight(n: Int): Range = {
+ if (n <= 0) newEmptyRange(start)
+ else if (numRangeElements >= 0) drop(numRangeElements - n)
+ else {
+ // Need to handle over-full range separately
+ val y = last
+ val x = y - step.toLong*(n-1)
+ if ((step > 0 && x < start) || (step < 0 && x > start)) this
+ else Range.inclusive(x.toInt, y, step)
+ }
+ }
+
+ /** Creates a new range consisting of the initial `length - n` elements of the range.
+ *
+ * $doesNotUseBuilders
+ */
+ final override def dropRight(n: Int): Range = {
+ if (n <= 0) this
+ else if (numRangeElements >= 0) take(numRangeElements - n)
+ else {
+ // Need to handle over-full range separately
+ val y = last - step.toInt*n
+ if ((step > 0 && y < start) || (step < 0 && y > start)) newEmptyRange(start)
+ else Range.inclusive(start, y.toInt, step)
+ }
+ }
+
+ // Advance from the start while we meet the given test
+ private[this] def argTakeWhile(p: Int => Boolean): Long = {
+ if (isEmpty) start
+ else {
+ var current = start
+ val stop = last
+ while (current != stop && p(current)) current += step
+ if (current != stop || !p(current)) current
+ else current.toLong + step
+ }
+ }
+
+ final override def takeWhile(p: Int => Boolean): Range = {
+ val stop = argTakeWhile(p)
+ if (stop==start) newEmptyRange(start)
+ else {
+ val x = (stop - step).toInt
+ if (x == last) this
+ else Range.inclusive(start, x, step)
+ }
+ }
+
+ final override def dropWhile(p: Int => Boolean): Range = {
+ val stop = argTakeWhile(p)
+ if (stop == start) this
+ else {
+ val x = (stop - step).toInt
+ if (x == last) newEmptyRange(last)
+ else Range.inclusive(x + step, last, step)
+ }
+ }
+
+ final override def span(p: Int => Boolean): (Range, Range) = {
+ val border = argTakeWhile(p)
+ if (border == start) (newEmptyRange(start), this)
+ else {
+ val x = (border - step).toInt
+ if (x == last) (this, newEmptyRange(last))
+ else (Range.inclusive(start, x, step), Range.inclusive(x+step, last, step))
+ }
+ }
+
+ /** Creates a new range containing the elements starting at `from` up to but not including `until`.
+ *
+ * $doesNotUseBuilders
+ *
+ * @param from the element at which to start
+ * @param until the element at which to end (not included in the range)
+ * @return a new range consisting of a contiguous interval of values in the old range
+ */
+ final override def slice(from: Int, until: Int): Range =
+ if (from <= 0) take(until)
+ else if (until >= numRangeElements && numRangeElements >= 0) drop(from)
+ else {
+ val fromValue = locationAfterN(from)
+ if (from >= until) newEmptyRange(fromValue)
+ else Range.inclusive(fromValue, locationAfterN(until-1), step)
+ }
+
+ // Overridden only to refine the return type
+ final override def splitAt(n: Int): (Range, Range) = (take(n), drop(n))
+
+ // Methods like apply throw exceptions on invalid n, but methods like take/drop
+ // are forgiving: therefore the checks are with the methods.
+ private[this] def locationAfterN(n: Int) = start + (step * n)
+
+ // When one drops everything. Can't ever have unchecked operations
+ // like "end + 1" or "end - 1" because ranges involving Int.{ MinValue, MaxValue }
+ // will overflow. This creates an exclusive range where start == end
+ // based on the given value.
+ private[this] def newEmptyRange(value: Int) = new Range.Exclusive(value, value, step)
+
+ /** Returns the reverse of this range.
+ */
+ final override def reverse: Range =
+ if (isEmpty) this
+ else new Range.Inclusive(last, start, -step)
+
+ /** Make range inclusive.
+ */
+ final def inclusive: Range =
+ if (isInclusive) this
+ else new Range.Inclusive(start, end, step)
+
+ final def contains(x: Int): Boolean = {
+ if (x == end && !isInclusive) false
+ else if (step > 0) {
+ if (x < start || x > end) false
+ else (step == 1) || (Integer.remainderUnsigned(x - start, step) == 0)
+ }
+ else {
+ if (x < end || x > start) false
+ else (step == -1) || (Integer.remainderUnsigned(start - x, -step) == 0)
+ }
+ }
+ /* Seq#contains has a type parameter so the optimised contains above doesn't override it */
+ override final def contains[B >: Int](elem: B): Boolean = elem match {
+ case i: Int => this.contains(i)
+ case _ => super.contains(elem)
+ }
+
+ final override def sum[B >: Int](implicit num: Numeric[B]): Int = {
+ if (num eq scala.math.Numeric.IntIsIntegral) {
+ // this is normal integer range with usual addition. arithmetic series formula can be used
+ if (isEmpty) 0
+ else if (size == 1) head
+ else ((size * (head.toLong + last)) / 2).toInt
+ } else {
+ // user provided custom Numeric, we cannot rely on arithmetic series formula
+ if (isEmpty) num.toInt(num.zero)
+ else {
+ var acc = num.zero
+ var i = head
+ while (true) {
+ acc = num.plus(acc, i)
+ if (i == lastElement) return num.toInt(acc)
+ i = i + step
+ }
+        0 // Never reached; just satisfies the compiler, which doesn't know while(true) has type Nothing
+ }
+ }
+ }
+
+ final override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
+ if (ord eq Ordering.Int) {
+ if (step > 0) head
+ else last
+ } else if (Ordering.Int isReverseOf ord) {
+ if (step > 0) last
+ else head
+ } else super.min(ord)
+
+ final override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
+ if (ord eq Ordering.Int) {
+ if (step > 0) last
+ else head
+ } else if (Ordering.Int isReverseOf ord) {
+ if (step > 0) head
+ else last
+ } else super.max(ord)
+
+ override def tails: Iterator[Range] =
+ new AbstractIterator[Range] {
+ private[this] var i = 0
+ override def hasNext = i <= Range.this.length
+ override def next() = {
+ if (hasNext) {
+ val res = Range.this.drop(i)
+ i += 1
+ res
+ } else {
+ Iterator.empty.next()
+ }
+ }
+ }
+
+ override def inits: Iterator[Range] =
+ new AbstractIterator[Range] {
+ private[this] var i = 0
+ override def hasNext = i <= Range.this.length
+ override def next() = {
+ if (hasNext) {
+ val res = Range.this.dropRight(i)
+ i += 1
+ res
+ } else {
+ Iterator.empty.next()
+ }
+ }
+ }
+ override protected final def applyPreferredMaxLength: Int = Int.MaxValue
+
+ final override def equals(other: Any): Boolean = other match {
+ case x: Range =>
+ // Note: this must succeed for overfull ranges (length > Int.MaxValue)
+ if (isEmpty) x.isEmpty // empty sequences are equal
+ else // this is non-empty...
+ x.nonEmpty && start == x.start && { // ...so other must contain something and have same start
+ val l0 = last
+ (l0 == x.last && ( // And same end
+            start == l0 || step == x.step // And either the same step, or no steps are taken
+ ))
+ }
+ case _ =>
+ super.equals(other)
+ }
+
+ final override def hashCode: Int =
+ if(length >= 2) MurmurHash3.rangeHash(start, step, lastElement)
+ else super.hashCode
+
+ final override def toString: String = {
+ val preposition = if (isInclusive) "to" else "until"
+ val stepped = if (step == 1) "" else s" by $step"
+ val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else ""
+ s"${prefix}Range $start $preposition $end$stepped"
+ }
+
+ override protected[this] def className = "Range"
+
+ override def distinct: Range = this
+
+ override def grouped(size: Int): Iterator[Range] = {
+ require(size >= 1, f"size=$size%d, but size must be positive")
+ if (isEmpty) {
+ Iterator.empty
+ } else {
+ val s = size
+ new AbstractIterator[Range] {
+ private[this] var i = 0
+ override def hasNext = Range.this.length > i
+ override def next() =
+ if (hasNext) {
+ val x = Range.this.slice(i, i + s)
+ i += s
+ x
+ } else {
+ Iterator.empty.next()
+ }
+ }
+ }
+ }
+
+ override def sorted[B >: Int](implicit ord: Ordering[B]): IndexedSeq[Int] =
+ if (ord eq Ordering.Int) {
+ if (step > 0) {
+ this
+ } else {
+ reverse
+ }
+ } else {
+ super.sorted(ord)
+ }
+}
+
+/**
+ * Companion object for ranges.
+ * @define Coll `Range`
+ * @define coll range
+ */
+object Range {
+
+ /** Counts the number of range elements.
+ * precondition: step != 0
+ * If the size of the range exceeds Int.MaxValue, the
+ * result will be negative.
+ */
+ def count(start: Int, end: Int, step: Int, isInclusive: Boolean): Int = {
+ if (step == 0)
+ throw new IllegalArgumentException("step cannot be 0.")
+
+ val isEmpty =
+ if (start == end) !isInclusive
+ else if (start < end) step < 0
+ else step > 0
+
+ if (isEmpty) 0
+ else {
+ // Counts with Longs so we can recognize too-large ranges.
+ val gap: Long = end.toLong - start.toLong
+ val jumps: Long = gap / step
+ // Whether the size of this range is one larger than the
+ // number of full-sized jumps.
+ val hasStub = isInclusive || (gap % step != 0)
+ val result: Long = jumps + ( if (hasStub) 1 else 0 )
+
+ if (result > scala.Int.MaxValue) -1
+ else result.toInt
+ }
+ }
+ def count(start: Int, end: Int, step: Int): Int =
+ count(start, end, step, isInclusive = false)
+
+ /** Make a range from `start` until `end` (exclusive) with given step value.
+ * @note step != 0
+ */
+ def apply(start: Int, end: Int, step: Int): Range.Exclusive = new Range.Exclusive(start, end, step)
+
+ /** Make a range from `start` until `end` (exclusive) with step value 1.
+ */
+ def apply(start: Int, end: Int): Range.Exclusive = new Range.Exclusive(start, end, 1)
+
+ /** Make an inclusive range from `start` to `end` with given step value.
+ * @note step != 0
+ */
+ def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Range.Inclusive(start, end, step)
+
+ /** Make an inclusive range from `start` to `end` with step value 1.
+ */
+ def inclusive(start: Int, end: Int): Range.Inclusive = new Range.Inclusive(start, end, 1)
+
+ @SerialVersionUID(3L)
+ final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
+ def isInclusive: Boolean = true
+ }
+
+ @SerialVersionUID(3L)
+ final class Exclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
+ def isInclusive: Boolean = false
+ }
+
+ // BigInt and Long are straightforward generic ranges.
+ object BigInt {
+ def apply(start: BigInt, end: BigInt, step: BigInt): NumericRange.Exclusive[BigInt] = NumericRange(start, end, step)
+ def inclusive(start: BigInt, end: BigInt, step: BigInt): NumericRange.Inclusive[BigInt] = NumericRange.inclusive(start, end, step)
+ }
+
+ object Long {
+ def apply(start: Long, end: Long, step: Long): NumericRange.Exclusive[Long] = NumericRange(start, end, step)
+ def inclusive(start: Long, end: Long, step: Long): NumericRange.Inclusive[Long] = NumericRange.inclusive(start, end, step)
+ }
+
+ // BigDecimal uses an alternative implementation of Numeric in which
+ // it pretends to be Integral[T] instead of Fractional[T]. See Numeric for
+ // details. The intention is for it to throw an exception anytime
+ // imprecision or surprises might result from anything, although this may
+ // not yet be fully implemented.
+ object BigDecimal {
+ implicit val bigDecAsIntegral: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral
+
+ def apply(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Exclusive[BigDecimal] =
+ NumericRange(start, end, step)
+ def inclusive(start: BigDecimal, end: BigDecimal, step: BigDecimal): NumericRange.Inclusive[BigDecimal] =
+ NumericRange.inclusive(start, end, step)
+ }
+
+ // As there is no appealing default step size for not-really-integral ranges,
+ // we offer a partially constructed object.
+ class Partial[T, U](private val f: T -> U) extends AnyVal {
+ def by(x: T): U = f(x)
+ override def toString = "Range requires step"
+ }
+
+ // Illustrating genericity with Int Range, which should have the same behavior
+ // as the original Range class. However we leave the original Range
+ // indefinitely, for performance and because the compiler seems to bootstrap
+ // off it and won't do so with our parameterized version without modifications.
+ object Int {
+ def apply(start: Int, end: Int, step: Int): NumericRange.Exclusive[Int] = NumericRange(start, end, step)
+ def inclusive(start: Int, end: Int, step: Int): NumericRange.Inclusive[Int] = NumericRange.inclusive(start, end, step)
+ }
+
+ private def emptyRangeError(what: String): Throwable =
+ new NoSuchElementException(what + " on empty Range")
+}
+
+/**
+ * @param lastElement The last element included in the Range
+ * @param initiallyEmpty Whether the Range was initially empty or not
+ */
+@SerialVersionUID(3L)
+private class RangeIterator(
+ start: Int,
+ step: Int,
+ lastElement: Int,
+ initiallyEmpty: Boolean
+) extends AbstractIterator[Int] with Serializable {
+ private[this] var _hasNext: Boolean = !initiallyEmpty
+ private[this] var _next: Int = start
+ override def knownSize: Int = if (_hasNext) (lastElement - _next) / step + 1 else 0
+ def hasNext: Boolean = _hasNext
+ @throws[NoSuchElementException]
+ def next(): Int = {
+ if (!_hasNext) Iterator.empty.next()
+ val value = _next
+ _hasNext = value != lastElement
+ _next = value + step
+ value
+ }
+
+ override def drop(n: Int): Iterator[Int] = {
+ if (n > 0) {
+ val longPos = _next.toLong + step * n
+ if (step > 0) {
+ _next = Math.min(lastElement, longPos).toInt
+ _hasNext = longPos <= lastElement
+ }
+ else if (step < 0) {
+ _next = Math.max(lastElement, longPos).toInt
+ _hasNext = longPos >= lastElement
+ }
+ }
+ this
+ }
+}
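
The `Range` scaladoc above permits "overfull" ranges of more than `Int.MaxValue` elements with limited capabilities. A minimal sketch of which operations still work on such a range and which fail; the `OverfullRangeDemo` object is hypothetical.

    object OverfullRangeDemo {
      def main(args: Array[String]): Unit = {
        val huge = Int.MinValue to Int.MaxValue   // 2^32 elements: numRangeElements is -1
        println(huge.head)                        // -2147483648
        println(huge.last)                        // 2147483647
        println(huge.contains(0))                 // true
        try println(huge.length)                  // throws: more than Int.MaxValue elements
        catch { case e: IllegalArgumentException => println(e.getMessage) }
      }
    }
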
diff --git a/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
new file mode 100644
index 000000000000..5fbc927d7a21
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/RedBlackTree.scala
@@ -0,0 +1,1234 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.meta.{getter, setter}
+import scala.annotation.tailrec
+import scala.runtime.Statics.releaseFence
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** An object containing the red-black tree implementation used for `TreeMap` and `TreeSet`.
+ *
+ * Implementation note: since efficiency is important for data structures this implementation
+ * uses `null` to represent empty trees. This also means pattern matching cannot
+ * easily be used. The API represented by the RedBlackTree object tries to hide these
+ * optimizations behind a reasonably clean API.
+ */
+private[collection] object RedBlackTree {
+
+ def isEmpty(tree: Tree[_, _]): Boolean = tree eq null
+
+ def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null
+ def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match {
+ case null => None
+ case tree => Some(tree.value)
+ }
+
+ @tailrec
+ def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ val cmp = ordering.compare(x, tree.key)
+ if (cmp < 0) lookup(tree.left, x)
+ else if (cmp > 0) lookup(tree.right, x)
+ else tree
+ }
+ private[immutable] abstract class Helper[A](implicit val ordering: Ordering[A]) {
+ def beforePublish[B](tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) tree
+ else if (tree.isMutable) {
+ val res = tree.mutableBlack.makeImmutable
+ releaseFence()
+ res
+ } else tree.black
+ }
+ /** Create a new balanced tree where `newLeft` replaces `tree.left`.
+ * tree and newLeft are never null */
+ protected[this] final def mutableBalanceLeft[A1, B, B1 >: B](tree: Tree[A1, B], newLeft: Tree[A1, B1]): Tree[A1, B1] = {
+ // Parameter trees
+ // tree | newLeft
+ // -- KV R | nl.L nl.KV nl.R
+ // | nl.R.L nl.R.KV nl.R.R
+      // Note: unlike with the immutable trees, we can't test tree.left eq newLeft,
+      // as the balance operations may mutate the same object;
+      // that check mostly existed to avoid object creation anyway
+ if (newLeft.isRed) {
+ val newLeft_left = newLeft.left
+ val newLeft_right = newLeft.right
+ if (isRedTree(newLeft_left)) {
+ // RED
+ // black(nl.L) nl.KV black
+ // nl.R KV R
+ val resultLeft = newLeft_left.mutableBlack
+ val resultRight = tree.mutableBlackWithLeft(newLeft_right)
+
+ newLeft.mutableWithLeftRight(resultLeft, resultRight)
+ } else if (isRedTree(newLeft_right)) {
+ // RED
+ // black nl.R.KV black
+ // nl.L nl.KV nl.R.L nl.R.R KV R
+
+ val newLeft_right_right = newLeft_right.right
+
+ val resultLeft = newLeft.mutableBlackWithRight(newLeft_right.left)
+ val resultRight = tree.mutableBlackWithLeft(newLeft_right_right)
+
+ newLeft_right.mutableWithLeftRight(resultLeft, resultRight)
+ } else {
+ // tree
+ // newLeft KV R
+ tree.mutableWithLeft(newLeft)
+ }
+ } else {
+ // tree
+ // newLeft KV R
+ tree.mutableWithLeft(newLeft)
+ }
+ }
+ /** Create a new balanced tree where `newRight` replaces `tree.right`.
+ * tree and newRight are never null */
+ protected[this] final def mutableBalanceRight[A1, B, B1 >: B](tree: Tree[A1, B], newRight: Tree[A1, B1]): Tree[A1, B1] = {
+ // Parameter trees
+ // tree | newRight
+ // L KV -- | nr.L nr.KV nr.R
+ // | nr.L.L nr.L.KV nr.L.R
+ //Note - unlike the immutable trees we can't consider tree.right eq newRight
+ //as the balance operations may mutate the same object
+ //but that check was mostly to avoid the object creation
+ if (newRight.isRed) {
+ val newRight_left = newRight.left
+ if (isRedTree(newRight_left)) {
+ // RED
+ // black nr.L.KV black
+ // L KV nr.L.L nr.L.R nr.KV nr.R
+
+ val resultLeft = tree.mutableBlackWithRight(newRight_left.left)
+ val resultRight = newRight.mutableBlackWithLeft(newRight_left.right)
+
+ newRight_left.mutableWithLeftRight(resultLeft, resultRight)
+
+ } else {
+ val newRight_right = newRight.right
+ if (isRedTree(newRight_right)) {
+ // RED
+ // black nr.KV black(nr.R)
+ // L KV nr.L
+
+ val resultLeft = tree.mutableBlackWithRight(newRight_left)
+ val resultRight = newRight_right.mutableBlack
+
+ newRight.mutableWithLeftRight(resultLeft, resultRight)
+ } else {
+ // tree
+ // L KV newRight
+ tree.mutableWithRight(newRight)
+ }
+ }
+ } else {
+ // tree
+ // L KV newRight
+ tree.mutableWithRight(newRight)
+ }
+ }
+ }
+ private[immutable] class SetHelper[A](implicit ordering: Ordering[A]) extends Helper[A] {
+ protected[this] final def mutableUpd(tree: Tree[A, Any], k: A): Tree[A, Any] =
+ if (tree eq null) {
+ mutableRedTree(k, (), null, null)
+ } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) {
+ tree
+ } else {
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0)
+ mutableBalanceLeft(tree, mutableUpd(tree.left, k))
+ else if (cmp > 0)
+ mutableBalanceRight(tree, mutableUpd(tree.right, k))
+ else tree
+ }
+ }
+ private[immutable] class MapHelper[A, B](implicit ordering: Ordering[A]) extends Helper[A] {
+ protected[this] final def mutableUpd[B1 >: B](tree: Tree[A, B], k: A, v: B1): Tree[A, B1] =
+ if (tree eq null) {
+ mutableRedTree(k, v, null, null)
+ } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) {
+ tree.mutableWithV(v)
+ } else {
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0)
+ mutableBalanceLeft(tree, mutableUpd(tree.left, k, v))
+ else if (cmp > 0)
+ mutableBalanceRight(tree, mutableUpd(tree.right, k, v))
+ else tree.mutableWithV(v)
+ }
+ }
+
+ def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
+ def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
+ def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k))
+ def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
+ case (Some(from), Some(until)) => this.range(tree, from, until)
+ case (Some(from), None) => this.from(tree, from)
+ case (None, Some(until)) => this.until(tree, until)
+ case (None, None) => tree
+ }
+ def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until))
+ def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from))
+ def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to))
+ def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key))
+
+ def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n))
+ def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n))
+ def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until))
+
+ def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) throw new NoSuchElementException("empty tree")
+ var result = tree
+ while (result.left ne null) result = result.left
+ result
+ }
+ def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ if (tree eq null) throw new NoSuchElementException("empty tree")
+ var result = tree
+ while (result.right ne null) result = result.right
+ result
+ }
+
+ def tail[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ def _tail(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) throw new NoSuchElementException("empty tree")
+ else {
+ val tl = tree.left
+ if (tl eq null) tree.right
+ else if (tl.isBlack) balLeft(tree, _tail(tl), tree.right)
+ else tree.redWithLeft(_tail(tree.left))
+ }
+ blacken(_tail(tree))
+ }
+
+ def init[A, B](tree: Tree[A, B]): Tree[A, B] = {
+ def _init(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) throw new NoSuchElementException("empty tree")
+ else {
+ val tr = tree.right
+ if (tr eq null) tree.left
+ else if (tr.isBlack) balRight(tree, tree.left, _init(tr))
+ else tree.redWithRight(_init(tr))
+ }
+ blacken(_init(tree))
+ }
+
+ /**
+ * Returns the smallest node with a key larger than or equal to `x`. Returns `null` if there is no such node.
+ */
+ def minAfter[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ val cmp = ordering.compare(x, tree.key)
+ if (cmp == 0) tree
+ else if (cmp < 0) {
+ val l = minAfter(tree.left, x)
+ if (l != null) l else tree
+ } else minAfter(tree.right, x)
+ }
+
+ /**
+ * Returns the largest node with a key smaller than `x`. Returns `null` if there is no such node.
+ */
+ def maxBefore[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ val cmp = ordering.compare(x, tree.key)
+ if (cmp <= 0) maxBefore(tree.left, x)
+ else {
+ val r = maxBefore(tree.right, x)
+ if (r != null) r else tree
+ }
+ }
+
+  def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) _foreach(tree, f)
+
+ def keysEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = {
+ if (a eq b) true
+ else if (a eq null) false
+ else if (b eq null) false
+ else a.count == b.count && (new EqualsIterator(a)).sameKeys(new EqualsIterator(b))
+ }
+ def valuesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = {
+ if (a eq b) true
+ else if (a eq null) false
+ else if (b eq null) false
+ else a.count == b.count && (new EqualsIterator(a)).sameValues(new EqualsIterator(b))
+ }
+ def entriesEqual[A: Ordering, X, Y](a: Tree[A, X], b: Tree[A, Y]): Boolean = {
+ if (a eq b) true
+ else if (a eq null) false
+ else if (b eq null) false
+ else a.count == b.count && (new EqualsIterator(a)).sameEntries(new EqualsIterator(b))
+ }
+
+ private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = {
+ if (tree.left ne null) _foreach(tree.left, f)
+ f((tree.key, tree.value))
+ if (tree.right ne null) _foreach(tree.right, f)
+ }
+
+  def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) _foreachKey(tree, f)
+
+ private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = {
+ if (tree.left ne null) _foreachKey(tree.left, f)
+    f(tree.key)
+ if (tree.right ne null) _foreachKey(tree.right, f)
+ }
+
+  def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = if (tree ne null) _foreachEntry(tree, f)
+
+ private[this] def _foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = {
+ if (tree.left ne null) _foreachEntry(tree.left, f)
+ f(tree.key, tree.value)
+ if (tree.right ne null) _foreachEntry(tree.right, f)
+ }
+
+ def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start)
+ def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start)
+ def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start)
+
+ @tailrec
+ def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
+ val count = this.count(tree.left)
+ if (n < count) nth(tree.left, n)
+ else if (n > count) nth(tree.right, n - count - 1)
+ else tree
+ }
+
+ def isBlack(tree: Tree[_, _]) = (tree eq null) || tree.isBlack
+
+ @`inline` private[this] def isRedTree(tree: Tree[_, _]) = (tree ne null) && tree.isRed
+ @`inline` private[this] def isBlackTree(tree: Tree[_, _]) = (tree ne null) && tree.isBlack
+
+ private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black
+
+ // Blacken if the tree is red and has a red child. This is necessary when using methods such as `upd` or `updNth`
+ // for building subtrees. Use `blacken` instead when building top-level trees.
+ private[this] def maybeBlacken[A, B](t: Tree[A, B]): Tree[A, B] =
+ if(isBlack(t)) t else if(isRedTree(t.left) || isRedTree(t.right)) t.black else t
+
+ private[this] def mkTree[A, B](isBlack: Boolean, key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = {
+ val sizeAndColour = sizeOf(left) + sizeOf(right) + 1 | (if(isBlack) initialBlackCount else initialRedCount)
+ new Tree(key, value.asInstanceOf[AnyRef], left, right, sizeAndColour)
+ }
+
+ /** Create a new balanced tree where `newLeft` replaces `tree.left`. */
+ private[this] def balanceLeft[A, B1](tree: Tree[A, B1], newLeft: Tree[A, B1]): Tree[A, B1] = {
+ // Parameter trees
+ // tree | newLeft
+ // -- KV R | nl.L nl.KV nl.R
+ // | nl.R.L nl.R.KV nl.R.R
+ if (tree.left eq newLeft) tree
+ else {
+ if (newLeft.isRed) {
+ val newLeft_left = newLeft.left
+ val newLeft_right = newLeft.right
+ if (isRedTree(newLeft_left)) {
+ // RED
+ // black(nl.L) nl.KV black
+ // nl.R KV R
+ val resultLeft = newLeft_left.black
+ val resultRight = tree.blackWithLeft(newLeft_right)
+
+ newLeft.withLeftRight(resultLeft, resultRight)
+ } else if (isRedTree(newLeft_right)) {
+ // RED
+ // black nl.R.KV black
+ // nl.L nl.KV nl.R.L nl.R.R KV R
+ val newLeft_right_right = newLeft_right.right
+
+ val resultLeft = newLeft.blackWithRight(newLeft_right.left)
+ val resultRight = tree.blackWithLeft(newLeft_right_right)
+
+ newLeft_right.withLeftRight(resultLeft, resultRight)
+ } else {
+ // tree
+ // newLeft KV R
+ tree.withLeft(newLeft)
+ }
+ } else {
+ // tree
+ // newLeft KV R
+ tree.withLeft(newLeft)
+ }
+ }
+ }
+ /** Create a new balanced tree where `newRight` replaces `tree.right`. */
+ private[this] def balanceRight[A, B1](tree: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+ // Parameter trees
+ // tree | newRight
+ // L KV -- | nr.L nr.KV nr.R
+ // | nr.L.L nr.L.KV nr.L.R
+ if (tree.right eq newRight) tree
+ else {
+ if (newRight.isRed) {
+ val newRight_left = newRight.left
+ if (isRedTree(newRight_left)) {
+ // RED
+ // black nr.L.KV black
+ // L KV nr.L.L nr.L.R nr.KV nr.R
+ val resultLeft = tree.blackWithRight(newRight_left.left)
+ val resultRight = newRight.blackWithLeft(newRight_left.right)
+
+ newRight_left.withLeftRight(resultLeft, resultRight)
+ } else {
+ val newRight_right = newRight.right
+ if (isRedTree(newRight_right)) {
+ // RED
+ // black nr.KV black(nr.R)
+ // L KV nr.L
+ val resultLeft = tree.blackWithRight(newRight_left)
+ val resultRight = newRight_right.black
+
+ newRight.withLeftRight(resultLeft, resultRight)
+ } else {
+ // tree
+ // L KV newRight
+ tree.withRight(newRight)
+ }
+ }
+ } else {
+ // tree
+ // L KV newRight
+ tree.withRight(newRight)
+ }
+ }
+ }
+
+ private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else if (k.asInstanceOf[AnyRef] eq tree.key.asInstanceOf[AnyRef]) {
+ if (overwrite)
+ tree.withV(v)
+ else tree
+ } else {
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0)
+ balanceLeft(tree, upd(tree.left, k, v, overwrite))
+ else if (cmp > 0)
+ balanceRight(tree, upd(tree.right, k, v, overwrite))
+ else if (overwrite && (v.asInstanceOf[AnyRef] ne tree.value.asInstanceOf[AnyRef]))
+ tree.withV(v)
+ else tree
+ }
+ private[this] def updNth[A, B, B1 >: B](tree: Tree[A, B], idx: Int, k: A, v: B1): Tree[A, B1] = if (tree eq null) {
+ RedTree(k, v, null, null)
+ } else {
+ val rank = count(tree.left) + 1
+ if (idx < rank)
+ balanceLeft(tree, updNth(tree.left, idx, k, v))
+ else if (idx > rank)
+ balanceRight(tree, updNth(tree.right, idx - rank, k, v))
+ else tree
+ }
+
+ private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
+ val newLeft = doFrom(tree.left, from)
+ if (newLeft eq tree.left) tree
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false)
+ else join(newLeft, tree.key, tree.value, tree.right)
+ }
+ private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
+ val newRight = doTo(tree.right, to)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
+ else join (tree.left, tree.key, tree.value, newRight)
+ }
+ private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
+ val newRight = doUntil(tree.right, until)
+ if (newRight eq tree.right) tree
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
+ else join(tree.left, tree.key, tree.value, newRight)
+ }
+
+ private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
+ if (tree eq null) return null
+ if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until)
+ if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until)
+ val newLeft = doFrom(tree.left, from)
+ val newRight = doUntil(tree.right, until)
+ if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false)
+ else join(newLeft, tree.key, tree.value, newRight)
+ }
+
+ private[this] def doDrop[A, B](tree: Tree[A, B], n: Int): Tree[A, B] =
+ if((tree eq null) || (n <= 0)) tree
+ else if(n >= tree.count) null
+ else {
+ val l = count(tree.left)
+ if(n > l) doDrop(tree.right, n-l-1)
+ else if(n == l) join(null, tree.key, tree.value, tree.right)
+ else join(doDrop(tree.left, n), tree.key, tree.value, tree.right)
+ }
+
+ private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] =
+ if((tree eq null) || (n <= 0)) null
+ else if(n >= tree.count) tree
+ else {
+ val l = count(tree.left)
+ if(n <= l) doTake(tree.left, n)
+ else if(n == l+1) maybeBlacken(updNth(tree.left, n, tree.key, tree.value))
+ else join(tree.left, tree.key, tree.value, doTake(tree.right, n-l-1))
+ }
+
+ private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] =
+ if((tree eq null) || (from >= until) || (from >= tree.count) || (until <= 0)) null
+ else if((from <= 0) && (until >= tree.count)) tree
+ else {
+ val l = count(tree.left)
+ if(until <= l) doSlice(tree.left, from, until)
+ else if(from > l) doSlice(tree.right, from-l-1, until-l-1)
+ else join(doDrop(tree.left, from), tree.key, tree.value, doTake(tree.right, until-l-1))
+ }
+
+ /*
+   * Forcing direct field access using the @`inline` annotation helps speed up
+ * various operations (especially smallest/greatest and update/delete).
+ *
+ * Unfortunately the direct field access is not guaranteed to work (but
+ * works on the current implementation of the Scala compiler).
+ *
+   * An alternative is to implement these classes using plain old Java code...
+ *
+ * Mutability
+ * This implementation encodes both mutable and immutable trees.
+ * Mutable trees are never exposed to the user code but we get significant reductions in both CPU and allocations
+   * by maintaining a mutable tree during internal operations, e.g. while a builder is building a Tree, and in
+   * other bulk APIs such as filter or ++
+ *
+ * Mutable trees are only used within the confines of this bulk operation and not shared
+ * Mutable trees may transition to become immutable by calling beforePublish
+ * Mutable trees may have child nodes (left and right) which are immutable Trees (this promotes structural sharing)
+ *
+   * Immutable trees may only have child nodes (left and right) which are immutable Trees, and as such for an
+   * immutable tree the entire transitive subtree is immutable
+ *
+   * Colour, mutability and size encoding
+   * The colour of the Tree, its mutability and size are all encoded in the _count field
+ * The colour is encoded in the top bit (31) of the _count. This allows a mutable tree to change colour without
+ * additional allocation
+ * The mutable trees always have bits 0 .. 30 (inclusive) set to 0
+ * The immutable trees always have bits 0 .. 30 containing the size of the transitive subtree
+ *
+ * Naming
+   * All of the methods that can yield a mutable result have "mutable" in their name, and generally there
+   * is another similarly named method which doesn't. This is to aid safety and to reduce the cognitive load when
+ * reviewing changes. e.g.
+ * def upd(...) will update an immutable Tree, producing an immutable Tree
+ * def mutableUpd(...) will update a mutable or immutable Tree and may return a mutable or immutable Tree
+   * a method that has mutable in its name may return an immutable tree if the operation can reuse the existing tree
+ *
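+   * Worked example of the encoding (assuming the constants defined after Tree): an immutable black tree of
+   * size 5 stores _count = initialBlackCount | 5 = 0x80000005, an immutable red tree of size 5 stores
+   * 0x00000005, and a freshly allocated mutable black tree stores 0x80000000 (bits 0..30 all zero mark it
+   * as mutable).
+   *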
+ */
+ private[immutable] final class Tree[A, +B](
+ @(`inline` @getter @setter) private var _key: A,
+ @(`inline` @getter @setter) private var _value: AnyRef,
+ @(`inline` @getter @setter) private var _left: Tree[A, _],
+ @(`inline` @getter @setter) private var _right: Tree[A, _],
+ @(`inline` @getter @setter) private var _count: Int)
+ {
+ @`inline` private[RedBlackTree] def isMutable: Boolean = (_count & colourMask) == 0
+ // read only APIs
+ @`inline` private[RedBlackTree] final def count = {
+ //devTimeAssert((_count & 0x7FFFFFFF) != 0)
+ _count & colourMask
+ }
+ //retain the colour, and mark as mutable
+ @`inline` private def mutableRetainingColour = _count & colourBit
+
+ //inlined here to avoid outer object null checks
+ @`inline` private[RedBlackTree] final def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count
+ @`inline` private[immutable] final def key = _key
+ @`inline` private[immutable] final def value = _value.asInstanceOf[B]
+ @`inline` private[immutable] final def left = _left.asInstanceOf[Tree[A, B]]
+ @`inline` private[immutable] final def right = _right.asInstanceOf[Tree[A, B]]
+ //Note - only used in tests outside RedBlackTree
+ @`inline` private[immutable] final def isBlack = _count < 0
+ //Note - only used in tests outside RedBlackTree
+ @`inline` private[immutable] final def isRed = _count >= 0
+
+ override def toString: String = s"${if(isRed) "RedTree" else "BlackTree"}($key, $value, $left, $right)"
+
+ //mutable APIs
+ private[RedBlackTree] def makeImmutable: Tree[A, B] = {
+ def makeImmutableImpl() = {
+ if (isMutable) {
+ var size = 1
+ if (_left ne null) {
+ _left.makeImmutable
+ size += _left.count
+ }
+ if (_right ne null) {
+ _right.makeImmutable
+ size += _right.count
+ }
+ _count |= size //retains colour
+ }
+ this
+ }
+ makeImmutableImpl()
+ this
+ }
+
+ private[RedBlackTree] def mutableBlack: Tree[A, B] = {
+ if (isBlack) this
+ else if (isMutable) {
+ _count = initialBlackCount
+ this
+ }
+ else new Tree(_key, _value, _left, _right, initialBlackCount)
+ }
+// private[RedBlackTree] def mutableRed: Tree[A, B] = {
+// if (isRed) this
+// else if (mutable) {
+// _count = initialRedCount
+// this
+// }
+// else new Tree(_key, _value, _left, _right, initialRedCount)
+// }
+
+ private[RedBlackTree] def mutableWithV[B1 >: B](newValue: B1): Tree[A, B1] = {
+ if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this
+ else if (isMutable) {
+ _value = newValue.asInstanceOf[AnyRef]
+ this
+ } else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, mutableRetainingColour)
+ }
+
+ private[RedBlackTree] def mutableWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+ if (_left eq newLeft) this
+ else if (isMutable) {
+ _left = newLeft
+ this
+ } else new Tree(_key, _value, newLeft, _right, mutableRetainingColour)
+ }
+ private[RedBlackTree] def mutableWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+ if (_right eq newRight) this
+ else if (isMutable) {
+ _right = newRight
+ this
+ } else new Tree(_key, _value, _left, newRight, mutableRetainingColour)
+ }
+ private[RedBlackTree] def mutableWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+ if ((_left eq newLeft) && (_right eq newRight)) this
+ else if (isMutable) {
+ _left = newLeft
+ _right = newRight
+ this
+ } else new Tree(_key, _value, newLeft, newRight, mutableRetainingColour)
+ }
+ private[RedBlackTree] def mutableBlackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+ if ((_left eq newLeft) && isBlack) this
+ else if (isMutable) {
+ _count = initialBlackCount
+ _left = newLeft
+ this
+ } else new Tree(_key, _value, newLeft, _right, initialBlackCount)
+ }
+ private[RedBlackTree] def mutableBlackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+ if ((_right eq newRight) && isBlack) this
+ else if (isMutable) {
+ _count = initialBlackCount
+ _right = newRight
+ this
+ } else new Tree(_key, _value, _left, newRight, initialBlackCount)
+ }
+
+ private[RedBlackTree] def black: Tree[A, B] = {
+ //assertNotMutable(this)
+ if (isBlack) this
+ else new Tree(_key, _value, _left, _right, _count ^ colourBit)
+ }
+ private[RedBlackTree] def red: Tree[A, B] = {
+ //assertNotMutable(this)
+ if (isRed) this
+ else new Tree(_key, _value, _left, _right, _count ^ colourBit)
+ }
+ private[RedBlackTree] def withKV[B1 >: B](newKey: A, newValue: B1): Tree[A, B1] = {
+ //assertNotMutable(this)
+ if ((newKey.asInstanceOf[AnyRef] eq _key.asInstanceOf[AnyRef]) &&
+ (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef])) this
+ else new Tree(newKey, newValue.asInstanceOf[AnyRef], _left, _right, _count)
+ }
+ private[RedBlackTree] def withV[B1 >: B](newValue: B1): Tree[A, B1] = {
+ //assertNotMutable(this)
+ if (newValue.asInstanceOf[AnyRef] eq _value.asInstanceOf[AnyRef]) this
+ else new Tree(_key, newValue.asInstanceOf[AnyRef], _left, _right, _count)
+ }
+
+ private[RedBlackTree] def withLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newLeft)
+ if (newLeft eq _left) this
+ else {
+ val size = sizeOf(newLeft) + sizeOf(_right) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, (_count & colourBit) | size)
+ }
+ }
+ private[RedBlackTree] def withRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newRight)
+ if (newRight eq _right) this
+ else {
+ val size = sizeOf(_left) + sizeOf(newRight) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, (_count & colourBit) | size)
+ }
+ }
+ private[RedBlackTree] def blackWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newLeft)
+ if ((newLeft eq _left) && isBlack) this
+ else {
+ val size = sizeOf(newLeft) + sizeOf(_right) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialBlackCount | size)
+ }
+ }
+ private[RedBlackTree] def redWithLeft[B1 >: B](newLeft: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newLeft)
+ if ((newLeft eq _left) && isRed) this
+ else {
+ val size = sizeOf(newLeft) + sizeOf(_right) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], newLeft, _right, initialRedCount | size)
+ }
+ }
+ private[RedBlackTree] def blackWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newRight)
+ if ((newRight eq _right) && isBlack) this
+ else {
+ val size = sizeOf(_left) + sizeOf(newRight) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialBlackCount | size)
+ }
+ }
+ private[RedBlackTree] def redWithRight[B1 >: B](newRight: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newLeft)
+ if ((newRight eq _right) && isRed) this
+ else {
+ val size = sizeOf(_left) + sizeOf(newRight) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], _left, newRight, initialRedCount | size)
+ }
+ }
+ private[RedBlackTree] def withLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newLeft)
+ //assertNotMutable(newRight)
+ if ((newLeft eq _left) && (newRight eq _right)) this
+ else {
+ val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, (_count & colourBit) | size)
+ }
+ }
+ private[RedBlackTree] def redWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newLeft)
+ //assertNotMutable(newRight)
+ if ((newLeft eq _left) && (newRight eq _right) && isRed) this
+ else {
+ val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialRedCount | size)
+ }
+ }
+ private[RedBlackTree] def blackWithLeftRight[B1 >: B](newLeft: Tree[A, B1], newRight: Tree[A, B1]): Tree[A, B1] = {
+ //assertNotMutable(this)
+ //assertNotMutable(newLeft)
+ //assertNotMutable(newRight)
+ if ((newLeft eq _left) && (newRight eq _right) && isBlack) this
+ else {
+ val size = sizeOf(newLeft) + sizeOf(newRight) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], newLeft, newRight, initialBlackCount | size)
+ }
+ }
+ }
+  //see #Tree docs "Colour, mutability and size encoding"
+ //we make these final vals because the optimiser inlines them, without reference to the enclosing module
+ private[RedBlackTree] final val colourBit = 0x80000000
+  // really it's ~colourBit but that doesn't get inlined
+ private[RedBlackTree] final val colourMask = colourBit - 1
+ private[RedBlackTree] final val initialBlackCount = colourBit
+ private[RedBlackTree] final val initialRedCount = 0
+
+ @`inline` private[RedBlackTree] def mutableRedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialRedCount)
+ @`inline` private[RedBlackTree] def mutableBlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new Tree[A,B](key, value.asInstanceOf[AnyRef], left, right, initialBlackCount)
+
+ /** create a new immutable red tree.
+ * left and right may be null
+ */
+ private[immutable] def RedTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = {
+ //assertNotMutable(left)
+ //assertNotMutable(right)
+ val size = sizeOf(left) + sizeOf(right) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], left, right, initialRedCount | size)
+ }
+ private[immutable] def BlackTree[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]): Tree[A, B] = {
+ //assertNotMutable(left)
+ //assertNotMutable(right)
+ val size = sizeOf(left) + sizeOf(right) + 1
+ new Tree(key, value.asInstanceOf[AnyRef], left, right, initialBlackCount | size)
+ }
+ @`inline` private def sizeOf(tree:Tree[_,_]) = if (tree eq null) 0 else tree.count
+ //immutable APIs
+ //assertions - uncomment decls and callers when changing functionality
+ // private def devTimeAssert(assertion: Boolean) = {
+ // //uncomment this during development of the functionality
+ // assert(assertion)
+ // }
+ // private def assertNotMutable(t:Tree[_,_]) = {
+ // devTimeAssert ((t eq null) || t.count > 0)
+ // }
+ private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(protected implicit val ordering: Ordering[A]) extends AbstractIterator[R] {
+ protected[this] def nextResult(tree: Tree[A, B]): R
+
+ override def hasNext: Boolean = lookahead ne null
+
+ @throws[NoSuchElementException]
+ override def next(): R = {
+ val tree = lookahead
+ if(tree ne null) {
+ lookahead = findLeftMostOrPopOnEmpty(goRight(tree))
+ nextResult(tree)
+ } else Iterator.empty.next()
+ }
+
+ @tailrec
+ protected final def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) popNext()
+ else if (tree.left eq null) tree
+ else findLeftMostOrPopOnEmpty(goLeft(tree))
+
+ @`inline` private[this] def pushNext(tree: Tree[A, B]): Unit = {
+ stackOfNexts(index) = tree
+ index += 1
+ }
+ @`inline` protected final def popNext(): Tree[A, B] = if (index == 0) null else {
+ index -= 1
+ stackOfNexts(index)
+ }
+
+ protected[this] val stackOfNexts = if (root eq null) null else {
+ /*
+ * According to "Ralf Hinze. Constructing red-black trees" [https://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
+ * the maximum height of a red-black tree is 2*log_2(n + 2) - 2.
+ *
+ * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1))
+ *
+ * Although we don't store the deepest nodes in the path during iteration,
+ * we potentially do so in `startFrom`.
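+       *
+       * As a concrete sketch: for a tree of 1000 entries, 32 - numberOfLeadingZeros(1001) = 10, so the
+       * stack is sized 2 * 10 - 2 = 18.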
+ */
+ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2
+ new Array[Tree[A, B] @uncheckedCaptures](maximumHeight)
+ }
+ private[this] var index = 0
+ protected var lookahead: Tree[A, B] @uncheckedCaptures =
+ if (start.isDefined) startFrom(start.get) else findLeftMostOrPopOnEmpty(root)
+
+ /**
+ * Find the leftmost subtree whose key is equal to the given key, or if no such thing,
+ * the leftmost subtree with the key that would be "next" after it according
+ * to the ordering. Along the way build up the iterator's path stack so that "next"
+ * functionality works.
+ */
+ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else {
+ @tailrec def find(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) popNext()
+ else find(
+ if (ordering.lteq(key, tree.key)) goLeft(tree)
+ else goRight(tree)
+ )
+ find(root)
+ }
+
+ @`inline` private[this] def goLeft(tree: Tree[A, B]) = {
+ pushNext(tree)
+ tree.left
+ }
+
+ @`inline` protected final def goRight(tree: Tree[A, B]) = tree.right
+ }
+
+ private[this] class EqualsIterator[A: Ordering, B](tree: Tree[A, B]) extends TreeIterator[A, B, Unit](tree, None) {
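+    // nextResult is intentionally unimplemented: the same* comparisons below drive both iterators through
+    // lookahead/findLeftMostOrPopOnEmpty directly and never call next().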
+ override def nextResult(tree: Tree[A, B]) = ???
+
+ def sameKeys[X](that:EqualsIterator[A,X]): Boolean = {
+ var equal = true
+ while (equal && (this.lookahead ne null) && (that.lookahead ne null)) {
+ if (this.lookahead eq that.lookahead) {
+ this.lookahead = this.popNext()
+ that.lookahead = that.popNext()
+ } else {
+ equal = (this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) ||
+ ordering.equiv(this.lookahead.key, that.lookahead.key)
+ this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead))
+ that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead))
+ }
+ }
+ equal && (this.lookahead eq null) && (that.lookahead eq null)
+ }
+ def sameValues[X](that:EqualsIterator[A,X]): Boolean = {
+ var equal = true
+ while (equal && (this.lookahead ne null) && (that.lookahead ne null)) {
+ if (this.lookahead eq that.lookahead) {
+ this.lookahead = this.popNext()
+ that.lookahead = that.popNext()
+ } else {
+ equal = this.lookahead.value == that.lookahead.value
+ this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead))
+ that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead))
+ }
+ }
+ equal && (this.lookahead eq null) && (that.lookahead eq null)
+ }
+ def sameEntries[X](that:EqualsIterator[A,X]): Boolean = {
+ var equal = true
+ while (equal && (this.lookahead ne null) && (that.lookahead ne null)) {
+ if (this.lookahead eq that.lookahead) {
+ this.lookahead = this.popNext()
+ that.lookahead = that.popNext()
+ } else {
+ equal = ((this.lookahead.key.asInstanceOf[AnyRef] eq that.lookahead.key.asInstanceOf[AnyRef]) ||
+ ordering.equiv(this.lookahead.key, that.lookahead.key)) && this.lookahead.value == that.lookahead.value
+ this.lookahead = this.findLeftMostOrPopOnEmpty(this.goRight(this.lookahead))
+ that.lookahead = that.findLeftMostOrPopOnEmpty(that.goRight(that.lookahead))
+ }
+ }
+ equal && (this.lookahead eq null) && (that.lookahead eq null)
+ }
+ }
+ private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) {
+ override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value)
+ }
+
+ private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) {
+ override def nextResult(tree: Tree[A, B]) = tree.key
+ }
+
+ private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) {
+ override def nextResult(tree: Tree[A, B]) = tree.value
+ }
+
+ /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */
+ def fromOrderedKeys[A](xs: Iterator[A], size: Int): Tree[A, Null] = {
+ val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+ def f(level: Int, size: Int): Tree[A, Null] = size match {
+ case 0 => null
+ case 1 => mkTree(level != maxUsedDepth || level == 1, xs.next(), null, null, null)
+ case n =>
+ val leftSize = (size-1)/2
+ val left = f(level+1, leftSize)
+ val x = xs.next()
+ val right = f(level+1, size-1-leftSize)
+ BlackTree(x, null, left, right)
+ }
+ f(1, size)
+ }
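+  // Colouring sketch for fromOrderedKeys (and fromOrderedEntries below): only singleton subtrees at the
+  // deepest used level (level == maxUsedDepth, level != 1) are made red; everything else is black, which
+  // satisfies the red-black invariants for a tree built from sorted input.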
+
+ /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */
+ def fromOrderedEntries[A, B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = {
+ val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+ def f(level: Int, size: Int): Tree[A, B] = size match {
+ case 0 => null
+ case 1 =>
+ val (k, v) = xs.next()
+ mkTree(level != maxUsedDepth || level == 1, k, v, null, null)
+ case n =>
+ val leftSize = (size-1)/2
+ val left = f(level+1, leftSize)
+ val (k, v) = xs.next()
+ val right = f(level+1, size-1-leftSize)
+ BlackTree(k, v, left, right)
+ }
+ f(1, size)
+ }
+
+ def transform[A, B, C](t: Tree[A, B], f: (A, B) => C): Tree[A, C] =
+ if(t eq null) null
+ else {
+ val k = t.key
+ val v = t.value
+ val l = t.left
+ val r = t.right
+ val l2 = transform(l, f)
+ val v2 = f(k, v)
+ val r2 = transform(r, f)
+ if((v2.asInstanceOf[AnyRef] eq v.asInstanceOf[AnyRef])
+ && (l2 eq l)
+ && (r2 eq r)) t.asInstanceOf[Tree[A, C]]
+ else mkTree(t.isBlack, k, v2, l2, r2)
+ }
+
+ def filterEntries[A, B](t: Tree[A, B], f: (A, B) => Boolean): Tree[A, B] = if(t eq null) null else {
+ def fk(t: Tree[A, B]): Tree[A, B] = {
+ val k = t.key
+ val v = t.value
+ val l = t.left
+ val r = t.right
+ val l2 = if(l eq null) null else fk(l)
+ val keep = f(k, v)
+ val r2 = if(r eq null) null else fk(r)
+ if(!keep) join2(l2, r2)
+ else if((l2 eq l) && (r2 eq r)) t
+ else join(l2, k, v, r2)
+ }
+ blacken(fk(t))
+ }
+
+ private[this] val null2 = (null, null)
+
+  def partitionEntries[A, B](t: Tree[A, B], p: (A, B) => Boolean): (Tree[A, B], Tree[A, B]) =
+    if (t eq null) null2 // share the cached empty pair instead of allocating a new one
+    else {
+ object partitioner {
+ var tmpk, tmpd = null: Tree[A, B] // shared vars to avoid returning tuples from fk
+ def fk(t: Tree[A, B]): Unit = {
+ val k = t.key
+ val v = t.value
+ val l = t.left
+ val r = t.right
+ var l2k, l2d, r2k, r2d = null: Tree[A, B]
+ if (l ne null) {
+ fk(l)
+ l2k = tmpk
+ l2d = tmpd
+ }
+ val keep = p(k, v)
+ if (r ne null) {
+ fk(r)
+ r2k = tmpk
+ r2d = tmpd
+ }
+ val jk =
+ if (!keep) join2(l2k, r2k)
+ else if ((l2k eq l) && (r2k eq r)) t
+ else join(l2k, k, v, r2k)
+ val jd =
+ if (keep) join2(l2d, r2d)
+ else if ((l2d eq l) && (r2d eq r)) t
+ else join(l2d, k, v, r2d)
+ tmpk = jk
+ tmpd = jd
+ }
+ }
+
+ partitioner.fk(t)
+ (blacken(partitioner.tmpk), blacken(partitioner.tmpd))
+    }
+
+ // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
+ // Constructing Red-Black Trees, Ralf Hinze: [[https://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]]
+  // Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]]
+
+ private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else {
+ val cmp = ordering.compare(k, tree.key)
+ if (cmp < 0) {
+ val newLeft = del(tree.left, k)
+ if (newLeft eq tree.left) tree
+ else if (isBlackTree(tree.left)) balLeft(tree, newLeft, tree.right)
+ else tree.redWithLeft(newLeft)
+ } else if (cmp > 0) {
+ val newRight = del(tree.right, k)
+ if (newRight eq tree.right) tree
+ else if (isBlackTree(tree.right)) balRight(tree, tree.left, newRight)
+ else tree.redWithRight(newRight)
+ } else append(tree.left, tree.right)
+ }
+
+ private[this] def balance[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] =
+ if (isRedTree(tl)) {
+ if (isRedTree(tr)) tree.redWithLeftRight(tl.black, tr.black)
+ else if (isRedTree(tl.left)) tl.withLeftRight(tl.left.black, tree.blackWithLeftRight(tl.right, tr))
+ else if (isRedTree(tl.right)) tl.right.withLeftRight(tl.blackWithRight(tl.right.left), tree.blackWithLeftRight(tl.right.right, tr))
+ else tree.blackWithLeftRight(tl, tr)
+ } else if (isRedTree(tr)) {
+ if (isRedTree(tr.right)) tr.withLeftRight(tree.blackWithLeftRight(tl, tr.left), tr.right.black)
+ else if (isRedTree(tr.left)) tr.left.withLeftRight(tree.blackWithLeftRight(tl, tr.left.left), tr.blackWithLeftRight(tr.left.right, tr.right))
+ else tree.blackWithLeftRight(tl, tr)
+ } else tree.blackWithLeftRight(tl, tr)
+
+ private[this] def balLeft[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] =
+ if (isRedTree(tl)) tree.redWithLeftRight(tl.black, tr)
+ else if (isBlackTree(tr)) balance(tree, tl, tr.red)
+ else if (isRedTree(tr) && isBlackTree(tr.left))
+ tr.left.redWithLeftRight(tree.blackWithLeftRight(tl, tr.left.left), balance(tr, tr.left.right, tr.right.red))
+    else sys.error("Defect: invariant violation")
+
+ private[this] def balRight[A, B](tree: Tree[A,B], tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] =
+ if (isRedTree(tr)) tree.redWithLeftRight(tl, tr.black)
+ else if (isBlackTree(tl)) balance(tree, tl.red, tr)
+ else if (isRedTree(tl) && isBlackTree(tl.right))
+ tl.right.redWithLeftRight(balance(tl, tl.left.red, tl.right.left), tree.blackWithLeftRight(tl.right.right, tr))
+    else sys.error("Defect: invariant violation")
+
+ /** `append` is similar to `join2` but requires that both subtrees have the same black height */
+ private[this] def append[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = {
+ if (tl eq null) tr
+ else if (tr eq null) tl
+ else if (tl.isRed) {
+ if (tr.isRed) {
+ //tl is red, tr is red
+ val bc = append(tl.right, tr.left)
+ if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right))
+ else tl.withRight(tr.withLeft(bc))
+ } else {
+ //tl is red, tr is black
+ tl.withRight(append(tl.right, tr))
+ }
+ } else {
+ if (tr.isBlack) {
+ //tl is black tr is black
+ val bc = append(tl.right, tr.left)
+ if (isRedTree(bc)) bc.withLeftRight(tl.withRight(bc.left), tr.withLeft(bc.right))
+ else balLeft(tl, tl.left, tr.withLeft(bc))
+ } else {
+ //tl is black tr is red
+ tr.withLeft(append(tl, tr.left))
+ }
+ }
+ }
+
+
+ // Bulk operations based on "Just Join for Parallel Ordered Sets" (https://www.cs.cmu.edu/~guyb/papers/BFS16.pdf)
+ // We don't store the black height in the tree so we pass it down into the join methods and derive the black height
+ // of child nodes from it. Where possible the black height is used directly instead of deriving the rank from it.
+ // Our trees are supposed to have a black root so we always blacken as the last step of union/intersect/difference.
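+  // A usage sketch (hypothetical values; for keys present in both trees, t2's value wins in union):
+  //   val t1 = update(update(null: Tree[Int, String], 1, "a", overwrite = true), 2, "b", overwrite = true)
+  //   val t2 = update(null: Tree[Int, String], 2, "B", overwrite = true)
+  //   union(t1, t2)   // keys 1 -> "a" and 2 -> "B"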
+
+ def union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_union(t1, t2))
+
+ def intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(_intersect(t1, t2))
+
+ def difference[A, B](t1: Tree[A, B], t2: Tree[A, _])(implicit ordering: Ordering[A]): Tree[A, B] =
+ blacken(_difference(t1, t2.asInstanceOf[Tree[A, B]]))
+
+ /** Compute the rank from a tree and its black height */
+ @`inline` private[this] def rank(t: Tree[_, _], bh: Int): Int = {
+ if(t eq null) 0
+ else if(t.isBlack) 2*(bh-1)
+ else 2*bh-1
+ }
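+  // e.g. with black height 3, a black node has rank 2*(3-1) = 4, a red node 2*3 - 1 = 5, and null has rank 0.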
+
+ private[this] def joinRight[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], bhtl: Int, rtr: Int): Tree[A, B] = {
+ val rtl = rank(tl, bhtl)
+ if(rtl == (rtr/2)*2) RedTree(k, v, tl, tr)
+ else {
+ val tlBlack = isBlackTree(tl)
+ val bhtlr = if(tlBlack) bhtl-1 else bhtl
+ val ttr = joinRight(tl.right, k, v, tr, bhtlr, rtr)
+ if(tlBlack && isRedTree(ttr) && isRedTree(ttr.right))
+ RedTree(ttr.key, ttr.value,
+ BlackTree(tl.key, tl.value, tl.left, ttr.left),
+ ttr.right.black)
+ else mkTree(tlBlack, tl.key, tl.value, tl.left, ttr)
+ }
+ }
+
+ private[this] def joinLeft[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B], rtl: Int, bhtr: Int): Tree[A, B] = {
+ val rtr = rank(tr, bhtr)
+ if(rtr == (rtl/2)*2) RedTree(k, v, tl, tr)
+ else {
+ val trBlack = isBlackTree(tr)
+ val bhtrl = if(trBlack) bhtr-1 else bhtr
+ val ttl = joinLeft(tl, k, v, tr.left, rtl, bhtrl)
+ if(trBlack && isRedTree(ttl) && isRedTree(ttl.left))
+ RedTree(ttl.key, ttl.value,
+ ttl.left.black,
+ BlackTree(tr.key, tr.value, ttl.right, tr.right))
+ else mkTree(trBlack, tr.key, tr.value, ttl, tr.right)
+ }
+ }
+
+ private[this] def join[A, B](tl: Tree[A, B], k: A, v: B, tr: Tree[A, B]): Tree[A, B] = {
+ @tailrec def h(t: Tree[_, _], i: Int): Int =
+ if(t eq null) i+1 else h(t.left, if(t.isBlack) i+1 else i)
+ val bhtl = h(tl, 0)
+ val bhtr = h(tr, 0)
+ if(bhtl > bhtr) {
+ val tt = joinRight(tl, k, v, tr, bhtl, rank(tr, bhtr))
+ if(isRedTree(tt) && isRedTree(tt.right)) tt.black
+ else tt
+ } else if(bhtr > bhtl) {
+ val tt = joinLeft(tl, k, v, tr, rank(tl, bhtl), bhtr)
+ if(isRedTree(tt) && isRedTree(tt.left)) tt.black
+ else tt
+ } else mkTree(isRedTree(tl) || isRedTree(tr), k, v, tl, tr)
+ }
+
+ private[this] def split[A, B](t: Tree[A, B], k2: A)(implicit ordering: Ordering[A]): (Tree[A, B], Tree[A, B], Tree[A, B], A) =
+ if(t eq null) (null, null, null, k2)
+ else {
+ val cmp = ordering.compare(k2, t.key)
+ if(cmp == 0) (t.left, t, t.right, t.key)
+ else if(cmp < 0) {
+ val (ll, b, lr, k1) = split(t.left, k2)
+ (ll, b, join(lr, t.key, t.value, t.right), k1)
+ } else {
+ val (rl, b, rr, k1) = split(t.right, k2)
+ (join(t.left, t.key, t.value, rl), b, rr, k1)
+ }
+ }
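+  // Contract sketch for split(t, k2): returns (subtree of keys < k2, the node holding k2 or null,
+  // subtree of keys > k2, and the key instance found in the tree, or k2 itself when absent).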
+
+ private[this] def splitLast[A, B](t: Tree[A, B]): (Tree[A, B], A, B) =
+ if(t.right eq null) (t.left, t.key, t.value)
+ else {
+ val (tt, kk, vv) = splitLast(t.right)
+ (join(t.left, t.key, t.value, tt), kk, vv)
+ }
+
+ private[this] def join2[A, B](tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] =
+ if(tl eq null) tr
+ else if(tr eq null) tl
+ else {
+ val (ttl, k, v) = splitLast(tl)
+ join(ttl, k, v, tr)
+ }
+
+ private[this] def _union[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] =
+ if((t1 eq null) || (t1 eq t2)) t2
+ else if(t2 eq null) t1
+ else {
+ val (l1, _, r1, k1) = split(t1, t2.key)
+ val tl = _union(l1, t2.left)
+ val tr = _union(r1, t2.right)
+ join(tl, k1, t2.value, tr)
+ }
+
+ private[this] def _intersect[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] =
+ if((t1 eq null) || (t2 eq null)) null
+ else if (t1 eq t2) t1
+ else {
+ val (l1, b, r1, k1) = split(t1, t2.key)
+ val tl = _intersect(l1, t2.left)
+ val tr = _intersect(r1, t2.right)
+ if(b ne null) join(tl, k1, t2.value, tr)
+ else join2(tl, tr)
+ }
+
+ private[this] def _difference[A, B](t1: Tree[A, B], t2: Tree[A, B])(implicit ordering: Ordering[A]): Tree[A, B] =
+ if((t1 eq null) || (t2 eq null)) t1
+ else if (t1 eq t2) null
+ else {
+ val (l1, _, r1, k1) = split(t1, t2.key)
+ val tl = _difference(l1, t2.left)
+ val tr = _difference(r1, t2.right)
+ join2(tl, tr)
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/Seq.scala b/tests/pos-special/stdlib/collection/immutable/Seq.scala
index 5184cadaccae..d575c3aaf14a 100644
--- a/tests/pos-special/stdlib/collection/immutable/Seq.scala
+++ b/tests/pos-special/stdlib/collection/immutable/Seq.scala
@@ -30,7 +30,7 @@ trait Seq[+A] extends Iterable[A]
* @define coll immutable sequence
* @define Coll `immutable.Seq`
*/
-trait SeqOps[+A, +CC[_], +C] extends AnyRef with collection.SeqOps[A, CC, C]
+trait SeqOps[+A, +CC[_], +C] extends Any with collection.SeqOps[A, CC, C]
/**
* $factoryInfo
diff --git a/tests/pos-special/stdlib/collection/immutable/SeqMap.scala b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala
new file mode 100644
index 000000000000..6c955fd52fc2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/SeqMap.scala
@@ -0,0 +1,278 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.collection.mutable.{Builder, ReusableBuilder}
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** A base trait for ordered, immutable maps.
+ *
+ * Note that the [[equals]] method for [[SeqMap]] compares key-value pairs
+ * without regard to ordering.
+ *
+ * All behavior is defined in terms of the abstract methods in `SeqMap`.
+ * It is sufficient for concrete subclasses to implement those methods.
+ * Methods that return a new map, in particular [[removed]] and [[updated]], must preserve ordering.
+ *
+ * @tparam K the type of the keys contained in this seq map.
+ * @tparam V the type of the values associated with the keys in this seq map.
+ *
+ * @define coll immutable seq map
+ * @define Coll `immutable.SeqMap`
+ */
+trait SeqMap[K, +V]
+ extends Map[K, V]
+ with collection.SeqMap[K, V]
+ with MapOps[K, V, SeqMap, SeqMap[K, V]]
+ with MapFactoryDefaults[K, V, SeqMap, Iterable] {
+ override def mapFactory: MapFactory[SeqMap] = SeqMap
+}
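+// A quick sketch of the ordering guarantee (hypothetical REPL session):
+//   val m = SeqMap("b" -> 1, "a" -> 2, "c" -> 3)
+//   m.keys.toList           // List("b", "a", "c"): insertion order is preserved
+//   (m - "a").keys.toList   // List("b", "c"): removal preserves the remaining order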
+
+
+object SeqMap extends MapFactory[SeqMap] {
+ def empty[K, V]: SeqMap[K, V] = EmptySeqMap.asInstanceOf[SeqMap[K, V]]
+
+ def from[K, V](it: collection.IterableOnce[(K, V)]^): SeqMap[K, V] =
+ it match {
+ case sm: SeqMap[K, V] => sm
+ case _ => (newBuilder[K, V] ++= it).result()
+ }
+
+ def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl
+
+ @SerialVersionUID(3L)
+ private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable {
+ override def size: Int = 0
+ override def knownSize: Int = 0
+ override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: Any) = false
+ def get(key: Any): Option[Nothing] = None
+ override def getOrElse [V1](key: Any, default: => V1): V1 = default
+ def iterator: Iterator[(Any, Nothing)] = Iterator.empty
+ def updated [V1] (key: Any, value: V1): SeqMap[Any, V1] = new SeqMap1(key, value)
+ def removed(key: Any): SeqMap[Any, Nothing] = this
+ }
+
+ @SerialVersionUID(3L)
+ private final class SeqMap1[K, +V](key1: K, value1: V) extends SeqMap[K,V] with Serializable {
+ override def size: Int = 1
+ override def knownSize: Int = 1
+ override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = key == key1
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1) else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1 else default
+ def iterator = Iterator.single((key1, value1))
+ def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] =
+ if (key == key1) new SeqMap1(key1, value)
+ else new SeqMap2(key1, value1, key, value)
+ def removed(key: K): SeqMap[K, V] =
+ if (key == key1) SeqMap.empty else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1))
+ }
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ f(key1, value1)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ private final class SeqMap2[K, +V](key1: K, value1: V, key2: K, value2: V) extends SeqMap[K,V] with Serializable {
+ override def size: Int = 2
+ override def knownSize: Int = 2
+ override def apply(key: K) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = (key == key1) || (key == key2)
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1)
+ else if (key == key2) Some(value2)
+ else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else default
+ def iterator = ((key1, value1) :: (key2, value2) :: Nil).iterator
+ def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] =
+ if (key == key1) new SeqMap2(key1, value, key2, value2)
+ else if (key == key2) new SeqMap2(key1, value1, key2, value)
+ else new SeqMap3(key1, value1, key2, value2, key, value)
+ def removed(key: K): SeqMap[K, V] =
+ if (key == key1) new SeqMap1(key2, value2)
+ else if (key == key2) new SeqMap1(key1, value1)
+ else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1)); f((key2, value2))
+ }
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ f(key1, value1)
+ f(key2, value2)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ private class SeqMap3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends SeqMap[K,V] with Serializable {
+ override def size: Int = 3
+ override def knownSize: Int = 3
+ override def apply(key: K) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = (key == key1) || (key == key2) || (key == key3)
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1)
+ else if (key == key2) Some(value2)
+ else if (key == key3) Some(value3)
+ else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else default
+ def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: Nil).iterator
+ def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] =
+ if (key == key1) new SeqMap3(key1, value, key2, value2, key3, value3)
+ else if (key == key2) new SeqMap3(key1, value1, key2, value, key3, value3)
+ else if (key == key3) new SeqMap3(key1, value1, key2, value2, key3, value)
+ else new SeqMap4(key1, value1, key2, value2, key3, value3, key, value)
+ def removed(key: K): SeqMap[K, V] =
+ if (key == key1) new SeqMap2(key2, value2, key3, value3)
+ else if (key == key2) new SeqMap2(key1, value1, key3, value3)
+ else if (key == key3) new SeqMap2(key1, value1, key2, value2)
+ else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1)); f((key2, value2)); f((key3, value3))
+ }
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ f(key1, value1)
+ f(key2, value2)
+ f(key3, value3)
+ }
+ }
+
+ @SerialVersionUID(3L)
+ private final class SeqMap4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends SeqMap[K,V] with Serializable {
+ override def size: Int = 4
+ override def knownSize: Int = 4
+ override def apply(key: K) =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else throw new NoSuchElementException("key not found: " + key)
+ override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4)
+ def get(key: K): Option[V] =
+ if (key == key1) Some(value1)
+ else if (key == key2) Some(value2)
+ else if (key == key3) Some(value3)
+ else if (key == key4) Some(value4)
+ else None
+ override def getOrElse [V1 >: V](key: K, default: => V1): V1 =
+ if (key == key1) value1
+ else if (key == key2) value2
+ else if (key == key3) value3
+ else if (key == key4) value4
+ else default
+ def iterator = ((key1, value1) :: (key2, value2) :: (key3, value3) :: (key4, value4) :: Nil).iterator
+ def updated[V1 >: V](key: K, value: V1): SeqMap[K, V1] =
+ if (key == key1) new SeqMap4(key1, value, key2, value2, key3, value3, key4, value4)
+ else if (key == key2) new SeqMap4(key1, value1, key2, value, key3, value3, key4, value4)
+ else if (key == key3) new SeqMap4(key1, value1, key2, value2, key3, value, key4, value4)
+ else if (key == key4) new SeqMap4(key1, value1, key2, value2, key3, value3, key4, value)
+ else {
+ // Directly create the elements for performance reasons
+ val fields = Vector(key1, key2, key3, key4, key)
+ val underlying: Map[K, (Int, V1)] =
+ HashMap(
+ (key1, (0, value1)),
+ (key2, (1, value2)),
+ (key3, (2, value3)),
+ (key4, (3, value4)),
+ (key, (4, value))
+ )
+ new VectorMap(fields, underlying)
+ }
+ def removed(key: K): SeqMap[K, V] =
+ if (key == key1) new SeqMap3(key2, value2, key3, value3, key4, value4)
+ else if (key == key2) new SeqMap3(key1, value1, key3, value3, key4, value4)
+ else if (key == key3) new SeqMap3(key1, value1, key2, value2, key4, value4)
+ else if (key == key4) new SeqMap3(key1, value1, key2, value2, key3, value3)
+ else this
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4))
+ }
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ f(key1, value1)
+ f(key2, value2)
+ f(key3, value3)
+ f(key4, value4)
+ }
+
+ private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type =
+ builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4))
+ }
+
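+  // Builder note (a sketch of the strategy, not additional API): up to four entries are kept in the compact
+  // SeqMap1..SeqMap4 representations; on the fifth distinct key the contents are copied into a
+  // VectorMapBuilder via SeqMap4.buildTo and all further additions go there.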
+ private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] {
+ private[this] var elems: SeqMap[K, V] @uncheckedCaptures = SeqMap.empty
+ private[this] var switchedToVectorMapBuilder: Boolean = false
+ private[this] var vectorMapBuilder: VectorMapBuilder[K, V] @uncheckedCaptures = _
+
+ override def clear(): Unit = {
+ elems = SeqMap.empty
+ if (vectorMapBuilder != null) {
+ vectorMapBuilder.clear()
+ }
+ switchedToVectorMapBuilder = false
+ }
+
+ override def result(): SeqMap[K, V] =
+ if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems
+
+ def addOne(elem: (K, V)) = {
+ if (switchedToVectorMapBuilder) {
+ vectorMapBuilder.addOne(elem)
+ } else if (elems.size < 4) {
+ elems = elems + elem
+ } else {
+ // assert(elems.size == 4)
+ if (elems.contains(elem._1)) {
+ elems = elems + elem // will not increase the size of the map
+ } else {
+ switchedToVectorMapBuilder = true
+ if (vectorMapBuilder == null) {
+ vectorMapBuilder = new VectorMapBuilder
+ }
+ elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder)
+ vectorMapBuilder.addOne(elem)
+ }
+ }
+
+ this
+ }
+
+ override def addAll(xs: IterableOnce[(K, V)]^): this.type =
+ if (switchedToVectorMapBuilder) {
+ vectorMapBuilder.addAll(xs)
+ this
+ } else {
+ super.addAll(xs)
+ }
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/Set.scala b/tests/pos-special/stdlib/collection/immutable/Set.scala
new file mode 100644
index 000000000000..ac92f81b2013
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Set.scala
@@ -0,0 +1,400 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.collection.immutable.Set.Set4
+import scala.collection.mutable.{Builder, ReusableBuilder}
+import language.experimental.captureChecking
+import annotation.unchecked.uncheckedCaptures
+
+/** Base trait for immutable set collections */
+trait Set[A] extends Iterable[A]
+ with collection.Set[A]
+ with SetOps[A, Set, Set[A]]
+ with IterableFactoryDefaults[A, Set] {
+ override def iterableFactory: IterableFactory[Set] = Set
+}
+
+/** Base trait for immutable set operations
+ *
+ * @define coll immutable set
+ * @define Coll `immutable.Set`
+ */
+trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]]
+ extends collection.SetOps[A, CC, C] {
+
+ /** Creates a new set with an additional element, unless the element is
+ * already present.
+ *
+ * @param elem the element to be added
+ * @return a new set that contains all elements of this set and that also
+ * contains `elem`.
+ */
+ def incl(elem: A): C
+
+ /** Alias for `incl` */
+ override final def + (elem: A): C = incl(elem) // like in collection.Set but not deprecated
+
+ /** Creates a new set with a given element removed from this set.
+ *
+ * @param elem the element to be removed
+ * @return a new set that contains all elements of this set but that does not
+ * contain `elem`.
+ */
+ def excl(elem: A): C
+
+ /** Alias for `excl` */
+ @`inline` final override def - (elem: A): C = excl(elem)
+
+ def diff(that: collection.Set[A]): C =
+ foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem)
+
+ /** Creates a new $coll from this $coll by removing all elements of another
+ * collection.
+ *
+ * @param that the collection containing the elements to remove.
+ * @return a new $coll with the given elements removed, omitting duplicates.
+ */
+ def removedAll(that: IterableOnce[A]): C = that.iterator.foldLeft[C](coll)(_ - _)
+
+ /** Alias for removedAll */
+ override final def -- (that: IterableOnce[A]): C = removedAll(that)
+}
+
+trait StrictOptimizedSetOps[A, +CC[X], +C <: SetOps[A, CC, C]]
+ extends SetOps[A, CC, C]
+ with collection.StrictOptimizedSetOps[A, CC, C]
+ with StrictOptimizedIterableOps[A, CC, C] {
+
+ override def concat(that: collection.IterableOnce[A]): C = {
+ var result: C = coll
+ val it = that.iterator
+ while (it.hasNext) result = result + it.next()
+ result
+ }
+}
+
+/**
+ * $factoryInfo
+ * @define coll immutable set
+ * @define Coll `immutable.Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory[Set] {
+
+ def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]]
+
+ def from[E](it: collection.IterableOnce[E]^): Set[E] =
+ it match {
+ // We want `SortedSet` (and subclasses, such as `BitSet`) to
+ // rebuild themselves to avoid element type widening issues
+ case _: SortedSet[E] => (newBuilder[E] ++= it).result()
+ case _ if it.knownSize == 0 => empty[E]
+ case s: Set[E] => s
+ case _ => (newBuilder[E] ++= it).result()
+ }
+
+ def newBuilder[A]: Builder[A, Set[A]] = new SetBuilderImpl[A]
+
+ /** An optimized representation for immutable empty sets */
+ @SerialVersionUID(3L)
+ private object EmptySet extends AbstractSet[Any] with Serializable {
+ override def size: Int = 0
+ override def isEmpty = true
+ override def knownSize: Int = size
+ override def filter(pred: Any => Boolean): Set[Any] = this
+ override def filterNot(pred: Any => Boolean): Set[Any] = this
+ override def removedAll(that: IterableOnce[Any]): Set[Any] = this
+ override def diff(that: collection.Set[Any]): Set[Any] = this
+ override def subsetOf(that: collection.Set[Any]): Boolean = true
+ override def intersect(that: collection.Set[Any]): Set[Any] = this
+ override def view: View[Any] = View.empty
+ def contains(elem: Any): Boolean = false
+ def incl(elem: Any): Set[Any] = new Set1(elem)
+ def excl(elem: Any): Set[Any] = this
+ def iterator: Iterator[Any] = Iterator.empty
+ override def foreach[U](f: Any => U): Unit = ()
+ }
+ private[collection] def emptyInstance: Set[Any] = EmptySet
+
+ @SerialVersionUID(3L)
+ private abstract class SetNIterator[A](n: Int) extends AbstractIterator[A], Serializable, Pure {
+ private[this] var current = 0
+ private[this] var remainder = n
+ override def knownSize: Int = remainder
+ def hasNext = remainder > 0
+ def apply(i: Int): A
+ def next(): A =
+ if (hasNext) {
+ val r = apply(current)
+ current += 1
+ remainder -= 1
+ r
+ } else Iterator.empty.next()
+
+ override def drop(n: Int): Iterator[A] = {
+ if (n > 0) {
+ current += n
+ remainder = Math.max(0, remainder - n)
+ }
+ this
+ }
+ }
+
+ /** An optimized representation for immutable sets of size 1 */
+ @SerialVersionUID(3L)
+ final class Set1[A] private[collection] (elem1: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable {
+ override def size: Int = 1
+ override def isEmpty = false
+ override def knownSize: Int = size
+ def contains(elem: A): Boolean = elem == elem1
+ def incl(elem: A): Set[A] =
+ if (contains(elem)) this
+ else new Set2(elem1, elem)
+ def excl(elem: A): Set[A] =
+ if (elem == elem1) Set.empty
+ else this
+ def iterator: Iterator[A] = Iterator.single(elem1)
+ override def foreach[U](f: A => U): Unit = f(elem1)
+ override def exists(p: A => Boolean): Boolean = p(elem1)
+ override def forall(p: A => Boolean): Boolean = p(elem1)
+ override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] =
+ if (pred(elem1) != isFlipped) this else Set.empty
+
+ override def find(p: A => Boolean): Option[A] =
+ if (p(elem1)) Some(elem1)
+ else None
+ override def head: A = elem1
+ override def tail: Set[A] = Set.empty
+ }
+
+ /** An optimized representation for immutable sets of size 2 */
+ @SerialVersionUID(3L)
+ final class Set2[A] private[collection] (elem1: A, elem2: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable {
+ override def size: Int = 2
+ override def isEmpty = false
+ override def knownSize: Int = size
+ def contains(elem: A): Boolean = elem == elem1 || elem == elem2
+ def incl(elem: A): Set[A] =
+ if (contains(elem)) this
+ else new Set3(elem1, elem2, elem)
+ def excl(elem: A): Set[A] =
+ if (elem == elem1) new Set1(elem2)
+ else if (elem == elem2) new Set1(elem1)
+ else this
+ def iterator: Iterator[A] = new SetNIterator[A](size) {
+ def apply(i: Int) = getElem(i)
+ }
+ private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 }
+
+ override def foreach[U](f: A => U): Unit = {
+ f(elem1); f(elem2)
+ }
+ override def exists(p: A => Boolean): Boolean = {
+ p(elem1) || p(elem2)
+ }
+ override def forall(p: A => Boolean): Boolean = {
+ p(elem1) && p(elem2)
+ }
+ override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = {
+ var r1: A = null.asInstanceOf[A]
+ var n = 0
+ if (pred(elem1) != isFlipped) { r1 = elem1; n += 1}
+ if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2; n += 1}
+
+ n match {
+ case 0 => Set.empty
+ case 1 => new Set1(r1)
+ case 2 => this
+ }
+ }
+ override def find(p: A => Boolean): Option[A] = {
+ if (p(elem1)) Some(elem1)
+ else if (p(elem2)) Some(elem2)
+ else None
+ }
+ override def head: A = elem1
+ override def tail: Set[A] = new Set1(elem2)
+ }
+
+ /** An optimized representation for immutable sets of size 3 */
+ @SerialVersionUID(3L)
+ final class Set3[A] private[collection] (elem1: A, elem2: A, elem3: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable {
+ override def size: Int = 3
+ override def isEmpty = false
+ override def knownSize: Int = size
+ def contains(elem: A): Boolean =
+ elem == elem1 || elem == elem2 || elem == elem3
+ def incl(elem: A): Set[A] =
+ if (contains(elem)) this
+ else new Set4(elem1, elem2, elem3, elem)
+ def excl(elem: A): Set[A] =
+ if (elem == elem1) new Set2(elem2, elem3)
+ else if (elem == elem2) new Set2(elem1, elem3)
+ else if (elem == elem3) new Set2(elem1, elem2)
+ else this
+ def iterator: Iterator[A] = new SetNIterator[A](size) {
+ def apply(i: Int) = getElem(i)
+ }
+ private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 }
+
+ override def foreach[U](f: A => U): Unit = {
+ f(elem1); f(elem2); f(elem3)
+ }
+ override def exists(p: A => Boolean): Boolean = {
+ p(elem1) || p(elem2) || p(elem3)
+ }
+ override def forall(p: A => Boolean): Boolean = {
+ p(elem1) && p(elem2) && p(elem3)
+ }
+ override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = {
+ var r1, r2: A = null.asInstanceOf[A]
+ var n = 0
+ if (pred(elem1) != isFlipped) { r1 = elem1; n += 1}
+ if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1}
+ if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3; n += 1}
+
+ n match {
+ case 0 => Set.empty
+ case 1 => new Set1(r1)
+ case 2 => new Set2(r1, r2)
+ case 3 => this
+ }
+ }
+ override def find(p: A => Boolean): Option[A] = {
+ if (p(elem1)) Some(elem1)
+ else if (p(elem2)) Some(elem2)
+ else if (p(elem3)) Some(elem3)
+ else None
+ }
+ override def head: A = elem1
+ override def tail: Set[A] = new Set2(elem2, elem3)
+ }
+
+ /** An optimized representation for immutable sets of size 4 */
+ @SerialVersionUID(3L)
+ final class Set4[A] private[collection] (elem1: A, elem2: A, elem3: A, elem4: A) extends AbstractSet[A] with StrictOptimizedIterableOps[A, Set, Set[A]] with Serializable {
+ override def size: Int = 4
+ override def isEmpty = false
+ override def knownSize: Int = size
+ def contains(elem: A): Boolean =
+ elem == elem1 || elem == elem2 || elem == elem3 || elem == elem4
+ def incl(elem: A): Set[A] =
+ if (contains(elem)) this
+ else HashSet.empty[A] + elem1 + elem2 + elem3 + elem4 + elem
+ def excl(elem: A): Set[A] =
+ if (elem == elem1) new Set3(elem2, elem3, elem4)
+ else if (elem == elem2) new Set3(elem1, elem3, elem4)
+ else if (elem == elem3) new Set3(elem1, elem2, elem4)
+ else if (elem == elem4) new Set3(elem1, elem2, elem3)
+ else this
+ def iterator: Iterator[A] = new SetNIterator[A](size) {
+ def apply(i: Int) = getElem(i)
+ }
+ private def getElem(i: Int) = i match { case 0 => elem1 case 1 => elem2 case 2 => elem3 case 3 => elem4 }
+
+ override def foreach[U](f: A => U): Unit = {
+ f(elem1); f(elem2); f(elem3); f(elem4)
+ }
+ override def exists(p: A => Boolean): Boolean = {
+ p(elem1) || p(elem2) || p(elem3) || p(elem4)
+ }
+ override def forall(p: A => Boolean): Boolean = {
+ p(elem1) && p(elem2) && p(elem3) && p(elem4)
+ }
+ override protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Set[A] = {
+ var r1, r2, r3: A = null.asInstanceOf[A]
+ var n = 0
+ if (pred(elem1) != isFlipped) { r1 = elem1; n += 1}
+ if (pred(elem2) != isFlipped) { if (n == 0) r1 = elem2 else r2 = elem2; n += 1}
+ if (pred(elem3) != isFlipped) { if (n == 0) r1 = elem3 else if (n == 1) r2 = elem3 else r3 = elem3; n += 1}
+ if (pred(elem4) != isFlipped) { if (n == 0) r1 = elem4 else if (n == 1) r2 = elem4 else if (n == 2) r3 = elem4; n += 1}
+
+ n match {
+ case 0 => Set.empty
+ case 1 => new Set1(r1)
+ case 2 => new Set2(r1, r2)
+ case 3 => new Set3(r1, r2, r3)
+ case 4 => this
+ }
+ }
+
+ override def find(p: A => Boolean): Option[A] = {
+ if (p(elem1)) Some(elem1)
+ else if (p(elem2)) Some(elem2)
+ else if (p(elem3)) Some(elem3)
+ else if (p(elem4)) Some(elem4)
+ else None
+ }
+ override def head: A = elem1
+ override def tail: Set[A] = new Set3(elem2, elem3, elem4)
+
+ private[immutable] def buildTo(builder: Builder[A, Set[A]]): builder.type =
+ builder.addOne(elem1).addOne(elem2).addOne(elem3).addOne(elem4)
+ }
+}
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A]
+
+/** Builder for Set.
+ * $multipleResults
+ */
+private final class SetBuilderImpl[A] extends ReusableBuilder[A, Set[A]] {
+ private[this] var elems: Set[A @uncheckedCaptures] = Set.empty
+ private[this] var switchedToHashSetBuilder: Boolean = false
+ private[this] var hashSetBuilder: HashSetBuilder[A @uncheckedCaptures] = _
+
+ override def clear(): Unit = {
+ elems = Set.empty
+ if (hashSetBuilder != null) {
+ hashSetBuilder.clear()
+ }
+ switchedToHashSetBuilder = false
+ }
+
+ override def result(): Set[A] =
+ if (switchedToHashSetBuilder) hashSetBuilder.result() else elems
+
+ def addOne(elem: A) = {
+ if (switchedToHashSetBuilder) {
+ hashSetBuilder.addOne(elem)
+ } else if (elems.size < 4) {
+ elems = elems + elem
+ } else {
+ // assert(elems.size == 4)
+ if (elems.contains(elem)) {
+ () // do nothing
+ } else {
+ switchedToHashSetBuilder = true
+ if (hashSetBuilder == null) {
+ hashSetBuilder = new HashSetBuilder
+ }
+ elems.asInstanceOf[Set4[A]].buildTo(hashSetBuilder)
+ hashSetBuilder.addOne(elem)
+ }
+ }
+
+ this
+ }
+
+ override def addAll(xs: IterableOnce[A]^): this.type =
+ if (switchedToHashSetBuilder) {
+ hashSetBuilder.addAll(xs)
+ this
+ } else {
+ super.addAll(xs)
+ }
+}
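To illustrate the size-specialized sets and the promotion to `HashSet` implemented above, a small hedged example (the demo object name is hypothetical):

```scala
import scala.collection.immutable.Set

object SmallSetDemo extends App {
  val s4 = Set(1, 2, 3, 4) // compact Set4 representation
  // incl returns `this` for an element that is already contained,
  // so no new instance is allocated.
  println((s4 + 3) eq s4) // true
  // A fifth distinct element promotes the set to a HashSet,
  // just as SetBuilderImpl switches to a HashSetBuilder.
  println((s4 + 5).getClass.getSimpleName) // HashSet
}
```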
diff --git a/tests/pos-special/stdlib/collection/immutable/SortedMap.scala b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala
new file mode 100644
index 000000000000..9587502fd908
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/SortedMap.scala
@@ -0,0 +1,178 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.mutable.Builder
+import language.experimental.captureChecking
+
+/** An immutable map whose key-value pairs are sorted according to an [[scala.math.Ordering]] on the keys.
+ *
+ * Allows for range queries to be performed on its keys, and implementations must guarantee that traversal happens in
+ * sorted order, according to the map's [[scala.math.Ordering]].
+ *
+ * @example {{{
+ * import scala.collection.immutable.SortedMap
+ *
+ * // Make a SortedMap via the companion object factory
+ * val weekdays = SortedMap(
+ * 2 -> "Monday",
+ * 3 -> "Tuesday",
+ * 4 -> "Wednesday",
+ * 5 -> "Thursday",
+ * 6 -> "Friday"
+ * )
+ * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday)
+ *
+ * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday")
+ * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+ *
+ * val day3 = days.get(3) // Some("Tuesday")
+ *
+ * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday)
+ *
+ * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday)
+ * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday)
+ * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+ * }}}
+ *
+ * @tparam K the type of the keys contained in this tree map.
+ * @tparam V the type of the values associated with the keys.
+ */
+trait SortedMap[K, +V]
+ extends Map[K, V]
+ with collection.SortedMap[K, V]
+ with SortedMapOps[K, V, SortedMap, SortedMap[K, V]]
+ with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] {
+
+ override def unsorted: Map[K, V] = this
+
+ override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap
+
+ /** The same map with a given default function.
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+ * are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ override def withDefault[V1 >: V](d: K -> V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, d)
+
+ /** The same map with a given default value.
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+ * are not affected by `withDefaultValue`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d default value used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ override def withDefaultValue[V1 >: V](d: V1): SortedMap[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d)
+}
+
+trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
+ extends MapOps[K, V, Map, C] with collection.SortedMapOps[K, V, CC, C] { self =>
+
+ protected def coll: C with CC[K, V]
+
+ def unsorted: Map[K, V]
+
+ override def keySet: SortedSet[K] = new ImmutableKeySortedSet
+
+ /** The implementation class of the set returned by `keySet` */
+ protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet {
+ def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = {
+ val map = self.rangeImpl(from, until)
+ new map.ImmutableKeySortedSet
+ }
+ def incl(elem: K): SortedSet[K] = fromSpecific(this).incl(elem)
+ def excl(elem: K): SortedSet[K] = fromSpecific(this).excl(elem)
+ }
+
+ // We override these methods to fix their return type (which would be `Map` otherwise)
+ def updated[V1 >: V](key: K, value: V1): CC[K, V1]
+ @`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2)
+ override def updatedWith[V1 >: V](key: K)(remappingFunction: Option[V] => Option[V1]): CC[K, V1] = {
+ // Implementation has been copied from `MapOps`
+ val previousValue = this.get(key)
+ remappingFunction(previousValue) match {
+ case None => previousValue.fold(coll)(_ => this.removed(key).coll)
+ case Some(nextValue) =>
+ if (previousValue.exists(_.asInstanceOf[AnyRef] eq nextValue.asInstanceOf[AnyRef])) coll
+ else coll.updated(key, nextValue)
+ }
+ }
+ override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })(ordering)
+}
+
+trait StrictOptimizedSortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
+ extends SortedMapOps[K, V, CC, C]
+ with collection.StrictOptimizedSortedMapOps[K, V, CC, C]
+ with StrictOptimizedMapOps[K, V, Map, C] {
+
+ override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): CC[K, V2] = {
+ var result: CC[K, V2] = coll
+ val it = xs.iterator
+ while (it.hasNext) result = result + it.next()
+ result
+ }
+}
+
+@SerialVersionUID(3L)
+object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) {
+
+ override def from[K: Ordering, V](it: IterableOnce[(K, V)]^): SortedMap[K, V] = it match {
+ case sm: SortedMap[K, V] if Ordering[K] == sm.ordering => sm
+ case _ => super.from(it)
+ }
+
+ final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K -> V)
+ extends Map.WithDefault[K, V](underlying, defaultValue)
+ with SortedMap[K, V]
+ with SortedMapOps[K, V, SortedMap, WithDefault[K, V]] with Serializable {
+
+ implicit def ordering: Ordering[K] = underlying.ordering
+
+ override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory
+
+ def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start)
+
+ def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start)
+
+ def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] =
+ new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue)
+
+ // Need to override following methods to match type signatures of `SortedMap.WithDefault`
+ // for operations preserving default value
+
+ override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] =
+ new WithDefault[K, V1](underlying.updated(key, value), defaultValue)
+
+    override def concat[V2 >: V](xs: collection.IterableOnce[(K, V2)]^): WithDefault[K, V2] =
+      new WithDefault(underlying.concat(xs), defaultValue)
+
+ override def removed(key: K): WithDefault[K, V] = new WithDefault[K, V](underlying.removed(key), defaultValue)
+
+ override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue)
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V) @uncheckedVariance]^): WithDefault[K, V] =
+ new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue)
+
+ override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] @uncheckedVariance =
+ SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue))
+ }
+}
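The `withDefault`/`withDefaultValue` semantics documented above can be checked with a short sketch using only the public API (the demo object name is hypothetical):

```scala
import scala.collection.immutable.SortedMap

object WithDefaultDemo extends App {
  val m = SortedMap(1 -> "one", 2 -> "two").withDefaultValue("?")
  println(m(42))          // "?"   -- the default is used by apply only
  println(m.get(42))      // None  -- get is unaffected
  println(m.contains(42)) // false

  // The WithDefault overrides above keep the default across
  // updated/removed/concat, but transformer methods such as map do not.
  val m2 = m.updated(3, "three")
  println(m2(42))         // "?"
}
```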
diff --git a/tests/pos-special/stdlib/collection/immutable/SortedSet.scala b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala
new file mode 100644
index 000000000000..874abcaecda1
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/SortedSet.scala
@@ -0,0 +1,58 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+import language.experimental.captureChecking
+
+/** Base trait for sorted sets */
+trait SortedSet[A]
+ extends Set[A]
+ with collection.SortedSet[A]
+ with SortedSetOps[A, SortedSet, SortedSet[A]]
+ with SortedSetFactoryDefaults[A, SortedSet, Set] {
+
+ override def unsorted: Set[A] = this
+
+ override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet
+}
+
+/**
+ * @define coll immutable sorted set
+ * @define Coll `immutable.SortedSet`
+ */
+trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]]
+ extends SetOps[A, Set, C]
+ with collection.SortedSetOps[A, CC, C] {
+
+ def unsorted: Set[A]
+}
+
+trait StrictOptimizedSortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]]
+ extends SortedSetOps[A, CC, C]
+ with collection.StrictOptimizedSortedSetOps[A, CC, C]
+ with StrictOptimizedSetOps[A, Set, C] {
+}
+
+/**
+ * $factoryInfo
+ * @define coll immutable sorted set
+ * @define Coll `immutable.SortedSet`
+ */
+@SerialVersionUID(3L)
+object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet) {
+ override def from[E: Ordering](it: IterableOnce[E]^): SortedSet[E] = it match {
+ case ss: SortedSet[E] if Ordering[E] == ss.ordering => ss
+ case _ => super.from(it)
+ }
+}
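A brief sketch of the `from` fast path above, which returns an existing `SortedSet` unchanged when its ordering matches (the demo object name is hypothetical):

```scala
import scala.collection.immutable.SortedSet

object SortedSetFromDemo extends App {
  val s = SortedSet(3, 1, 2)
  // Same implicit Ordering[Int]: from returns the argument itself.
  println(SortedSet.from(s) eq s) // true
  // A different ordering forces a rebuild through the builder.
  val rev = SortedSet.from(s)(Ordering.Int.reverse)
  println(rev.toList) // List(3, 2, 1)
}
```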
diff --git a/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala
new file mode 100644
index 000000000000..b1e4622971fb
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/StrictOptimizedSeqOps.scala
@@ -0,0 +1,82 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+import language.experimental.captureChecking
+import annotation.unchecked.uncheckedCaptures
+
+/**
+ * Trait that overrides operations to take advantage of strict builders.
+ */
+trait StrictOptimizedSeqOps[+A, +CC[_], +C]
+ extends Any
+ with SeqOps[A, CC, C]
+ with collection.StrictOptimizedSeqOps[A, CC, C]
+ with StrictOptimizedIterableOps[A, CC, C] {
+
+ override def distinctBy[B](f: A -> B): C = {
+ if (lengthCompare(1) <= 0) coll
+ else {
+ val builder = newSpecificBuilder
+ val seen = mutable.HashSet.empty[B @uncheckedCaptures]
+ val it = this.iterator
+ var different = false
+ while (it.hasNext) {
+ val next = it.next()
+ if (seen.add(f(next))) builder += next else different = true
+ }
+ if (different) builder.result() else coll
+ }
+ }
+
+ override def updated[B >: A](index: Int, elem: B): CC[B] = {
+ if (index < 0) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${if (knownSize>=0) knownSize else "unknown"})")
+ val b = iterableFactory.newBuilder[B]
+ if (knownSize >= 0) {
+ b.sizeHint(size)
+ }
+ var i = 0
+ val it = iterator
+ while (i < index && it.hasNext) {
+ b += it.next()
+ i += 1
+ }
+ if (!it.hasNext) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${i-1})")
+ b += elem
+ it.next()
+ while (it.hasNext) b += it.next()
+ b.result()
+ }
+
+ override def patch[B >: A](from: Int, other: IterableOnce[B]^, replaced: Int): CC[B] = {
+ val b = iterableFactory.newBuilder[B]
+ var i = 0
+ val it = iterator
+ while (i < from && it.hasNext) {
+ b += it.next()
+ i += 1
+ }
+ b ++= other
+ i = replaced
+ while (i > 0 && it.hasNext) {
+ it.next()
+ i -= 1
+ }
+ while (it.hasNext) b += it.next()
+ b.result()
+ }
+
+ override def sorted[B >: A](implicit ord: Ordering[B]): C = super.sorted(ord)
+
+}
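The strict `updated`/`patch`/`distinctBy` implementations above have the following observable semantics; a minimal sketch using `Vector` (any strict immutable `Seq` would do, and the demo object name is hypothetical):

```scala
object StrictSeqOpsDemo extends App {
  val xs = Vector(1, 2, 3, 4, 5)
  // updated replaces one element, throwing IndexOutOfBoundsException
  // when the index is outside [0, length).
  println(xs.updated(2, 99))         // Vector(1, 2, 99, 4, 5)
  // patch keeps `from` elements, splices in `other`, drops `replaced`.
  println(xs.patch(1, Seq(7, 8), 2)) // Vector(1, 7, 8, 4, 5)
  // distinctBy keeps the first element per key; here key = parity.
  println(xs.distinctBy(_ % 2))      // Vector(1, 2)
}
```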
diff --git a/tests/pos-special/stdlib/collection/immutable/TreeMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala
new file mode 100644
index 000000000000..ff01ad7806ec
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/TreeMap.scala
@@ -0,0 +1,372 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.tailrec
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.immutable.{RedBlackTree => RB}
+import scala.collection.mutable.ReusableBuilder
+import scala.runtime.AbstractFunction2
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** An immutable SortedMap whose values are stored in a red-black tree.
+ *
+ * This class is optimal when range queries will be performed,
+ * or when traversal in order of an ordering is desired.
+ * If you only need key lookups, and don't care in which order key-values
+ * are traversed, consider using [[scala.collection.immutable.HashMap]],
+ * which will generally have better performance. If you need insertion order,
+ * consider a [[scala.collection.immutable.SeqMap]], which does not need to
+ * have an ordering supplied.
+ *
+ * @example {{{
+ * import scala.collection.immutable.TreeMap
+ *
+ * // Make a TreeMap via the companion object factory
+ * val weekdays = TreeMap(
+ * 2 -> "Monday",
+ * 3 -> "Tuesday",
+ * 4 -> "Wednesday",
+ * 5 -> "Thursday",
+ * 6 -> "Friday"
+ * )
+ * // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday)
+ *
+ * val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday")
+ * // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+ *
+ * val day3 = days.get(3) // Some("Tuesday")
+ *
+ * val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday)
+ *
+ * val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday)
+ * val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday)
+ * val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday)
+ * }}}
+ *
+ * @tparam K the type of the keys contained in this tree map.
+ * @tparam V the type of the values associated with the keys.
+ * @param ordering the implicit ordering used to compare objects of type `K`.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]]
+ * section on `Red-Black Trees` for more information.
+ *
+ * @define Coll immutable.TreeMap
+ * @define coll immutable tree map
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K])
+ extends AbstractMap[K, V]
+ with SortedMap[K, V]
+ with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]]
+ with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map]
+ with DefaultSerializable {
+
+ def this()(implicit ordering: Ordering[K]) = this(null)(ordering)
+ private[immutable] def tree0: RB.Tree[K, V] = tree
+
+ private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t)
+
+ override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap
+
+ def iterator: Iterator[(K, V)] = RB.iterator(tree)
+
+ def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start))
+
+ override def keySet: TreeSet[K] = new TreeSet(tree)(ordering)
+
+ def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start))
+
+ override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start))
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit =
+ shape.parUnbox(
+ scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]](
+ size, tree, _.left, _.right, x => (x.key, x.value)
+ )
+ )
+
+ override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
+ import scala.collection.convert.impl._
+ type T = RB.Tree[K, V]
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int])
+ case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
+ import scala.collection.convert.impl._
+ type T = RB.Tree[K, V]
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Int])
+ case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V]))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ def get(key: K): Option[V] = RB.get(tree, key)
+ override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+ val resultOrNull = RB.lookup(tree, key)
+ if (resultOrNull eq null) default
+ else resultOrNull.value
+ }
+
+ def removed(key: K): TreeMap[K,V] =
+ newMapOrSelf(RB.delete(tree, key))
+
+ def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] =
+ newMapOrSelf(RB.update(tree, key, value, overwrite = true))
+
+ override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]^): TreeMap[K, V1] =
+ newMapOrSelf(that match {
+ case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering =>
+ RB.union(tree, tm.tree)
+ case ls: LinearSeq[(K,V1)] =>
+        if (ls.isEmpty) tree // avoid creating the adder for an empty sequence
+ else {
+ val adder = new Adder[V1]
+ adder.addAll(ls)
+ adder.finalTree
+ }
+ case _ =>
+ val adder = new Adder[V1]
+ val it = that.iterator
+ while (it.hasNext) {
+ adder.apply(it.next())
+ }
+ adder.finalTree
+ })
+
+ override def removedAll(keys: IterableOnce[K]^): TreeMap[K, V] = keys match {
+ case ts: TreeSet[K] if ordering == ts.ordering =>
+ newMapOrSelf(RB.difference(tree, ts.tree))
+ case _ => super.removedAll(keys)
+ }
+
+ /** A new TreeMap with the entry added is returned,
+ * assuming that key is not in the TreeMap.
+ *
+ * @tparam V1 type of the values of the new bindings, a supertype of `V`
+ * @param key the key to be inserted
+ * @param value the value to be associated with `key`
+ * @return a new $coll with the inserted binding, if it wasn't present in the map
+ */
+ @deprecated("Use `updated` instead", "2.13.0")
+ def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = {
+ assert(!RB.contains(tree, key))
+ updated(key, value)
+ }
+
+ def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until))
+
+ override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match {
+ case null => Option.empty
+ case x => Some((x.key, x.value))
+ }
+
+ override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match {
+ case null => Option.empty
+ case x => Some((x.key, x.value))
+ }
+
+ override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until))
+
+ override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f)
+ override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f)
+ override def size: Int = RB.count(tree)
+ override def knownSize: Int = size
+
+ override def isEmpty = size == 0
+
+ override def firstKey: K = RB.smallest(tree).key
+
+ override def lastKey: K = RB.greatest(tree).key
+
+ override def head: (K, V) = {
+ val smallest = RB.smallest(tree)
+ (smallest.key, smallest.value)
+ }
+
+ override def last: (K, V) = {
+ val greatest = RB.greatest(tree)
+ (greatest.key, greatest.value)
+ }
+
+ override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree))
+
+ override def init: TreeMap[K, V] = new TreeMap(RB.init(tree))
+
+ override def drop(n: Int): TreeMap[K, V] = {
+ if (n <= 0) this
+ else if (n >= size) empty
+ else new TreeMap(RB.drop(tree, n))
+ }
+
+ override def take(n: Int): TreeMap[K, V] = {
+ if (n <= 0) empty
+ else if (n >= size) this
+ else new TreeMap(RB.take(tree, n))
+ }
+
+ override def slice(from: Int, until: Int) = {
+ if (until <= from) empty
+ else if (from <= 0) take(until)
+ else if (until >= size) drop(from)
+ else new TreeMap(RB.slice(tree, from, until))
+ }
+
+ override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0))
+
+ override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0))
+
+ private[this] def countWhile(p: ((K, V)) => Boolean): Int = {
+ var result = 0
+ val it = iterator
+ while (it.hasNext && p(it.next())) result += 1
+ result
+ }
+
+ override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p))
+
+ override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p))
+
+ override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p))
+
+ override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] =
+ newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v))))
+
+ override def partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = {
+ val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v)))
+ (newMapOrSelf(l), newMapOrSelf(r))
+ }
+
+ override def transform[W](f: (K, V) => W): TreeMap[K, W] = {
+ val t2 = RB.transform[K, V, W](tree, f)
+ if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]]
+ else new TreeMap(t2)
+ }
+
+ private final class Adder[B1 >: V]
+ extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] {
+ private var currentMutableTree: RB.Tree[K,B1] @uncheckedCaptures = tree0
+ def finalTree = beforePublish(currentMutableTree)
+ override def apply(kv: (K, B1)): Unit = {
+ currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2)
+ }
+ @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = {
+ if (!ls.isEmpty) {
+ val kv = ls.head
+ currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2)
+ addAll(ls.tail)
+ }
+ }
+ }
+ override def equals(obj: Any): Boolean = obj match {
+ case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree)
+ case _ => super.equals(obj)
+ }
+
+ override protected[this] def className = "TreeMap"
+}
+
+/** $factoryInfo
+ * @define Coll immutable.TreeMap
+ * @define coll immutable tree map
+ */
+@SerialVersionUID(3L)
+object TreeMap extends SortedMapFactory[TreeMap] {
+
+ def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap()
+
+ def from[K, V](it: IterableOnce[(K, V)]^)(implicit ordering: Ordering[K]): TreeMap[K, V] =
+ it match {
+ case tm: TreeMap[K, V] if ordering == tm.ordering => tm
+ case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering =>
+ new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size))
+ case _ =>
+ var t: RB.Tree[K, V] = null
+ val i = it.iterator
+ while (i.hasNext) {
+ val (k, v) = i.next()
+ t = RB.update(t, k, v, overwrite = true)
+ }
+ new TreeMap[K, V](t)
+ }
+
+ def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V]
+
+ private class TreeMapBuilder[K, V](implicit ordering: Ordering[K])
+ extends RB.MapHelper[K, V]
+ with ReusableBuilder[(K, V), TreeMap[K, V]] {
+ type Tree = RB.Tree[K, V]
+    private var tree: Tree @uncheckedCaptures = null
+
+ def addOne(elem: (K, V)): this.type = {
+ tree = mutableUpd(tree, elem._1, elem._2)
+ this
+ }
+ private object adder extends AbstractFunction2[K, V, Unit] {
+      // cache the outer `tree` field in a local accumulator to avoid
+      // repeated outer-field access in the hot path (apply)
+ private[this] var accumulator: Tree @uncheckedCaptures = null
+ def addForEach(hasForEach: collection.Map[K, V]): Unit = {
+ accumulator = tree
+ hasForEach.foreachEntry(this)
+ tree = accumulator
+ // be friendly to GC
+ accumulator = null
+ }
+
+ override def apply(key: K, value: V): Unit = {
+ accumulator = mutableUpd(accumulator, key, value)
+ }
+ }
+
+ override def addAll(xs: IterableOnce[(K, V)]^): this.type = {
+ xs match {
+ // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++=
+ // for the moment we have to force immutability before the union
+ // which will waste some time and space
+ // calling `beforePublish` makes `tree` immutable
+ case ts: TreeMap[K, V] if ts.ordering == ordering =>
+ if (tree eq null) tree = ts.tree0
+ else tree = RB.union(beforePublish(tree), ts.tree0)
+ case that: collection.Map[K, V] =>
+          // add entries directly, avoiding the creation of tuples
+ adder.addForEach(that)
+ case _ =>
+ super.addAll(xs)
+ }
+ this
+ }
+
+ override def clear(): Unit = {
+ tree = null
+ }
+
+ override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree))
+ }
+}
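To make the fast paths in `concat` and `removedAll` above concrete, a hedged example (the demo object name is hypothetical; both operands share the default `Ordering[Int]`, so the `RB.union`/`RB.difference` branches are taken):

```scala
import scala.collection.immutable.{TreeMap, TreeSet}

object TreeMapOpsDemo extends App {
  val a = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
  val b = TreeMap(3 -> "C", 4 -> "d")
  // Same ordering: concat unions the red-black trees directly;
  // on duplicate keys the right operand wins.
  println(a ++ b) // TreeMap(1 -> a, 2 -> b, 3 -> C, 4 -> d)
  // removedAll with a TreeSet of equal ordering uses RB.difference.
  println(a -- TreeSet(1, 3)) // TreeMap(2 -> b)
}
```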
diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala
new file mode 100644
index 000000000000..91233669e5ca
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/TreeSeqMap.scala
@@ -0,0 +1,651 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.tailrec
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** This class implements an immutable map that preserves order using
+ * a hash map for the key to value mapping to provide efficient lookup,
+ * and a tree for the ordering of the keys to provide efficient
+ * insertion/modification order traversal and destructuring.
+ *
+ * By default insertion order (`TreeSeqMap.OrderBy.Insertion`)
+ * is used, but modification order (`TreeSeqMap.OrderBy.Modification`)
+ * can be used instead if so specified at creation.
+ *
+ * The `orderingBy(orderBy: TreeSeqMap.OrderBy): TreeSeqMap[K, V]` method
+ * can be used to switch to the specified ordering for the returned map.
+ *
+ * A key can be manually refreshed (i.e. placed at the end) via the
+ * `refresh(key: K): TreeSeqMap[K, V]` method (regardless of the ordering in
+ * use).
+ *
+ * Internally, an ordinal counter is increased for each insertion/modification
+ * and then the current ordinal is used as key in the tree map. After 2^32^
+ * insertions/modifications the entire map is copied (thus resetting the ordinal
+ * counter).
+ *
+ * @tparam K the type of the keys contained in this map.
+ * @tparam V the type of the values associated with the keys in this map.
+ * @define coll immutable tree seq map
+ * @define Coll `immutable.TreeSeqMap`
+ */
+final class TreeSeqMap[K, +V] private (
+ private val ordering: TreeSeqMap.Ordering[K],
+ private val mapping: TreeSeqMap.Mapping[K, V],
+ private val ordinal: Int,
+ val orderedBy: TreeSeqMap.OrderBy)
+ extends AbstractMap[K, V]
+ with SeqMap[K, V]
+ with MapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]]
+ with StrictOptimizedIterableOps[(K, V), Iterable, TreeSeqMap[K, V]]
+ with StrictOptimizedMapOps[K, V, TreeSeqMap, TreeSeqMap[K, V]]
+ with MapFactoryDefaults[K, V, TreeSeqMap, Iterable] {
+
+ import TreeSeqMap._
+
+ override protected[this] def className: String = "TreeSeqMap"
+
+ override def mapFactory: MapFactory[TreeSeqMap] = TreeSeqMap
+
+ override val size = mapping.size
+
+ override def knownSize: Int = size
+
+ override def isEmpty = size == 0
+
+ /*
+  // This should have been overridden in 2.13.0 but wasn't, and it will have to wait, since adding the override now would not be forwards compatible.
+ // Now handled in inherited method from scala.collection.MapFactoryDefaults instead.
+ override def empty = TreeSeqMap.empty[K, V](orderedBy)
+ */
+
+ def orderingBy(orderBy: OrderBy): TreeSeqMap[K, V] = {
+ if (orderBy == this.orderedBy) this
+ else if (isEmpty) TreeSeqMap.empty(orderBy)
+ else new TreeSeqMap(ordering, mapping, ordinal, orderBy)
+ }
+
+ def updated[V1 >: V](key: K, value: V1): TreeSeqMap[K, V1] = {
+ mapping.get(key) match {
+ case e if ordinal == -1 && (orderedBy == OrderBy.Modification || e.isEmpty) =>
+ // Reinsert into fresh instance to restart ordinal counting, expensive but only done after 2^32 updates.
+ TreeSeqMap.empty[K, V1](orderedBy) ++ this + (key -> value)
+ case Some((o, _)) if orderedBy == OrderBy.Insertion =>
+ new TreeSeqMap(
+ ordering.include(o, key),
+ mapping.updated[(Int, V1)](key, (o, value)),
+ ordinal, // Do not increment the ordinal since the key is already present, i.e. o <= ordinal.
+ orderedBy)
+ case Some((o, _)) =>
+ val o1 = increment(ordinal)
+ new TreeSeqMap(
+ ordering.exclude(o).append(o1, key),
+ mapping.updated[(Int, V1)](key, (o1, value)),
+ o1,
+ orderedBy)
+ case None =>
+ val o1 = increment(ordinal)
+ new TreeSeqMap(
+ ordering.append(o1, key),
+ mapping.updated[(Int, V1)](key, (o1, value)),
+ o1,
+ orderedBy)
+ }
+ }
+
+ def removed(key: K): TreeSeqMap[K, V] = {
+ mapping.get(key) match {
+ case Some((o, _)) =>
+ new TreeSeqMap(
+ ordering.exclude(o),
+ mapping.removed(key),
+ ordinal,
+ orderedBy)
+ case None =>
+ this
+ }
+ }
+
+ def refresh(key: K): TreeSeqMap[K, V] = {
+ mapping.get(key) match {
+ case Some((o, _)) =>
+ val o1 = increment(ordinal)
+ new TreeSeqMap(
+ ordering.exclude(o).append(o1, key),
+ mapping,
+ o1,
+ orderedBy)
+ case None =>
+ this
+ }
+ }
+
+ def get(key: K): Option[V] = mapping.get(key).map(value)
+
+ def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] {
+ private[this] val iter = ordering.iterator
+
+ override def hasNext: Boolean = iter.hasNext
+
+ override def next(): (K, V) = binding(iter.next())
+ }
+
+ override def keysIterator: Iterator[K] = new AbstractIterator[K] {
+ private[this] val iter = ordering.iterator
+
+ override def hasNext: Boolean = iter.hasNext
+
+ override def next(): K = iter.next()
+ }
+
+ override def valuesIterator: Iterator[V] = new AbstractIterator[V] {
+ private[this] val iter = ordering.iterator
+
+ override def hasNext: Boolean = iter.hasNext
+
+ override def next(): V = value(binding(iter.next()))
+ }
+
+ override def contains(key: K): Boolean = mapping.contains(key)
+
+ override def head: (K, V) = binding(ordering.head)
+
+ override def headOption = ordering.headOption.map(binding)
+
+ override def last: (K, V) = binding(ordering.last)
+
+ override def lastOption: Option[(K, V)] = ordering.lastOption.map(binding)
+
+ override def tail: TreeSeqMap[K, V] = {
+ val (head, tail) = ordering.headTail
+ new TreeSeqMap(tail, mapping.removed(head), ordinal, orderedBy)
+ }
+
+ override def init: TreeSeqMap[K, V] = {
+ val (init, last) = ordering.initLast
+ new TreeSeqMap(init, mapping.removed(last), ordinal, orderedBy)
+ }
+
+ override def slice(from: Int, until: Int): TreeSeqMap[K, V] = {
+ val sz = size
+ if (sz == 0 || from >= until) TreeSeqMap.empty[K, V](orderedBy)
+ else {
+ val f = if (from >= 0) from else 0
+ val u = if (until <= sz) until else sz
+ val l = u - f
+ if (l <= 0) TreeSeqMap.empty[K, V](orderedBy)
+ else if (l > sz / 2) {
+ // Remove front and rear incrementally if majority of elements are to be kept
+ val (front, rest) = ordering.splitAt(f)
+ val (ong, rear) = rest.splitAt(l)
+ var mng = this.mapping
+ val frontIter = front.iterator
+ while (frontIter.hasNext) {
+ mng = mng - frontIter.next()
+ }
+ val rearIter = rear.iterator
+ while (rearIter.hasNext) {
+ mng = mng - rearIter.next()
+ }
+ new TreeSeqMap(ong, mng, ordinal, orderedBy)
+ } else {
+ // Populate with builder otherwise
+ val bdr = newBuilder[K @uncheckedCaptures, V @uncheckedCaptures](orderedBy)
+ val iter = ordering.iterator
+ var i = 0
+ while (i < f) {
+ iter.next()
+ i += 1
+ }
+ while (i < u) {
+ val k = iter.next()
+ bdr.addOne((k, mapping(k)._2))
+ i += 1
+ }
+ bdr.result()
+ }
+ }
+ }
+
+ override def map[K2, V2](f: ((K, V)) => (K2, V2)): TreeSeqMap[K2, V2] = {
+ val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy)
+ val iter = ordering.iterator
+ while (iter.hasNext) {
+ val k = iter.next()
+ val (_, v) = mapping(k)
+ val (k2, v2) = f((k, v))
+ bdr.addOne((k2, v2))
+ }
+ bdr.result()
+ }
+
+ override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]^): TreeSeqMap[K2, V2] = {
+ val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy)
+ val iter = ordering.iterator
+ while (iter.hasNext) {
+ val k = iter.next()
+ val (_, v) = mapping(k)
+ val jter = f((k, v)).iterator
+ while (jter.hasNext) {
+ val (k2, v2) = jter.next()
+ bdr.addOne((k2, v2))
+ }
+ }
+ bdr.result()
+ }
+
+ override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): TreeSeqMap[K2, V2] = {
+ val bdr = newBuilder[K2 @uncheckedCaptures, V2 @uncheckedCaptures](orderedBy)
+ val iter = ordering.iterator
+ while (iter.hasNext) {
+ val k = iter.next()
+ val (_, v) = mapping(k)
+ pf.runWith({ case (k2, v2) => bdr.addOne((k2, v2)) })((k, v))
+ }
+ bdr.result()
+ }
+
+ override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): TreeSeqMap[K, V2] = {
+ var ong: Ordering[K] = ordering
+ var mng: Mapping[K, V2] = mapping
+ var ord = increment(ordinal)
+ val iter = suffix.iterator
+ while (iter.hasNext) {
+ val (k, v2) = iter.next()
+ mng.get(k) match {
+ case Some((o, v)) =>
+ if (orderedBy == OrderBy.Insertion && v != v2) mng = mng.updated(k, (o, v2))
+ else if (orderedBy == OrderBy.Modification) {
+ mng = mng.updated(k, (ord, v2))
+ ong = ong.exclude(o).append(ord, k)
+ ord = increment(ord)
+ }
+ case None =>
+ mng = mng.updated(k, (ord, v2))
+ ong = ong.append(ord, k)
+ ord = increment(ord)
+ }
+ }
+ new TreeSeqMap[K, V2](ong, mng, ord, orderedBy)
+ }
+
+ @`inline` private[this] def value(p: (_, V)) = p._2
+ @`inline` private[this] def binding(k: K) = mapping(k).copy(_1 = k)
+}
+object TreeSeqMap extends MapFactory[TreeSeqMap] {
+ sealed trait OrderBy
+ object OrderBy {
+ case object Insertion extends OrderBy
+ case object Modification extends OrderBy
+ }
+
+ private val EmptyByInsertion = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Insertion)
+ private val EmptyByModification = new TreeSeqMap[Nothing, Nothing](Ordering.empty, HashMap.empty, 0, OrderBy.Modification)
+ val Empty = EmptyByInsertion
+ def empty[K, V]: TreeSeqMap[K, V] = empty(OrderBy.Insertion)
+ def empty[K, V](orderBy: OrderBy): TreeSeqMap[K, V] = {
+ if (orderBy == OrderBy.Modification) EmptyByModification
+ else EmptyByInsertion
+ }.asInstanceOf[TreeSeqMap[K, V]]
+
+ def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): TreeSeqMap[K, V] =
+ it match {
+ case om: TreeSeqMap[K, V] => om
+ case _ => (newBuilder[K, V] ++= it).result()
+ }
+
+ @inline private def increment(ord: Int) = if (ord == Int.MaxValue) Int.MinValue else ord + 1
+
+ def newBuilder[sealed K, sealed V]: mutable.Builder[(K, V), TreeSeqMap[K, V]] = newBuilder(OrderBy.Insertion)
+ def newBuilder[sealed K, sealed V](orderedBy: OrderBy): mutable.Builder[(K, V), TreeSeqMap[K, V]] = new Builder[K, V](orderedBy)
+
+ final class Builder[sealed K, sealed V](orderedBy: OrderBy) extends mutable.Builder[(K, V), TreeSeqMap[K, V]] {
+ private[this] val bdr = new MapBuilderImpl[K, (Int, V)]
+ private[this] var ong = Ordering.empty[K]
+ private[this] var ord = 0
+ private[this] var aliased: TreeSeqMap[K, V] = _
+
+ override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2)
+ def addOne(key: K, value: V): this.type = {
+ if (aliased ne null) {
+ aliased = aliased.updated(key, value)
+ } else {
+ bdr.getOrElse(key, null) match {
+ case (o, v) =>
+ if (orderedBy == OrderBy.Insertion && v != value) bdr.addOne(key, (o, value))
+ else if (orderedBy == OrderBy.Modification) {
+ bdr.addOne(key, (ord, value))
+ ong = ong.exclude(o).appendInPlace(ord, key)
+ ord = increment(ord)
+ }
+ case null =>
+ bdr.addOne(key, (ord, value))
+ ong = ong.appendInPlace(ord, key)
+ ord = increment(ord)
+ }
+ }
+ this
+ }
+
+ override def clear(): Unit = {
+ ong = Ordering.empty
+ ord = 0
+ bdr.clear()
+ aliased = null
+ }
+
+ override def result(): TreeSeqMap[K, V] = {
+ if (aliased eq null) {
+ aliased = new TreeSeqMap(ong, bdr.result(), ord, orderedBy)
+ }
+ aliased
+ }
+ }
+
+ private type Mapping[K, +V] = Map[K, (Int, V)]
+ @annotation.unused
+ private val Mapping = Map
+
+ /* The ordering implementation below is an adapted version of immutable.IntMap. */
+ private[immutable] object Ordering {
+ import scala.collection.generic.BitOperations.Int._
+
+ @inline private[immutable] def toBinaryString(i: Int): String = s"$i/${i.toBinaryString}"
+
+    def empty[T]: Ordering[T] = Zero
+
+ def apply[T](elems: (Int, T)*): Ordering[T] =
+ elems.foldLeft(empty[T])((x, y) => x.include(y._1, y._2))
+
+ // Iterator over a non-empty Ordering.
+ final class Iterator[+V](it: Ordering[V]) {
+      // Basically this uses a simple stack to emulate recursion over the tree. However
+ // because we know that Ints are at least 32 bits we can have at most 32 Bins and
+ // one Tip sitting on the tree at any point. Therefore we know the maximum stack
+ // depth is 33
+ private[this] var index = 0
+ private[this] val buffer = new Array[AnyRef](33)
+
+ private[this] def pop = {
+ index -= 1
+ buffer(index).asInstanceOf[Ordering[V]]
+ }
+
+ private[this] def push[V2 >: V](x: Ordering[V2]): Unit = {
+ buffer(index) = x.asInstanceOf[AnyRef]
+ index += 1
+ }
+
+ if (it != Zero) push(it)
+
+ def hasNext = index != 0
+ @tailrec
+ def next(): V =
+ pop match {
+ case Bin(_,_, Tip(_, v), right) =>
+ push(right)
+ v
+ case Bin(_, _, left, right) =>
+ push(right)
+ push(left)
+ next()
+ case Tip(_, v) => v
+ // This should never happen. We don't allow Ordering.Zero in subtrees of the Ordering
+ // and don't return an Ordering.Iterator for Ordering.Zero.
+ case Zero => throw new IllegalStateException("empty subtree not allowed")
+ }
+ }
+
+ object Iterator {
+ val Empty = new Iterator[Nothing](Ordering.empty[Nothing])
+ def empty[V]: Iterator[V] = Empty.asInstanceOf[Iterator[V]]
+ }
+
+ case object Zero extends Ordering[Nothing] {
+ // Important! Without this equals method in place, an infinite
+ // loop from Map.equals => size => pattern-match-on-Nil => equals
+ // develops. Case objects and custom equality don't mix without
+ // careful handling.
+ override def equals(that : Any): Boolean = that match {
+ case _: this.type => true
+        case _: Ordering[_] => false // The only empty Ordering is Zero
+ case _ => super.equals(that)
+ }
+ protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Ø"
+ }
+
+ final case class Tip[+T](ord: Int, value: T) extends Ordering[T] {
+ def withValue[S](s: S) =
+ if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[Tip[S]]
+ else Tip(ord, s)
+ protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = sb ++= s"${prefix}Tip(${toBinaryString(ord)} -> $value)\n"
+ }
+
+ final case class Bin[+T](prefix: Int, mask: Int, left: Ordering[T], var right: Ordering[T @uncheckedCaptures] @scala.annotation.unchecked.uncheckedVariance) extends Ordering[T] {
+ def bin[S](left: Ordering[S], right: Ordering[S]): Ordering[S] = {
+ if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[Bin[S]]
+ else Bin[S](prefix, mask, left, right)
+ }
+ protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit = {
+ sb ++= s"${prefix}Bin(${toBinaryString(this.prefix)}:${toBinaryString(mask)})\n"
+ left.format(sb, subPrefix + "├── ", subPrefix + "│ ")
+ right.format(sb, subPrefix + "└── ", subPrefix + " ")
+ }
+ }
+
+ private def branchMask(i: Int, j: Int) = highestOneBit(i ^ j)
+
+ private def join[T](p1: Int, t1: Ordering[T], p2: Int, t2: Ordering[T]): Ordering[T] = {
+ val m = branchMask(p1, p2)
+ val p = mask(p1, m)
+ if (zero(p1, m)) Bin(p, m, t1, t2)
+ else Bin(p, m, t2, t1)
+ }
+
+ private def bin[T](prefix: Int, mask: Int, left: Ordering[T], right: Ordering[T]): Ordering[T] = (left, right) match {
+ case (l, Zero) => l
+ case (Zero, r) => r
+ case (l, r) => Bin(prefix, mask, l, r)
+ }
+ }
+
+ sealed abstract class Ordering[+T] {
+ import Ordering._
+ import scala.annotation.tailrec
+ import scala.collection.generic.BitOperations.Int._
+
+ override final def toString: String = format
+ final def format: String = {
+ val sb = new StringBuilder
+ format(sb, "", "")
+ sb.toString()
+ }
+ protected def format(sb: StringBuilder, prefix: String, subPrefix: String): Unit
+
+ @tailrec
+ final def head: T = this match {
+ case Zero => throw new NoSuchElementException("head of empty map")
+ case Tip(k, v) => v
+ case Bin(_, _, l, _) => l.head
+ }
+
+ @tailrec
+ final def headOption: Option[T] = this match {
+ case Zero => None
+ case Tip(_, v) => Some(v)
+ case Bin(_, _, l, _) => l.headOption
+ }
+
+ @tailrec
+ final def last: T = this match {
+ case Zero => throw new NoSuchElementException("last of empty map")
+ case Tip(_, v) => v
+ case Bin(_, _, _, r) => r.last
+ }
+
+ @tailrec
+ final def lastOption: Option[T] = this match {
+ case Zero => None
+ case Tip(_, v) => Some(v)
+ case Bin(_, _, _, r) => r.lastOption
+ }
+
+ @tailrec
+ final def ordinal: Int = this match {
+ case Zero => 0
+ case Tip(o, _) => o
+ case Bin(_, _, _, r) => r.ordinal
+ }
+
+ final def tail: Ordering[T] = this match {
+ case Zero => throw new NoSuchElementException("tail of empty map")
+ case Tip(_, _) => Zero
+ case Bin(p, m, l, r) => bin(p, m, l.tail, r)
+ }
+
+ final def headTail: (T, Ordering[T]) = this match {
+ case Zero => throw new NoSuchElementException("init of empty map")
+ case Tip(_, v) => (v, Zero)
+ case Bin(p, m, l, r) =>
+ val (head, tail) = l.headTail
+ (head, bin(p, m, tail, r))
+ }
+
+ final def init: Ordering[T] = this match {
+ case Zero => throw new NoSuchElementException("init of empty map")
+ case Tip(_, _) => Zero
+ case Bin(p, m, l, r) =>
+ bin(p, m, l, r.init)
+ }
+
+ final def initLast: (Ordering[T], T) = this match {
+ case Zero => throw new NoSuchElementException("init of empty map")
+ case Tip(_, v) => (Zero, v)
+ case Bin(p, m, l, r) =>
+ val (init, last) = r.initLast
+ (bin(p, m, l, init), last)
+ }
+
+ final def iterator: Iterator[T] = this match {
+ case Zero => Iterator.empty
+ case _ => new Iterator(this)
+ }
+
+ final def include[S >: T](ordinal: Int, value: S): Ordering[S] = this match {
+ case Zero =>
+ Tip(ordinal, value)
+ case Tip(o, _) =>
+ if (ordinal == o) Tip(ordinal, value)
+ else join(ordinal, Tip(ordinal, value), o, this)
+ case Bin(p, m, l, r) =>
+ if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this)
+ else if (zero(ordinal, m)) Bin(p, m, l.include(ordinal, value), r)
+ else Bin(p, m, l, r.include(ordinal, value))
+ }
+
+ final def append[S >: T](ordinal: Int, value: S): Ordering[S] = this match {
+ case Zero =>
+ Tip(ordinal, value)
+ case Tip(o, _) =>
+ if (ordinal == o) Tip(ordinal, value)
+ else join(ordinal, Tip(ordinal, value), o, this)
+ case Bin(p, m, l, r) =>
+ if (!hasMatch(ordinal, p, m)) join(ordinal, Tip(ordinal, value), p, this)
+ else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}")
+ else Bin(p, m, l, r.append(ordinal, value))
+ }
+
+ @inline private[collection] final def appendInPlace[S >: T](ordinal: Int, value: S): Ordering[S] = appendInPlace1(null, ordinal, value)
+ private[collection] final def appendInPlace1[S >: T](parent: Bin[S], ordinal: Int, value: S): Ordering[S] = this match {
+ case Zero =>
+ Tip(ordinal, value)
+ case Tip(o, _) if o >= ordinal =>
+        throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}")
+ case Tip(o, _) if parent == null =>
+ join(ordinal, Tip(ordinal, value), o, this)
+ case Tip(o, _) =>
+ parent.right = join(ordinal, Tip(ordinal, value), o, this)
+ parent
+ case b @ Bin(p, m, _, r) =>
+ if (!hasMatch(ordinal, p, m)) {
+ val b2 = join(ordinal, Tip(ordinal, value), p, this)
+ if (parent != null) {
+ parent.right = b2
+ parent
+ } else b2
+ } else if (zero(ordinal, m)) throw new IllegalArgumentException(s"Append called with ordinal out of range: $ordinal is not greater than current max ordinal ${this.ordinal}")
+ else {
+ r.appendInPlace1(b, ordinal, value)
+ this
+ }
+ }
+
+ final def exclude(ordinal: Int): Ordering[T] = this match {
+ case Zero =>
+ Zero
+ case Tip(o, _) =>
+ if (ordinal == o) Zero
+ else this
+ case Bin(p, m, l, r) =>
+ if (!hasMatch(ordinal, p, m)) this
+ else if (zero(ordinal, m)) bin(p, m, l.exclude(ordinal), r)
+ else bin(p, m, l, r.exclude(ordinal))
+ }
+
+ final def splitAt(n: Int): (Ordering[T], Ordering[T]) = {
+ var rear: Ordering[T @uncheckedCaptures] = Ordering.empty[T]
+ var i = n
+ (modifyOrRemove { (o, v) =>
+ i -= 1
+ if (i >= 0) Some(v)
+ else {
+ rear = rear.appendInPlace(o, v)
+ None
+ }
+ }, rear)
+ }
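+    // Illustrative sketch (not part of the original source): splitAt keeps the
+    // first n values in place via modifyOrRemove and rebuilds the remainder with
+    // appendInPlace, so both halves preserve insertion order. For values a, b, c
+    // at ordinals 1, 2, 3, splitAt(1) yields a front iterating just a and a rear
+    // iterating b then c, with their original ordinals 2 and 3.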
+
+ /**
+ * A combined transform and filter function. Returns an `Ordering` such that
+ * for each `(key, value)` mapping in this map, if `f(key, value) == None`
+ * the map contains no mapping for key, and if `f(key, value) == Some(x)` the
+ * map contains `(key, x)`.
+ *
+   * @tparam S The type of the values in the resulting `Ordering`.
+   * @param f The transforming function.
+   * @return The modified ordering.
+ */
+ final def modifyOrRemove[S](f: (Int, T) => Option[S]): Ordering[S] = this match {
+ case Zero => Zero
+ case Tip(key, value) =>
+ f(key, value) match {
+ case None => Zero
+ case Some(value2) =>
+ // hack to preserve sharing
+ if (value.asInstanceOf[AnyRef] eq value2.asInstanceOf[AnyRef]) this.asInstanceOf[Ordering[S]]
+ else Tip(key, value2)
+ }
+ case Bin(prefix, mask, left, right) =>
+ val l = left.modifyOrRemove(f)
+ val r = right.modifyOrRemove(f)
+ if ((left eq l) && (right eq r)) this.asInstanceOf[Ordering[S]]
+ else bin(prefix, mask, l, r)
+ }
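+    // Usage sketch (illustrative): on an ordering holding "a" and "b" at ordinals
+    // 1 and 2, modifyOrRemove((o, v) => if (o == 1) None else Some(v.toUpperCase))
+    // drops ordinal 1 and maps ordinal 2 to "B". Returning a value that is
+    // reference-equal to the old one reuses the existing nodes, per the sharing
+    // hack in the Tip case above.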
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/TreeSet.scala b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala
new file mode 100644
index 000000000000..c4241b818c38
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/TreeSet.scala
@@ -0,0 +1,297 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.ReusableBuilder
+import scala.collection.immutable.{RedBlackTree => RB}
+import scala.runtime.AbstractFunction1
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** This class implements immutable sorted sets using a red-black tree.
+ *
+ * @tparam A the type of the elements contained in this tree set
+ * @param ordering the implicit ordering used to compare objects of type `A`
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]]
+ * section on `Red-Black Trees` for more information.
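+ *
+ * A brief usage sketch (illustrative, not part of the original scaladoc):
+ * {{{
+ *   val s = TreeSet(3, 1, 2)   // TreeSet(1, 2, 3) under Ordering.Int
+ *   s.incl(0)                  // TreeSet(0, 1, 2, 3)
+ *   s.range(1, 3)              // TreeSet(1, 2)
+ * }}}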
+ *
+ * @define Coll `immutable.TreeSet`
+ * @define coll immutable tree set
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[A, Any])(implicit val ordering: Ordering[A])
+ extends AbstractSet[A]
+ with SortedSet[A]
+ with SortedSetOps[A, TreeSet, TreeSet[A]]
+ with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]]
+ with SortedSetFactoryDefaults[A, TreeSet, Set]
+ with DefaultSerializable {
+
+ if (ordering eq null) throw new NullPointerException("ordering must not be null")
+
+ def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
+
+ override def sortedIterableFactory = TreeSet
+
+ private[this] def newSetOrSelf(t: RB.Tree[A, Any]) = if(t eq tree) this else new TreeSet[A](t)
+
+ override def size: Int = RB.count(tree)
+
+ override def isEmpty = size == 0
+
+ override def head: A = RB.smallest(tree).key
+
+ override def last: A = RB.greatest(tree).key
+
+ override def tail: TreeSet[A] = new TreeSet(RB.tail(tree))
+
+ override def init: TreeSet[A] = new TreeSet(RB.init(tree))
+
+ override def min[A1 >: A](implicit ord: Ordering[A1]): A = {
+ if ((ord eq ordering) && nonEmpty) {
+ head
+ } else {
+ super.min(ord)
+ }
+ }
+
+ override def max[A1 >: A](implicit ord: Ordering[A1]): A = {
+ if ((ord eq ordering) && nonEmpty) {
+ last
+ } else {
+ super.max(ord)
+ }
+ }
+
+ override def drop(n: Int): TreeSet[A] = {
+ if (n <= 0) this
+ else if (n >= size) empty
+ else new TreeSet(RB.drop(tree, n))
+ }
+
+ override def take(n: Int): TreeSet[A] = {
+ if (n <= 0) empty
+ else if (n >= size) this
+ else new TreeSet(RB.take(tree, n))
+ }
+
+ override def slice(from: Int, until: Int): TreeSet[A] = {
+ if (until <= from) empty
+ else if (from <= 0) take(until)
+ else if (until >= size) drop(from)
+ else new TreeSet(RB.slice(tree, from, until))
+ }
+
+ override def dropRight(n: Int): TreeSet[A] = take(size - math.max(n, 0))
+
+ override def takeRight(n: Int): TreeSet[A] = drop(size - math.max(n, 0))
+
+ private[this] def countWhile(p: A => Boolean): Int = {
+ var result = 0
+ val it = iterator
+ while (it.hasNext && p(it.next())) result += 1
+ result
+ }
+ override def dropWhile(p: A => Boolean): TreeSet[A] = drop(countWhile(p))
+
+ override def takeWhile(p: A => Boolean): TreeSet[A] = take(countWhile(p))
+
+ override def span(p: A => Boolean): (TreeSet[A], TreeSet[A]) = splitAt(countWhile(p))
+
+ override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f)
+
+ override def minAfter(key: A): Option[A] = {
+ val v = RB.minAfter(tree, key)
+ if (v eq null) Option.empty else Some(v.key)
+ }
+
+ override def maxBefore(key: A): Option[A] = {
+ val v = RB.maxBefore(tree, key)
+ if (v eq null) Option.empty else Some(v.key)
+ }
+
+ def iterator: Iterator[A] = RB.keysIterator(tree)
+
+ def iteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+ import scala.collection.convert.impl._
+ type T = RB.Tree[A, Any]
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Int])
+ case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.key.asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree, _.left, _.right, _.key))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ /** Checks if this set contains element `elem`.
+ *
+ * @param elem the element to check for membership.
+   * @return `true`, iff `elem` is contained in this set.
+ */
+ def contains(elem: A): Boolean = RB.contains(tree, elem)
+
+ override def range(from: A, until: A): TreeSet[A] = newSetOrSelf(RB.range(tree, from, until))
+
+ def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSetOrSelf(RB.rangeImpl(tree, from, until))
+
+ /** Creates a new `TreeSet` with the entry added.
+ *
+ * @param elem a new element to add.
+ * @return a new $coll containing `elem` and all the elements of this $coll.
+ */
+ def incl(elem: A): TreeSet[A] =
+ newSetOrSelf(RB.update(tree, elem, null, overwrite = false))
+
+ /** Creates a new `TreeSet` with the entry removed.
+ *
+   * @param elem the element to remove.
+ * @return a new $coll containing all the elements of this $coll except `elem`.
+ */
+ def excl(elem: A): TreeSet[A] =
+ newSetOrSelf(RB.delete(tree, elem))
+
+ override def concat(that: collection.IterableOnce[A]): TreeSet[A] = {
+ val t = that match {
+ case ts: TreeSet[A] if ordering == ts.ordering =>
+ RB.union(tree, ts.tree)
+ case _ =>
+ val it = that.iterator
+ var t = tree
+ while (it.hasNext) t = RB.update(t, it.next(), null, overwrite = false)
+ t
+ }
+ newSetOrSelf(t)
+ }
+
+ override def removedAll(that: IterableOnce[A]): TreeSet[A] = that match {
+ case ts: TreeSet[A] if ordering == ts.ordering =>
+ newSetOrSelf(RB.difference(tree, ts.tree))
+ case _ =>
+ //TODO add an implementation of a mutable subtractor similar to TreeMap
+ //but at least this doesn't create a TreeSet for each iteration
+ object sub extends AbstractFunction1[A, Unit] {
+ var currentTree = tree
+ override def apply(k: A): Unit = {
+ currentTree = RB.delete(currentTree, k)
+ }
+ }
+ that.iterator.foreach(sub)
+ newSetOrSelf(sub.currentTree)
+ }
+
+ override def intersect(that: collection.Set[A]): TreeSet[A] = that match {
+ case ts: TreeSet[A] if ordering == ts.ordering =>
+ newSetOrSelf(RB.intersect(tree, ts.tree))
+ case _ =>
+ super.intersect(that)
+ }
+
+ override def diff(that: collection.Set[A]): TreeSet[A] = that match {
+ case ts: TreeSet[A] if ordering == ts.ordering =>
+ newSetOrSelf(RB.difference(tree, ts.tree))
+ case _ =>
+ super.diff(that)
+ }
+
+ override def filter(f: A => Boolean): TreeSet[A] = newSetOrSelf(RB.filterEntries[A, Any](tree, {(k, _) => f(k)}))
+
+ override def partition(p: A => Boolean): (TreeSet[A], TreeSet[A]) = {
+    val (l, r) = RB.partitionEntries(tree, { (a: A, _: Any) => p(a) })
+ (newSetOrSelf(l), newSetOrSelf(r))
+ }
+
+ override def equals(obj: Any): Boolean = obj match {
+ case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree)
+ case _ => super.equals(obj)
+ }
+
+ override protected[this] def className = "TreeSet"
+}
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll `immutable.TreeSet`
+ * @define coll immutable tree set
+ */
+@SerialVersionUID(3L)
+object TreeSet extends SortedIterableFactory[TreeSet] {
+
+ def empty[A: Ordering]: TreeSet[A] = new TreeSet[A]
+
+ def from[E](it: scala.collection.IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] =
+ it match {
+ case ts: TreeSet[E] if ordering == ts.ordering => ts
+ case ss: scala.collection.SortedSet[E] if ordering == ss.ordering =>
+ new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size))
+ case r: Range if (ordering eq Ordering.Int) || (Ordering.Int isReverseOf ordering) =>
+ val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator
+ val tree = RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size)
+ // The cast is needed to compile with Dotty:
+ // Dotty doesn't infer that E =:= Int, since instantiation of covariant GADTs is unsound
+ new TreeSet[E](tree)
+ case _ =>
+ var t: RB.Tree[E, Null] = null
+ val i = it.iterator
+ while (i.hasNext) t = RB.update(t, i.next(), null, overwrite = false)
+ new TreeSet[E](t)
+ }
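+  // Illustrative note: TreeSet.from(1 to 6) hits the Range case above and builds
+  // the tree from an already-ordered iterator in O(n), skipping the per-element
+  // insertions of the generic fallback.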
+
+ def newBuilder[A](implicit ordering: Ordering[A]): ReusableBuilder[A, TreeSet[A]] = new TreeSetBuilder[A]
+ private class TreeSetBuilder[A](implicit ordering: Ordering[A])
+ extends RB.SetHelper[A]
+ with ReusableBuilder[A, TreeSet[A]] {
+ type Tree = RB.Tree[A, Any]
+    private[this] var tree: RB.Tree[A @uncheckedCaptures, Any] = null
+
+ override def addOne(elem: A): this.type = {
+ tree = mutableUpd(tree, elem)
+ this
+ }
+
+ override def addAll(xs: IterableOnce[A]^): this.type = {
+ xs match {
+ // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++=
+ // for the moment we have to force immutability before the union
+ // which will waste some time and space
+ // calling `beforePublish` makes `tree` immutable
+ case ts: TreeSet[A] if ts.ordering == ordering =>
+ if (tree eq null) tree = ts.tree
+ else tree = RB.union(beforePublish(tree), ts.tree)(ordering)
+ case ts: TreeMap[A @unchecked, _] if ts.ordering == ordering =>
+ if (tree eq null) tree = ts.tree0
+ else tree = RB.union(beforePublish(tree), ts.tree0)(ordering)
+ case _ =>
+ super.addAll(xs)
+ }
+ this
+ }
+
+ override def clear(): Unit = {
+ tree = null
+ }
+
+ override def result(): TreeSet[A] = new TreeSet[A](beforePublish(tree))(ordering)
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/Vector.scala b/tests/pos-special/stdlib/collection/immutable/Vector.scala
new file mode 100644
index 000000000000..d9d33add512d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/Vector.scala
@@ -0,0 +1,2476 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import java.lang.Math.{abs, max => mmax, min => mmin}
+import java.util.Arrays.{copyOf, copyOfRange}
+import java.util.{Arrays, Spliterator}
+
+import scala.annotation.switch
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.immutable.VectorInline._
+import scala.collection.immutable.VectorStatics._
+import scala.collection.mutable.ReusableBuilder
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+
+/** $factoryInfo
+ * @define Coll `Vector`
+ * @define coll vector
+ */
+@SerialVersionUID(3L)
+object Vector extends StrictOptimizedSeqFactory[Vector] {
+
+ def empty[A]: Vector[A] = Vector0
+
+ def from[E](it: collection.IterableOnce[E]^): Vector[E] =
+ it match {
+ case v: Vector[E] => v
+ case _ =>
+ val knownSize = it.knownSize
+ if (knownSize == 0) empty[E]
+ else if (knownSize > 0 && knownSize <= WIDTH) {
+ val a1: Arr1 = it match {
+ case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] =>
+ as.unsafeArray.asInstanceOf[Arr1]
+ case it: Iterable[E] =>
+ val a1 = new Arr1(knownSize)
+ it.copyToArray(a1.asInstanceOf[Array[Any]])
+ a1
+ case _ =>
+ val a1 = new Arr1(knownSize)
+ it.iterator.copyToArray(a1.asInstanceOf[Array[Any]])
+ a1.asInstanceOf[Arr1]
+ }
+ new Vector1[E](a1)
+ } else {
+ (newBuilder ++= it).result()
+ }
+ }
+
+ def newBuilder[A]: ReusableBuilder[A, Vector[A]] = new VectorBuilder[A]
+
+ /** Create a Vector with the same element at each index.
+ *
+ * Unlike `fill`, which takes a by-name argument for the value and can thereby
+ * compute different values for each index, this method guarantees that all
+ * elements are identical. This allows sparse allocation in O(log n) time and space.
+ */
+ private[collection] def fillSparse[A](n: Int)(elem: A): Vector[A] = {
+ //TODO Make public; this method is private for now because it is not forward binary compatible
+ if(n <= 0) Vector0
+ else {
+ val b = new VectorBuilder[A]
+ b.initSparse(n, elem)
+ b.result()
+ }
+ }
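+  // Illustrative sketch: fillSparse(1 << 20)(0) allocates each populated array
+  // once per level and shares it across the tree, so a million identical
+  // elements cost O(log n) arrays rather than the O(n) storage `fill` needs.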
+
+ private val defaultApplyPreferredMaxLength: Int =
+ try System.getProperty("scala.collection.immutable.Vector.defaultApplyPreferredMaxLength",
+ "250").toInt
+ catch {
+ case _: SecurityException => 250
+ }
+
+ private val emptyIterator = new NewVectorIterator(Vector0, 0, 0)
+}
+
+
+/** Vector is a general-purpose, immutable data structure. It provides random access and updates
+ * in O(log n) time, as well as very fast append/prepend/tail/init (amortized O(1), worst case O(log n)).
+ * Because vectors strike a good balance between fast random selections and fast random functional updates,
+ * they are currently the default implementation of immutable indexed sequences.
+ *
+ * Vectors are implemented by radix-balanced finger trees of width 32. There is a separate subclass
+ * for each level (0 to 6, with 0 being the empty vector and 6 a tree with a maximum width of 64 at the
+ * top level).
+ *
+ * Tree balancing:
+ * - Only the first dimension of an array may have a size < WIDTH
+ * - In a `data` (central) array the first dimension may be up to WIDTH-2 long, in `prefix1` and `suffix1` up
+ * to WIDTH, and in other `prefix` and `suffix` arrays up to WIDTH-1
+ * - `prefix1` and `suffix1` are never empty
+ * - Balancing does not cross the main data array (i.e. prepending never touches the suffix and appending never touches
+ * the prefix). The level is increased/decreased when the affected side plus main data is already full/empty
+ * - All arrays are left-aligned and truncated
+ *
+ * In addition to the data slices (`prefix1`, `prefix2`, ..., `dataN`, ..., `suffix2`, `suffix1`) we store a running
+ * count of elements after each prefix for more efficient indexing without having to dereference all prefix arrays.
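+ *
+ * A quick complexity sketch (illustrative, not part of the original scaladoc):
+ * {{{
+ *   val v = Vector.tabulate(100000)(identity)
+ *   v(50000)          // O(log n) indexed access
+ *   v.updated(0, -1)  // O(log n) functional update
+ *   v :+ 100000       // amortized O(1) append
+ * }}}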
+ */
+sealed abstract class Vector[+A] private[immutable] (private[immutable] final val prefix1: Arr1)
+ extends AbstractSeq[A]
+ with IndexedSeq[A]
+ with IndexedSeqOps[A, Vector, Vector[A]]
+ with StrictOptimizedSeqOps[A, Vector, Vector[A]]
+ with IterableFactoryDefaults[A, Vector]
+ with DefaultSerializable {
+
+ override def iterableFactory: SeqFactory[Vector] = Vector
+
+ override final def length: Int =
+ if(this.isInstanceOf[BigVector[_]]) this.asInstanceOf[BigVector[_]].length0
+ else prefix1.length
+
+ override final def iterator: Iterator[A] =
+ if(this.isInstanceOf[Vector0.type]) Vector.emptyIterator
+ else new NewVectorIterator(this, length, vectorSliceCount)
+
+ override final protected[collection] def filterImpl(pred: A => Boolean, isFlipped: Boolean): Vector[A] = {
+ var i = 0
+ val len = prefix1.length
+ while (i != len) {
+ if (pred(prefix1(i).asInstanceOf[A]) == isFlipped) {
+ // each 1 bit indicates that index passes the filter.
+ // all indices < i are also assumed to pass the filter
+ var bitmap = 0
+ var j = i + 1
+ while (j < len) {
+ if (pred(prefix1(j).asInstanceOf[A]) != isFlipped) {
+ bitmap |= (1 << j)
+ }
+ j += 1
+ }
+ val newLen = i + java.lang.Integer.bitCount(bitmap)
+
+ if(this.isInstanceOf[BigVector[_]]) {
+ val b = new VectorBuilder[A]
+ var k = 0
+ while(k < i) {
+ b.addOne(prefix1(k).asInstanceOf[A])
+ k += 1
+ }
+ k = i + 1
+ while (i != newLen) {
+ if (((1 << k) & bitmap) != 0) {
+ b.addOne(prefix1(k).asInstanceOf[A])
+ i += 1
+ }
+ k += 1
+ }
+ this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) }
+ return b.result()
+ } else {
+ if (newLen == 0) return Vector0
+ val newData = new Array[AnyRef](newLen)
+ System.arraycopy(prefix1, 0, newData, 0, i)
+ var k = i + 1
+ while (i != newLen) {
+ if (((1 << k) & bitmap) != 0) {
+ newData(i) = prefix1(k)
+ i += 1
+ }
+ k += 1
+ }
+ return new Vector1[A](newData)
+ }
+ }
+ i += 1
+ }
+ if(this.isInstanceOf[BigVector[_]]) {
+ val b = new VectorBuilder[A]
+ b.initFrom(prefix1)
+ this.asInstanceOf[BigVector[A]].foreachRest { v => if(pred(v) != isFlipped) b.addOne(v) }
+ b.result()
+ } else this
+ }
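+  // Bitmap walk-through (illustrative): with prefix1 = [a, b, c, d] and the
+  // first failing element at i = 1, bits are set for the later passing indices,
+  // e.g. bitmap = 0b1100 when c and d pass; then newLen = 1 + bitCount(bitmap) = 3
+  // and the survivors are compacted left-to-right into the result.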
+
+ // Dummy overrides to refine result types for binary compatibility:
+ override def updated[B >: A](index: Int, elem: B): Vector[B] = super.updated(index, elem)
+ override def appended[B >: A](elem: B): Vector[B] = super.appended(elem)
+ override def prepended[B >: A](elem: B): Vector[B] = super.prepended(elem)
+ override def prependedAll[B >: A](prefix: collection.IterableOnce[B]^): Vector[B] = {
+ val k = prefix.knownSize
+ if (k == 0) this
+ else if (k < 0) super.prependedAll(prefix)
+ else prependedAll0(prefix, k)
+ }
+
+ override final def appendedAll[B >: A](suffix: collection.IterableOnce[B]^): Vector[B] = {
+ val k = suffix.knownSize
+ if (k == 0) this
+ else if (k < 0) super.appendedAll(suffix)
+ else appendedAll0(suffix, k)
+ }
+
+ protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ // k >= 0, k = prefix.knownSize
+ val tinyAppendLimit = 4 + vectorSliceCount
+ if (k < tinyAppendLimit /*|| k < (this.size >>> Log2ConcatFaster)*/) {
+ var v: Vector[B] = this
+ val it = IndexedSeq.from(prefix).reverseIterator
+ while (it.hasNext) v = it.next() +: v
+ v
+ } else if (this.size < (k >>> Log2ConcatFaster) && prefix.isInstanceOf[Vector[_]]) {
+ var v = prefix.asInstanceOf[Vector[B]]
+ val it = this.iterator
+ while (it.hasNext) v = v :+ it.next()
+ v
+ } else if (k < this.size - AlignToFaster) {
+ new VectorBuilder[B].alignTo(k, this).addAll(prefix).addAll(this).result()
+ } else super.prependedAll(prefix)
+ }
+
+ protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ // k >= 0, k = suffix.knownSize
+ val tinyAppendLimit = 4 + vectorSliceCount
+ if (k < tinyAppendLimit) {
+ var v: Vector[B @uncheckedCaptures] = this
+ suffix match {
+ case it: Iterable[_] => it.asInstanceOf[Iterable[B]].foreach(x => v = v.appended(x))
+ case _ => suffix.iterator.foreach(x => v = v.appended(x))
+ }
+ v
+ } else if (this.size < (k >>> Log2ConcatFaster) && suffix.isInstanceOf[Vector[_]]) {
+ var v = suffix.asInstanceOf[Vector[B]]
+ val ri = this.reverseIterator
+ while (ri.hasNext) v = v.prepended(ri.next())
+ v
+ } else if (this.size < k - AlignToFaster && suffix.isInstanceOf[Vector[_]]) {
+ val v = suffix.asInstanceOf[Vector[B]]
+ new VectorBuilder[B].alignTo(this.size, v).addAll(this).addAll(v).result()
+ } else new VectorBuilder[B].initFrom(this).addAll(suffix).result()
+ }
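+  // Strategy sketch (illustrative): small suffixes are appended element by
+  // element; when this vector is much smaller than a Vector suffix, its
+  // elements are prepended onto that suffix instead; otherwise a VectorBuilder
+  // (aligned to the larger operand when profitable) concatenates the two.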
+
+ override def className = "Vector"
+
+ @inline override final def take(n: Int): Vector[A] = slice(0, n)
+ @inline override final def drop(n: Int): Vector[A] = slice(n, length)
+ @inline override final def takeRight(n: Int): Vector[A] = slice(length - mmax(n, 0), length)
+ @inline override final def dropRight(n: Int): Vector[A] = slice(0, length - mmax(n, 0))
+ override def tail: Vector[A] = slice(1, length)
+ override def init: Vector[A] = slice(0, length-1)
+
+  /** Like slice but parameters must satisfy 0 <= lo < hi <= length and hi - lo < length */
+ protected[this] def slice0(lo: Int, hi: Int): Vector[A]
+
+ /** Number of slices */
+ protected[immutable] def vectorSliceCount: Int
+ /** Slice at index */
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef]
+ /** Length of all slices up to and including index */
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int
+
+ override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = iterator.copyToArray(xs, start, len)
+
+ override def toVector: Vector[A] = this
+
+ override protected def applyPreferredMaxLength: Int = Vector.defaultApplyPreferredMaxLength
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+ val s = shape.shape match {
+ case StepperShape.IntShape => new IntVectorStepper(iterator.asInstanceOf[NewVectorIterator[Int]])
+ case StepperShape.LongShape => new LongVectorStepper(iterator.asInstanceOf[NewVectorIterator[Long]])
+ case StepperShape.DoubleShape => new DoubleVectorStepper(iterator.asInstanceOf[NewVectorIterator[Double]])
+ case _ => shape.parUnbox(new AnyVectorStepper[A](iterator.asInstanceOf[NewVectorIterator[A]]))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ protected[this] def ioob(index: Int): IndexOutOfBoundsException =
+ new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${length-1})")
+
+ override final def head: A =
+ if (prefix1.length == 0) throw new NoSuchElementException("empty.head")
+ else prefix1(0).asInstanceOf[A]
+
+ override final def last: A = {
+ if(this.isInstanceOf[BigVector[_]]) {
+ val suffix = this.asInstanceOf[BigVector[_]].suffix1
+    if(suffix.length == 0) throw new NoSuchElementException("empty.last")
+ else suffix(suffix.length-1)
+ } else prefix1(prefix1.length-1)
+ }.asInstanceOf[A]
+
+ override final def foreach[U](f: A => U): Unit = {
+ val c = vectorSliceCount
+ var i = 0
+ while (i < c) {
+ foreachRec(vectorSliceDim(c, i) - 1, vectorSlice(i), f)
+ i += 1
+ }
+ }
+
+ // The following definitions are needed for binary compatibility with ParVector
+ private[collection] def startIndex: Int = 0
+ private[collection] def endIndex: Int = length
+ private[collection] def initIterator[B >: A](s: VectorIterator[B]): Unit =
+ s.it = iterator.asInstanceOf[NewVectorIterator[B]]
+}
+
+
+/** This class only exists because we cannot override `slice` in `Vector` in a binary-compatible way */
+private sealed abstract class VectorImpl[+A](_prefix1: Arr1) extends Vector[A](_prefix1) {
+
+ override final def slice(from: Int, until: Int): Vector[A] = {
+ val lo = mmax(from, 0)
+ val hi = mmin(until, length)
+ if (hi <= lo) Vector0
+ else if (hi - lo == length) this
+ else slice0(lo, hi)
+ }
+}
+
+
+/** Vector with suffix and length fields; all Vector subclasses except Vector1 extend this */
+private sealed abstract class BigVector[+A](_prefix1: Arr1, private[immutable] val suffix1: Arr1, private[immutable] val length0: Int) extends VectorImpl[A](_prefix1) {
+
+ protected[immutable] final def foreachRest[U](f: A => U): Unit = {
+ val c = vectorSliceCount
+ var i = 1
+ while(i < c) {
+ foreachRec(vectorSliceDim(c, i)-1, vectorSlice(i), f)
+ i += 1
+ }
+ }
+}
+
+
+/** Empty vector */
+private object Vector0 extends BigVector[Nothing](empty1, empty1, 0) {
+
+ def apply(index: Int): Nothing = throw ioob(index)
+
+ override def updated[B >: Nothing](index: Int, elem: B): Vector[B] = throw ioob(index)
+
+ override def appended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem))
+
+ override def prepended[B >: Nothing](elem: B): Vector[B] = new Vector1(wrap1(elem))
+
+ override def map[B](f: Nothing => B): Vector[B] = this
+
+ override def tail: Vector[Nothing] = throw new UnsupportedOperationException("empty.tail")
+
+ override def init: Vector[Nothing] = throw new UnsupportedOperationException("empty.init")
+
+ protected[this] def slice0(lo: Int, hi: Int): Vector[Nothing] = this
+
+ protected[immutable] def vectorSliceCount: Int = 0
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = null
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = 0
+
+ override def equals(o: Any): Boolean = {
+ if(this eq o.asInstanceOf[AnyRef]) true
+ else o match {
+ case that: Vector[_] => false
+ case o => super.equals(o)
+ }
+ }
+
+  override protected[this] def prependedAll0[B >: Nothing](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ Vector.from(prefix)
+
+  override protected[this] def appendedAll0[B >: Nothing](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ Vector.from(suffix)
+
+ override protected[this] def ioob(index: Int): IndexOutOfBoundsException =
+ new IndexOutOfBoundsException(s"$index is out of bounds (empty vector)")
+}
+
+/** Flat ArraySeq-like structure */
+private final class Vector1[+A](_data1: Arr1) extends VectorImpl[A](_data1) {
+
+ @inline def apply(index: Int): A = {
+ if(index >= 0 && index < prefix1.length)
+ prefix1(index).asInstanceOf[A]
+ else throw ioob(index)
+ }
+
+ override def updated[B >: A](index: Int, elem: B): Vector[B] = {
+ if(index >= 0 && index < prefix1.length)
+ new Vector1(copyUpdate(prefix1, index, elem))
+ else throw ioob(index)
+ }
+
+ override def appended[B >: A](elem: B): Vector[B] = {
+ val len1 = prefix1.length
+ if(len1 < WIDTH) new Vector1(copyAppend1(prefix1, elem))
+ else new Vector2(prefix1, WIDTH, empty2, wrap1(elem), WIDTH+1)
+ }
+
+ override def prepended[B >: A](elem: B): Vector[B] = {
+ val len1 = prefix1.length
+ if(len1 < WIDTH) new Vector1(copyPrepend1(elem, prefix1))
+ else new Vector2(wrap1(elem), 1, empty2, prefix1, len1+1)
+ }
+
+ override def map[B](f: A => B): Vector[B] = new Vector1(mapElems1(prefix1, f))
+
+ protected[this] def slice0(lo: Int, hi: Int): Vector[A] =
+ new Vector1(copyOfRange(prefix1, lo, hi))
+
+ override def tail: Vector[A] =
+ if(prefix1.length == 1) Vector0
+ else new Vector1(copyTail(prefix1))
+
+ override def init: Vector[A] =
+ if(prefix1.length == 1) Vector0
+ else new Vector1(copyInit(prefix1))
+
+ protected[immutable] def vectorSliceCount: Int = 1
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = prefix1
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = prefix1.length
+
+ override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ prepend1IfSpace(prefix1, prefix) match {
+ case null => super.prependedAll0(prefix, k)
+ case data1b => new Vector1(data1b)
+ }
+
+ override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ val data1b = append1IfSpace(prefix1, suffix)
+ if(data1b ne null) new Vector1(data1b)
+ else super.appendedAll0(suffix, k)
+ }
+}
+
+
+/** 2-dimensional radix-balanced finger tree */
+private final class Vector2[+A](_prefix1: Arr1, private[immutable] val len1: Int,
+ private[immutable] val data2: Arr2,
+ _suffix1: Arr1,
+ _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) {
+
+ @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1,
+ data2: Arr2 = data2,
+ suffix1: Arr1 = suffix1,
+ length0: Int = length0) =
+ new Vector2(prefix1, len1, data2, suffix1, length0)
+
+ @inline def apply(index: Int): A = {
+ if(index >= 0 && index < length0) {
+ val io = index - len1
+ if(io >= 0) {
+ val i2 = io >>> BITS
+ val i1 = io & MASK
+ if(i2 < data2.length) data2(i2)(i1)
+ else suffix1(io & MASK)
+ } else prefix1(index)
+ }.asInstanceOf[A] else throw ioob(index)
+ }
+
+ override def updated[B >: A](index: Int, elem: B): Vector[B] = {
+ if(index >= 0 && index < length0) {
+ if(index >= len1) {
+ val io = index - len1
+ val i2 = io >>> BITS
+ val i1 = io & MASK
+ if(i2 < data2.length) copy(data2 = copyUpdate(data2, i2, i1, elem))
+ else copy(suffix1 = copyUpdate(suffix1, i1, elem))
+ } else {
+ copy(prefix1 = copyUpdate(prefix1, index, elem))
+ }
+ } else throw ioob(index)
+ }
+
+ override def appended[B >: A](elem: B): Vector[B] = {
+ if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1)
+ else if(data2.length < WIDTH-2) copy(data2 = copyAppend(data2, suffix1), suffix1 = wrap1(elem), length0 = length0+1)
+ else new Vector3(prefix1, len1, data2, WIDTH*(WIDTH-2) + len1, empty3, wrap2(suffix1), wrap1(elem), length0+1)
+ }
+
+ override def prepended[B >: A](elem: B): Vector[B] = {
+ if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, length0 = length0+1)
+ else if(data2.length < WIDTH-2) copy(wrap1(elem), 1, copyPrepend(prefix1, data2), length0 = length0+1)
+ else new Vector3(wrap1(elem), 1, wrap2(prefix1), len1+1, empty3, data2, suffix1, length0+1)
+ }
+
+ override def map[B](f: A => B): Vector[B] =
+ copy(prefix1 = mapElems1(prefix1, f), data2 = mapElems(2, data2, f), suffix1 = mapElems1(suffix1, f))
+
+ protected[this] def slice0(lo: Int, hi: Int): Vector[A] = {
+ val b = new VectorSliceBuilder(lo, hi)
+ b.consider(1, prefix1)
+ b.consider(2, data2)
+ b.consider(1, suffix1)
+ b.result()
+ }
+
+ override def tail: Vector[A] =
+ if(len1 > 1) copy(copyTail(prefix1), len1-1, length0 = length0-1)
+ else slice0(1, length0)
+
+ override def init: Vector[A] =
+ if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1)
+ else slice0(0, length0-1)
+
+ protected[immutable] def vectorSliceCount: Int = 3
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match {
+ case 0 => prefix1
+ case 1 => data2
+ case 2 => suffix1
+ }
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match {
+ case 0 => len1
+ case 1 => length0 - suffix1.length
+ case 2 => length0
+ }
+
+ override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ prepend1IfSpace(prefix1, prefix) match {
+ case null => super.prependedAll0(prefix, k)
+ case prefix1b =>
+ val diff = prefix1b.length - prefix1.length
+ copy(prefix1 = prefix1b,
+ len1 = len1 + diff,
+ length0 = length0 + diff,
+ )
+ }
+
+ override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ val suffix1b = append1IfSpace(suffix1, suffix)
+ if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length)
+ else super.appendedAll0(suffix, k)
+ }
+}
+
+
+/** 3-dimensional radix-balanced finger tree */
+private final class Vector3[+A](_prefix1: Arr1, private[immutable] val len1: Int,
+ private[immutable] val prefix2: Arr2, private[immutable] val len12: Int,
+ private[immutable] val data3: Arr3,
+ private[immutable] val suffix2: Arr2, _suffix1: Arr1,
+ _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) {
+
+ @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1,
+ prefix2: Arr2 = prefix2, len12: Int = len12,
+ data3: Arr3 = data3,
+ suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1,
+ length0: Int = length0) =
+ new Vector3(prefix1, len1, prefix2, len12, data3, suffix2, suffix1, length0)
+
+ @inline def apply(index: Int): A = {
+ if(index >= 0 && index < length0) {
+ val io = index - len12
+ if(io >= 0) {
+ val i3 = io >>> BITS2
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if(i3 < data3.length) data3(i3)(i2)(i1)
+ else if(i2 < suffix2.length) suffix2(i2)(i1)
+ else suffix1(i1)
+ } else if(index >= len1) {
+ val io = index - len1
+ prefix2(io >>> BITS)(io & MASK)
+ } else prefix1(index)
+ }.asInstanceOf[A] else throw ioob(index)
+ }
+
+ override def updated[B >: A](index: Int, elem: B): Vector[B] = {
+ if(index >= 0 && index < length0) {
+ if(index >= len12) {
+ val io = index - len12
+ val i3 = io >>> BITS2
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if (i3 < data3.length ) copy(data3 = copyUpdate(data3, i3, i2, i1, elem))
+ else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem))
+ else copy(suffix1 = copyUpdate(suffix1, i1, elem))
+ } else if(index >= len1) {
+ val io = index - len1
+ copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem))
+ } else {
+ copy(prefix1 = copyUpdate(prefix1, index, elem))
+ }
+ } else throw ioob(index)
+ }
+
+ override def appended[B >: A](elem: B): Vector[B] = {
+ if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1)
+ else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1)
+ else if(data3.length < WIDTH-2) copy(data3 = copyAppend(data3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else new Vector4(prefix1, len1, prefix2, len12, data3, (WIDTH-2)*WIDTH2 + len12, empty4, wrap3(copyAppend(suffix2, suffix1)), empty2, wrap1(elem), length0+1)
+ }
+
+ override def prepended[B >: A](elem: B): Vector[B] = {
+ if (len1 < WIDTH ) copy(prefix1 = copyPrepend1(elem, prefix1), len1 = len1+1, len12 = len12+1, length0 = length0+1)
+ else if(len12 < WIDTH2 ) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = copyPrepend(prefix1, prefix2), len12 = len12+1, length0 = length0+1)
+ else if(data3.length < WIDTH-2) copy(prefix1 = wrap1(elem), len1 = 1, prefix2 = empty2, len12 = 1, data3 = copyPrepend(copyPrepend(prefix1, prefix2), data3), length0 = length0+1)
+ else new Vector4(wrap1(elem), 1, empty2, 1, wrap3(copyPrepend(prefix1, prefix2)), len12+1, empty4, data3, suffix2, suffix1, length0+1)
+ }
+
+ override def map[B](f: A => B): Vector[B] =
+ copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f),
+ data3 = mapElems(3, data3, f),
+ suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f))
+
+ protected[this] def slice0(lo: Int, hi: Int): Vector[A] = {
+ val b = new VectorSliceBuilder(lo, hi)
+ b.consider(1, prefix1)
+ b.consider(2, prefix2)
+ b.consider(3, data3)
+ b.consider(2, suffix2)
+ b.consider(1, suffix1)
+ b.result()
+ }
+
+ override def tail: Vector[A] =
+ if(len1 > 1) copy(prefix1 = copyTail(prefix1), len1 = len1-1, len12 = len12-1, length0 = length0-1)
+ else slice0(1, length0)
+
+ override def init: Vector[A] =
+ if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1)
+ else slice0(0, length0-1)
+
+ protected[immutable] def vectorSliceCount: Int = 5
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match {
+ case 0 => prefix1
+ case 1 => prefix2
+ case 2 => data3
+ case 3 => suffix2
+ case 4 => suffix1
+ }
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match {
+ case 0 => len1
+ case 1 => len12
+ case 2 => len12 + data3.length*WIDTH2
+ case 3 => length0 - suffix1.length
+ case 4 => length0
+ }
+
+ override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ prepend1IfSpace(prefix1, prefix) match {
+ case null => super.prependedAll0(prefix, k)
+ case prefix1b =>
+ val diff = prefix1b.length - prefix1.length
+ copy(prefix1 = prefix1b,
+ len1 = len1 + diff,
+ len12 = len12 + diff,
+ length0 = length0 + diff,
+ )
+ }
+
+ override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ val suffix1b = append1IfSpace(suffix1, suffix)
+ if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length)
+ else super.appendedAll0(suffix, k)
+ }
+}
+
+
+/** 4-dimensional radix-balanced finger tree */
+private final class Vector4[+A](_prefix1: Arr1, private[immutable] val len1: Int,
+ private[immutable] val prefix2: Arr2, private[immutable] val len12: Int,
+ private[immutable] val prefix3: Arr3, private[immutable] val len123: Int,
+ private[immutable] val data4: Arr4,
+ private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1,
+ _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) {
+
+ @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1,
+ prefix2: Arr2 = prefix2, len12: Int = len12,
+ prefix3: Arr3 = prefix3, len123: Int = len123,
+ data4: Arr4 = data4,
+ suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1,
+ length0: Int = length0) =
+ new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, length0)
+
+ @inline def apply(index: Int): A = {
+ if(index >= 0 && index < length0) {
+ val io = index - len123
+ if(io >= 0) {
+ val i4 = io >>> BITS3
+ val i3 = (io >>> BITS2) & MASK
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if(i4 < data4.length) data4(i4)(i3)(i2)(i1)
+ else if(i3 < suffix3.length) suffix3(i3)(i2)(i1)
+ else if(i2 < suffix2.length) suffix2(i2)(i1)
+ else suffix1(i1)
+ } else if(index >= len12) {
+ val io = index - len12
+ prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK)
+ } else if(index >= len1) {
+ val io = index - len1
+ prefix2(io >>> BITS)(io & MASK)
+ } else prefix1(index)
+ }.asInstanceOf[A] else throw ioob(index)
+ }
+
+ override def updated[B >: A](index: Int, elem: B): Vector[B] = {
+ if(index >= 0 && index < length0) {
+ if(index >= len123) {
+ val io = index - len123
+ val i4 = io >>> BITS3
+ val i3 = (io >>> BITS2) & MASK
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if (i4 < data4.length ) copy(data4 = copyUpdate(data4, i4, i3, i2, i1, elem))
+ else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem))
+ else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem))
+ else copy(suffix1 = copyUpdate(suffix1, i1, elem))
+ } else if(index >= len12) {
+ val io = index - len12
+ copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem))
+ } else if(index >= len1) {
+ val io = index - len1
+ copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem))
+ } else {
+ copy(prefix1 = copyUpdate(prefix1, index, elem))
+ }
+ } else throw ioob(index)
+ }
+
+ override def appended[B >: A](elem: B): Vector[B] = {
+ if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1)
+ else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1)
+ else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else if(data4.length < WIDTH-2) copy(data4 = copyAppend(data4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, data4, (WIDTH-2)*WIDTH3 + len123, empty5, wrap4(copyAppend(suffix3, copyAppend(suffix2, suffix1))), empty3, empty2, wrap1(elem), length0+1)
+ }
+
+ override def prepended[B >: A](elem: B): Vector[B] = {
+ if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, length0 = length0+1)
+ else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, length0 = length0+1)
+ else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, length0 = length0+1)
+ else if(data4.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), data4), length0 = length0+1)
+ else new Vector5(wrap1(elem), 1, empty2, 1, empty3, 1, wrap4(copyPrepend(copyPrepend(prefix1, prefix2), prefix3)), len123+1, empty5, data4, suffix3, suffix2, suffix1, length0+1)
+ }
+
+ override def map[B](f: A => B): Vector[B] =
+ copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f),
+ data4 = mapElems(4, data4, f),
+ suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f))
+
+ protected[this] def slice0(lo: Int, hi: Int): Vector[A] = {
+ val b = new VectorSliceBuilder(lo, hi)
+ b.consider(1, prefix1)
+ b.consider(2, prefix2)
+ b.consider(3, prefix3)
+ b.consider(4, data4)
+ b.consider(3, suffix3)
+ b.consider(2, suffix2)
+ b.consider(1, suffix1)
+ b.result()
+ }
+
+ override def tail: Vector[A] =
+ if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, length0 = length0-1)
+ else slice0(1, length0)
+
+ override def init: Vector[A] =
+ if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1)
+ else slice0(0, length0-1)
+
+ protected[immutable] def vectorSliceCount: Int = 7
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match {
+ case 0 => prefix1
+ case 1 => prefix2
+ case 2 => prefix3
+ case 3 => data4
+ case 4 => suffix3
+ case 5 => suffix2
+ case 6 => suffix1
+ }
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match {
+ case 0 => len1
+ case 1 => len12
+ case 2 => len123
+ case 3 => len123 + data4.length*WIDTH3
+ case 4 => len123 + data4.length*WIDTH3 + suffix3.length*WIDTH2
+ case 5 => length0 - suffix1.length
+ case 6 => length0
+ }
+
+ override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ prepend1IfSpace(prefix1, prefix) match {
+ case null => super.prependedAll0(prefix, k)
+ case prefix1b =>
+ val diff = prefix1b.length - prefix1.length
+ copy(prefix1 = prefix1b,
+ len1 = len1 + diff,
+ len12 = len12 + diff,
+ len123 = len123 + diff,
+ length0 = length0 + diff,
+ )
+ }
+
+ override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ val suffix1b = append1IfSpace(suffix1, suffix)
+ if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length)
+ else super.appendedAll0(suffix, k)
+ }
+}
+
+
+/** 5-dimensional radix-balanced finger tree */
+private final class Vector5[+A](_prefix1: Arr1, private[immutable] val len1: Int,
+ private[immutable] val prefix2: Arr2, private[immutable] val len12: Int,
+ private[immutable] val prefix3: Arr3, private[immutable] val len123: Int,
+ private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int,
+ private[immutable] val data5: Arr5,
+ private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1,
+ _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) {
+
+ @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1,
+ prefix2: Arr2 = prefix2, len12: Int = len12,
+ prefix3: Arr3 = prefix3, len123: Int = len123,
+ prefix4: Arr4 = prefix4, len1234: Int = len1234,
+ data5: Arr5 = data5,
+ suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1,
+ length0: Int = length0) =
+ new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, length0)
+
+ @inline def apply(index: Int): A = {
+ if(index >= 0 && index < length0) {
+ val io = index - len1234
+ if(io >= 0) {
+ val i5 = io >>> BITS4
+ val i4 = (io >>> BITS3) & MASK
+ val i3 = (io >>> BITS2) & MASK
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if(i5 < data5.length) data5(i5)(i4)(i3)(i2)(i1)
+ else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1)
+ else if(i3 < suffix3.length) suffix3(i3)(i2)(i1)
+ else if(i2 < suffix2.length) suffix2(i2)(i1)
+ else suffix1(i1)
+ } else if(index >= len123) {
+ val io = index - len123
+ prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK)
+ } else if(index >= len12) {
+ val io = index - len12
+ prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK)
+ } else if(index >= len1) {
+ val io = index - len1
+ prefix2(io >>> BITS)(io & MASK)
+ } else prefix1(index)
+ }.asInstanceOf[A] else throw ioob(index)
+ }
+
+ override def updated[B >: A](index: Int, elem: B): Vector[B] = {
+ if(index >= 0 && index < length0) {
+ if(index >= len1234) {
+ val io = index - len1234
+ val i5 = io >>> BITS4
+ val i4 = (io >>> BITS3) & MASK
+ val i3 = (io >>> BITS2) & MASK
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if (i5 < data5.length ) copy(data5 = copyUpdate(data5, i5, i4, i3, i2, i1, elem))
+ else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem))
+ else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem))
+ else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem))
+ else copy(suffix1 = copyUpdate(suffix1, i1, elem))
+ } else if(index >= len123) {
+ val io = index - len123
+ copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem))
+ } else if(index >= len12) {
+ val io = index - len12
+ copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem))
+ } else if(index >= len1) {
+ val io = index - len1
+ copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem))
+ } else {
+ copy(prefix1 = copyUpdate(prefix1, index, elem))
+ }
+ } else throw ioob(index)
+ }
+
+ override def appended[B >: A](elem: B): Vector[B] = {
+ if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1)
+ else if(suffix2.length < WIDTH-1) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1)
+ else if(suffix3.length < WIDTH-1) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else if(suffix4.length < WIDTH-1) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else if(data5.length < WIDTH-2) copy(data5 = copyAppend(data5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, (WIDTH-2)*WIDTH4 + len1234, empty6, wrap5(copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), empty4, empty3, empty2, wrap1(elem), length0+1)
+ }
+
+ override def prepended[B >: A](elem: B): Vector[B] = {
+ if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1)
+ else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, length0 = length0+1)
+ else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, length0 = length0+1)
+ else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, length0 = length0+1)
+ else if(data5.length < WIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), data5), length0 = length0+1)
+ else new Vector6(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, wrap5(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4)), len1234+1, empty6, data5, suffix4, suffix3, suffix2, suffix1, length0+1)
+ }
+
+ override def map[B](f: A => B): Vector[B] =
+ copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f),
+ data5 = mapElems(5, data5, f),
+ suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f))
+
+ protected[this] def slice0(lo: Int, hi: Int): Vector[A] = {
+ val b = new VectorSliceBuilder(lo, hi)
+ b.consider(1, prefix1)
+ b.consider(2, prefix2)
+ b.consider(3, prefix3)
+ b.consider(4, prefix4)
+ b.consider(5, data5)
+ b.consider(4, suffix4)
+ b.consider(3, suffix3)
+ b.consider(2, suffix2)
+ b.consider(1, suffix1)
+ b.result()
+ }
+
+ override def tail: Vector[A] =
+ if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, length0 = length0-1)
+ else slice0(1, length0)
+
+ override def init: Vector[A] =
+ if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1)
+ else slice0(0, length0-1)
+
+ protected[immutable] def vectorSliceCount: Int = 9
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match {
+ case 0 => prefix1
+ case 1 => prefix2
+ case 2 => prefix3
+ case 3 => prefix4
+ case 4 => data5
+ case 5 => suffix4
+ case 6 => suffix3
+ case 7 => suffix2
+ case 8 => suffix1
+ }
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match {
+ case 0 => len1
+ case 1 => len12
+ case 2 => len123
+ case 3 => len1234
+ case 4 => len1234 + data5.length*WIDTH4
+ case 5 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3
+ case 6 => len1234 + data5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2
+ case 7 => length0 - suffix1.length
+ case 8 => length0
+ }
+
+ override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ prepend1IfSpace(prefix1, prefix) match {
+ case null => super.prependedAll0(prefix, k)
+ case prefix1b =>
+ val diff = prefix1b.length - prefix1.length
+ copy(prefix1 = prefix1b,
+ len1 = len1 + diff,
+ len12 = len12 + diff,
+ len123 = len123 + diff,
+ len1234 = len1234 + diff,
+ length0 = length0 + diff,
+ )
+ }
+
+ override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ val suffix1b = append1IfSpace(suffix1, suffix)
+ if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length)
+ else super.appendedAll0(suffix, k)
+ }
+}
+
+
+/** 6-dimensional radix-balanced finger tree */
+private final class Vector6[+A](_prefix1: Arr1, private[immutable] val len1: Int,
+ private[immutable] val prefix2: Arr2, private[immutable] val len12: Int,
+ private[immutable] val prefix3: Arr3, private[immutable] val len123: Int,
+ private[immutable] val prefix4: Arr4, private[immutable] val len1234: Int,
+ private[immutable] val prefix5: Arr5, private[immutable] val len12345: Int,
+ private[immutable] val data6: Arr6,
+ private[immutable] val suffix5: Arr5, private[immutable] val suffix4: Arr4, private[immutable] val suffix3: Arr3, private[immutable] val suffix2: Arr2, _suffix1: Arr1,
+ _length0: Int) extends BigVector[A](_prefix1, _suffix1, _length0) {
+
+ @inline private[this] def copy(prefix1: Arr1 = prefix1, len1: Int = len1,
+ prefix2: Arr2 = prefix2, len12: Int = len12,
+ prefix3: Arr3 = prefix3, len123: Int = len123,
+ prefix4: Arr4 = prefix4, len1234: Int = len1234,
+ prefix5: Arr5 = prefix5, len12345: Int = len12345,
+ data6: Arr6 = data6,
+ suffix5: Arr5 = suffix5, suffix4: Arr4 = suffix4, suffix3: Arr3 = suffix3, suffix2: Arr2 = suffix2, suffix1: Arr1 = suffix1,
+ length0: Int = length0) =
+ new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, length0)
+
+ @inline def apply(index: Int): A = {
+ if(index >= 0 && index < length0) {
+ val io = index - len12345
+ if(io >= 0) {
+ val i6 = io >>> BITS5
+ val i5 = (io >>> BITS4) & MASK
+ val i4 = (io >>> BITS3) & MASK
+ val i3 = (io >>> BITS2) & MASK
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if(i6 < data6.length) data6(i6)(i5)(i4)(i3)(i2)(i1)
+ else if(i5 < suffix5.length) suffix5(i5)(i4)(i3)(i2)(i1)
+ else if(i4 < suffix4.length) suffix4(i4)(i3)(i2)(i1)
+ else if(i3 < suffix3.length) suffix3(i3)(i2)(i1)
+ else if(i2 < suffix2.length) suffix2(i2)(i1)
+ else suffix1(i1)
+ } else if(index >= len1234) {
+ val io = index - len1234
+ prefix5(io >>> BITS4)((io >>> BITS3) & MASK)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK)
+ } else if(index >= len123) {
+ val io = index - len123
+ prefix4(io >>> BITS3)((io >>> BITS2) & MASK)((io >>> BITS) & MASK)(io & MASK)
+ } else if(index >= len12) {
+ val io = index - len12
+ prefix3(io >>> BITS2)((io >>> BITS) & MASK)(io & MASK)
+ } else if(index >= len1) {
+ val io = index - len1
+ prefix2(io >>> BITS)(io & MASK)
+ } else prefix1(index)
+ }.asInstanceOf[A] else throw ioob(index)
+ }
+
+ override def updated[B >: A](index: Int, elem: B): Vector[B] = {
+ if(index >= 0 && index < length0) {
+ if(index >= len12345) {
+ val io = index - len12345
+ val i6 = io >>> BITS5
+ val i5 = (io >>> BITS4) & MASK
+ val i4 = (io >>> BITS3) & MASK
+ val i3 = (io >>> BITS2) & MASK
+ val i2 = (io >>> BITS) & MASK
+ val i1 = io & MASK
+ if (i6 < data6.length ) copy(data6 = copyUpdate(data6, i6, i5, i4, i3, i2, i1, elem))
+ else if(i5 < suffix5.length) copy(suffix5 = copyUpdate(suffix5, i5, i4, i3, i2, i1, elem))
+ else if(i4 < suffix4.length) copy(suffix4 = copyUpdate(suffix4, i4, i3, i2, i1, elem))
+ else if(i3 < suffix3.length) copy(suffix3 = copyUpdate(suffix3, i3, i2, i1, elem))
+ else if(i2 < suffix2.length) copy(suffix2 = copyUpdate(suffix2, i2, i1, elem))
+ else copy(suffix1 = copyUpdate(suffix1, i1, elem))
+ } else if(index >= len1234) {
+ val io = index - len1234
+ copy(prefix5 = copyUpdate(prefix5, io >>> BITS4, (io >>> BITS3) & MASK, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem))
+ } else if(index >= len123) {
+ val io = index - len123
+ copy(prefix4 = copyUpdate(prefix4, io >>> BITS3, (io >>> BITS2) & MASK, (io >>> BITS) & MASK, io & MASK, elem))
+ } else if(index >= len12) {
+ val io = index - len12
+ copy(prefix3 = copyUpdate(prefix3, io >>> BITS2, (io >>> BITS) & MASK, io & MASK, elem))
+ } else if(index >= len1) {
+ val io = index - len1
+ copy(prefix2 = copyUpdate(prefix2, io >>> BITS, io & MASK, elem))
+ } else {
+ copy(prefix1 = copyUpdate(prefix1, index, elem))
+ }
+ } else throw ioob(index)
+ }
+
+ override def appended[B >: A](elem: B): Vector[B] = {
+ if (suffix1.length < WIDTH ) copy(suffix1 = copyAppend1(suffix1, elem), length0 = length0+1)
+ else if(suffix2.length < WIDTH-1 ) copy(suffix2 = copyAppend(suffix2, suffix1), suffix1 = wrap1(elem), length0 = length0+1)
+ else if(suffix3.length < WIDTH-1 ) copy(suffix3 = copyAppend(suffix3, copyAppend(suffix2, suffix1)), suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else if(suffix4.length < WIDTH-1 ) copy(suffix4 = copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))), suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else if(suffix5.length < WIDTH-1 ) copy(suffix5 = copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1)))), suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else if(data6.length < LASTWIDTH-2) copy(data6 = copyAppend(data6, copyAppend(suffix5, copyAppend(suffix4, copyAppend(suffix3, copyAppend(suffix2, suffix1))))), suffix5 = empty5, suffix4 = empty4, suffix3 = empty3, suffix2 = empty2, suffix1 = wrap1(elem), length0 = length0+1)
+ else throw new IllegalArgumentException
+ }
+
+ override def prepended[B >: A](elem: B): Vector[B] = {
+ if (len1 < WIDTH ) copy(copyPrepend1(elem, prefix1), len1+1, len12 = len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1)
+ else if(len12 < WIDTH2 ) copy(wrap1(elem), 1, copyPrepend(prefix1, prefix2), len12+1, len123 = len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1)
+ else if(len123 < WIDTH3 ) copy(wrap1(elem), 1, empty2, 1, copyPrepend(copyPrepend(prefix1, prefix2), prefix3), len123+1, len1234 = len1234+1, len12345 = len12345+1, length0 = length0+1)
+ else if(len1234 < WIDTH4 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), len1234+1, len12345 = len12345+1, length0 = length0+1)
+ else if(len12345 < WIDTH5 ) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), len12345+1, length0 = length0+1)
+ else if(data6.length < LASTWIDTH-2) copy(wrap1(elem), 1, empty2, 1, empty3, 1, empty4, 1, empty5, 1, copyPrepend(copyPrepend(copyPrepend(copyPrepend(copyPrepend(prefix1, prefix2), prefix3), prefix4), prefix5), data6), length0 = length0+1)
+ else throw new IllegalArgumentException
+ }
+
+ override def map[B](f: A => B): Vector[B] =
+ copy(prefix1 = mapElems1(prefix1, f), prefix2 = mapElems(2, prefix2, f), prefix3 = mapElems(3, prefix3, f), prefix4 = mapElems(4, prefix4, f), prefix5 = mapElems(5, prefix5, f),
+ data6 = mapElems(6, data6, f),
+ suffix5 = mapElems(5, suffix5, f), suffix4 = mapElems(4, suffix4, f), suffix3 = mapElems(3, suffix3, f), suffix2 = mapElems(2, suffix2, f), suffix1 = mapElems1(suffix1, f))
+
+ protected[this] def slice0(lo: Int, hi: Int): Vector[A] = {
+ val b = new VectorSliceBuilder(lo, hi)
+ b.consider(1, prefix1)
+ b.consider(2, prefix2)
+ b.consider(3, prefix3)
+ b.consider(4, prefix4)
+ b.consider(5, prefix5)
+ b.consider(6, data6)
+ b.consider(5, suffix5)
+ b.consider(4, suffix4)
+ b.consider(3, suffix3)
+ b.consider(2, suffix2)
+ b.consider(1, suffix1)
+ b.result()
+ }
+
+ override def tail: Vector[A] =
+ if(len1 > 1) copy(copyTail(prefix1), len1-1, len12 = len12-1, len123 = len123-1, len1234 = len1234-1, len12345 = len12345-1, length0 = length0-1)
+ else slice0(1, length0)
+
+ override def init: Vector[A] =
+ if(suffix1.length > 1) copy(suffix1 = copyInit(suffix1), length0 = length0-1)
+ else slice0(0, length0-1)
+
+ protected[immutable] def vectorSliceCount: Int = 11
+ protected[immutable] def vectorSlice(idx: Int): Array[_ <: AnyRef] = (idx: @switch) match {
+ case 0 => prefix1
+ case 1 => prefix2
+ case 2 => prefix3
+ case 3 => prefix4
+ case 4 => prefix5
+ case 5 => data6
+ case 6 => suffix5
+ case 7 => suffix4
+ case 8 => suffix3
+ case 9 => suffix2
+ case 10 => suffix1
+ }
+ protected[immutable] def vectorSlicePrefixLength(idx: Int): Int = (idx: @switch) match {
+ case 0 => len1
+ case 1 => len12
+ case 2 => len123
+ case 3 => len1234
+ case 4 => len12345
+ case 5 => len12345 + data6.length*WIDTH5
+ case 6 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4
+ case 7 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3
+ case 8 => len12345 + data6.length*WIDTH5 + suffix5.length*WIDTH4 + suffix4.length*WIDTH3 + suffix3.length*WIDTH2
+ case 9 => length0 - suffix1.length
+ case 10 => length0
+ }
+
+ override protected[this] def prependedAll0[B >: A](prefix: collection.IterableOnce[B]^, k: Int): Vector[B] =
+ prepend1IfSpace(prefix1, prefix) match {
+ case null => super.prependedAll0(prefix, k)
+ case prefix1b =>
+ val diff = prefix1b.length - prefix1.length
+ copy(prefix1 = prefix1b,
+ len1 = len1 + diff,
+ len12 = len12 + diff,
+ len123 = len123 + diff,
+ len1234 = len1234 + diff,
+ len12345 = len12345 + diff,
+ length0 = length0 + diff,
+ )
+ }
+
+ override protected[this] def appendedAll0[B >: A](suffix: collection.IterableOnce[B]^, k: Int): Vector[B] = {
+ val suffix1b = append1IfSpace(suffix1, suffix)
+ if(suffix1b ne null) copy(suffix1 = suffix1b, length0 = length0-suffix1.length+suffix1b.length)
+ else super.appendedAll0(suffix, k)
+ }
+}
+
+
+/** Helper class for vector slicing. It is initialized with the validated start and end index,
+ * then the vector slices are added in succession with `consider`. No matter what the dimension
+ * of the originating vector is or where the cut is performed, this always results in a
+ * structure with the highest-dimensional data in the middle and fingers of decreasing dimension
+ * at both ends, which can be turned into a new vector with very little rebalancing.
+ */
+private final class VectorSliceBuilder(lo: Int, hi: Int) {
+ //println(s"***** VectorSliceBuilder($lo, $hi)")
+
+ private[this] val slices = new Array[Array[AnyRef]](11)
+ private[this] var len, pos, maxDim = 0
+
+ @inline private[this] def prefixIdx(n: Int) = n-1
+ @inline private[this] def suffixIdx(n: Int) = 11-n
+
+ def consider[T <: AnyRef](n: Int, a: Array[T]): Unit = {
+ //println(s"***** consider($n, /${a.length})")
+ val count = a.length * (1 << (BITS*(n-1)))
+ val lo0 = mmax(lo-pos, 0)
+ val hi0 = mmin(hi-pos, count)
+ if(hi0 > lo0) {
+ addSlice(n, a, lo0, hi0)
+ len += (hi0 - lo0)
+ }
+ pos += count
+ }
+
+ private[this] def addSlice[T <: AnyRef](n: Int, a: Array[T], lo: Int, hi: Int): Unit = {
+ //println(s"***** addSlice($n, /${a.length}, $lo, $hi)")
+ if(n == 1) {
+ add(1, copyOrUse(a, lo, hi))
+ } else {
+ val bitsN = BITS * (n-1)
+ val widthN = 1 << bitsN
+ val loN = lo >>> bitsN
+ val hiN = hi >>> bitsN
+ val loRest = lo & (widthN - 1)
+ val hiRest = hi & (widthN - 1)
+ //println(s"***** bitsN=$bitsN, loN=$loN, hiN=$hiN, loRest=$loRest, hiRest=$hiRest")
+ if(loRest == 0) {
+ if(hiRest == 0) {
+ add(n, copyOrUse(a, loN, hiN))
+ } else {
+ if(hiN > loN) add(n, copyOrUse(a, loN, hiN))
+ addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest)
+ }
+ } else {
+ if(hiN == loN) {
+ addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, hiRest)
+ } else {
+ addSlice(n-1, a(loN).asInstanceOf[Array[AnyRef]], loRest, widthN)
+ if(hiRest == 0) {
+ if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN))
+ } else {
+ if(hiN > loN+1) add(n, copyOrUse(a, loN+1, hiN))
+ addSlice(n-1, a(hiN).asInstanceOf[Array[AnyRef]], 0, hiRest)
+ }
+ }
+ }
+ }
+ }
+
+ private[this] def add[T <: AnyRef](n: Int, a: Array[T]): Unit = {
+ //println(s"***** add($n, /${a.length})")
+ val idx =
+ if(n <= maxDim) suffixIdx(n)
+ else { maxDim = n; prefixIdx(n) }
+ slices(idx) = a.asInstanceOf[Array[AnyRef]]
+ }
+
+ def result[A](): Vector[A] = {
+ //println(s"***** result: $len, $maxDim")
+ if(len <= 32) {
+ if(len == 0) Vector0
+ else {
+ val prefix1 = slices(prefixIdx(1))
+ val suffix1 = slices(suffixIdx(1))
+ //println(s"***** prefix1: ${if(prefix1 == null) "null" else prefix1.mkString("[", ",", "]")}, suffix1: ${if(suffix1 == null) "null" else suffix1.mkString("[", ",", "]")}")
+ val a: Arr1 =
+ if(prefix1 ne null) {
+ if(suffix1 ne null) concatArrays(prefix1, suffix1)
+ else prefix1
+ } else if(suffix1 ne null) suffix1
+ else {
+ val prefix2 = slices(prefixIdx(2)).asInstanceOf[Arr2]
+ if(prefix2 ne null) prefix2(0)
+ else {
+ val suffix2 = slices(suffixIdx(2)).asInstanceOf[Arr2]
+ suffix2(0)
+ }
+ }
+ new Vector1(a)
+ }
+ } else {
+ balancePrefix(1)
+ balanceSuffix(1)
+ var resultDim = maxDim
+ if(resultDim < 6) {
+ val pre = slices(prefixIdx(maxDim))
+ val suf = slices(suffixIdx(maxDim))
+ if((pre ne null) && (suf ne null)) {
+ // The highest-dimensional data consists of two slices: concatenate if they fit into the main data array,
+ // otherwise increase the dimension
+ if(pre.length + suf.length <= WIDTH-2) {
+ slices(prefixIdx(maxDim)) = concatArrays(pre, suf)
+ slices(suffixIdx(maxDim)) = null
+ } else resultDim += 1
+ } else {
+ // A single highest-dimensional slice could have length WIDTH-1 if it came from a prefix or suffix but we
+ // only allow WIDTH-2 for the main data, so increase the dimension in this case
+ val one = if(pre ne null) pre else suf
+ if(one.length > WIDTH-2) resultDim += 1
+ }
+ }
+ val prefix1 = slices(prefixIdx(1))
+ val suffix1 = slices(suffixIdx(1))
+ val len1 = prefix1.length
+ val res = (resultDim: @switch) match {
+ case 2 =>
+ val data2 = dataOr(2, empty2)
+ new Vector2[A](prefix1, len1, data2, suffix1, len)
+ case 3 =>
+ val prefix2 = prefixOr(2, empty2)
+ val data3 = dataOr(3, empty3)
+ val suffix2 = suffixOr(2, empty2)
+ val len12 = len1 + (prefix2.length * WIDTH)
+ new Vector3[A](prefix1, len1, prefix2, len12, data3, suffix2, suffix1, len)
+ case 4 =>
+ val prefix2 = prefixOr(2, empty2)
+ val prefix3 = prefixOr(3, empty3)
+ val data4 = dataOr(4, empty4)
+ val suffix3 = suffixOr(3, empty3)
+ val suffix2 = suffixOr(2, empty2)
+ val len12 = len1 + (prefix2.length * WIDTH)
+ val len123 = len12 + (prefix3.length * WIDTH2)
+ new Vector4[A](prefix1, len1, prefix2, len12, prefix3, len123, data4, suffix3, suffix2, suffix1, len)
+ case 5 =>
+ val prefix2 = prefixOr(2, empty2)
+ val prefix3 = prefixOr(3, empty3)
+ val prefix4 = prefixOr(4, empty4)
+ val data5 = dataOr(5, empty5)
+ val suffix4 = suffixOr(4, empty4)
+ val suffix3 = suffixOr(3, empty3)
+ val suffix2 = suffixOr(2, empty2)
+ val len12 = len1 + (prefix2.length * WIDTH)
+ val len123 = len12 + (prefix3.length * WIDTH2)
+ val len1234 = len123 + (prefix4.length * WIDTH3)
+ new Vector5[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data5, suffix4, suffix3, suffix2, suffix1, len)
+ case 6 =>
+ val prefix2 = prefixOr(2, empty2)
+ val prefix3 = prefixOr(3, empty3)
+ val prefix4 = prefixOr(4, empty4)
+ val prefix5 = prefixOr(5, empty5)
+ val data6 = dataOr(6, empty6)
+ val suffix5 = suffixOr(5, empty5)
+ val suffix4 = suffixOr(4, empty4)
+ val suffix3 = suffixOr(3, empty3)
+ val suffix2 = suffixOr(2, empty2)
+ val len12 = len1 + (prefix2.length * WIDTH)
+ val len123 = len12 + (prefix3.length * WIDTH2)
+ val len1234 = len123 + (prefix4.length * WIDTH3)
+ val len12345 = len1234 + (prefix5.length * WIDTH4)
+ new Vector6[A](prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data6, suffix5, suffix4, suffix3, suffix2, suffix1, len)
+ }
+ res
+ }
+ }
+
+ @inline private[this] def prefixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = {
+ val p = slices(prefixIdx(n))
+ if(p ne null) p.asInstanceOf[Array[T]] else a
+ }
+
+ @inline private[this] def suffixOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = {
+ val s = slices(suffixIdx(n))
+ if(s ne null) s.asInstanceOf[Array[T]] else a
+ }
+
+ @inline private[this] def dataOr[T <: AnyRef](n: Int, a: Array[T]): Array[T] = {
+ val p = slices(prefixIdx(n))
+ if(p ne null) p.asInstanceOf[Array[T]]
+ else {
+ val s = slices(suffixIdx(n))
+ if(s ne null) s.asInstanceOf[Array[T]] else a
+ }
+ }
+
+ /** Ensure prefix is not empty */
+ private[this] def balancePrefix(n: Int): Unit = {
+ if(slices(prefixIdx(n)) eq null) {
+ if(n == maxDim) {
+ slices(prefixIdx(n)) = slices(suffixIdx(n))
+ slices(suffixIdx(n)) = null
+ } else {
+ balancePrefix(n+1)
+ val preN1 = slices(prefixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]]
+ //assert(preN1 ne null)
+ slices(prefixIdx(n)) = preN1(0)
+ if(preN1.length == 1) {
+ slices(prefixIdx(n+1)) = null
+ if((maxDim == n+1) && (slices(suffixIdx(n+1)) eq null)) maxDim = n
+ } else {
+ slices(prefixIdx(n+1)) = copyOfRange(preN1, 1, preN1.length).asInstanceOf[Array[AnyRef]]
+ }
+ }
+ }
+ }
+
+ /** Ensure suffix is not empty */
+ private[this] def balanceSuffix(n: Int): Unit = {
+ if(slices(suffixIdx(n)) eq null) {
+ if(n == maxDim) {
+ slices(suffixIdx(n)) = slices(prefixIdx(n))
+ slices(prefixIdx(n)) = null
+ } else {
+ balanceSuffix(n+1)
+ val sufN1 = slices(suffixIdx(n+1)).asInstanceOf[Array[Array[AnyRef]]]
+ //assert(sufN1 ne null, s"n=$n, maxDim=$maxDim, slices=${slices.mkString(",")}")
+ slices(suffixIdx(n)) = sufN1(sufN1.length-1)
+ if(sufN1.length == 1) {
+ slices(suffixIdx(n+1)) = null
+ if((maxDim == n+1) && (slices(prefixIdx(n+1)) eq null)) maxDim = n
+ } else {
+ slices(suffixIdx(n+1)) = copyOfRange(sufN1, 0, sufN1.length-1).asInstanceOf[Array[AnyRef]]
+ }
+ }
+ }
+ }
+
+ override def toString: String =
+ s"VectorSliceBuilder(lo=$lo, hi=$hi, len=$len, pos=$pos, maxDim=$maxDim)"
+
+ private[immutable] def getSlices: Array[Array[AnyRef]] = slices
+}
+
+
+final class VectorBuilder[A] extends ReusableBuilder[A, Vector[A]] {
+
+ private[this] var a6: Arr6 = _
+ private[this] var a5: Arr5 = _
+ private[this] var a4: Arr4 = _
+ private[this] var a3: Arr3 = _
+ private[this] var a2: Arr2 = _
+ private[this] var a1: Arr1 = new Arr1(WIDTH)
+ private[this] var len1, lenRest, offset = 0
+ private[this] var prefixIsRightAligned = false
+ private[this] var depth = 1
+
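+  // Minimal usage sketch (the standard ReusableBuilder protocol):
+  //   val b = new VectorBuilder[Int]
+  //   b.addOne(1); b.addAll(Vector(2, 3))
+  //   val v = b.result()   // Vector(1, 2, 3); call b.clear() before reusing the builder
+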
+ @inline private[this] final def setLen(i: Int): Unit = {
+ len1 = i & MASK
+ lenRest = i - len1
+ }
+
+ override def knownSize: Int = len1 + lenRest - offset
+
+ @inline def size: Int = knownSize
+ @inline def isEmpty: Boolean = knownSize == 0
+ @inline def nonEmpty: Boolean = knownSize != 0
+
+ def clear(): Unit = {
+ a6 = null
+ a5 = null
+ a4 = null
+ a3 = null
+ a2 = null
+ a1 = new Arr1(WIDTH)
+ len1 = 0
+ lenRest = 0
+ offset = 0
+ prefixIsRightAligned = false
+ depth = 1
+ }
+
+ private[immutable] def initSparse(size: Int, elem: A): Unit = {
+ setLen(size)
+ Arrays.fill(a1, elem)
+ if(size > WIDTH) {
+ a2 = new Array(WIDTH)
+ Arrays.fill(a2.asInstanceOf[Array[AnyRef]], a1)
+ if(size > WIDTH2) {
+ a3 = new Array(WIDTH)
+ Arrays.fill(a3.asInstanceOf[Array[AnyRef]], a2)
+ if(size > WIDTH3) {
+ a4 = new Array(WIDTH)
+ Arrays.fill(a4.asInstanceOf[Array[AnyRef]], a3)
+ if(size > WIDTH4) {
+ a5 = new Array(WIDTH)
+ Arrays.fill(a5.asInstanceOf[Array[AnyRef]], a4)
+ if(size > WIDTH5) {
+ a6 = new Array(LASTWIDTH)
+ Arrays.fill(a6.asInstanceOf[Array[AnyRef]], a5)
+ depth = 6
+ } else depth = 5
+ } else depth = 4
+ } else depth = 3
+ } else depth = 2
+ } else depth = 1
+ }
+
+ private[immutable] def initFrom(prefix1: Arr1): Unit = {
+ depth = 1
+ setLen(prefix1.length)
+ a1 = copyOrUse(prefix1, 0, WIDTH)
+ if(len1 == 0 && lenRest > 0) {
+ // force advance() on next addition:
+ len1 = WIDTH
+ lenRest -= WIDTH
+ }
+ }
+
+ private[immutable] def initFrom(v: Vector[_]): this.type = {
+ (v.vectorSliceCount: @switch) match {
+ case 0 =>
+ case 1 =>
+ val v1 = v.asInstanceOf[Vector1[_]]
+ depth = 1
+ setLen(v1.prefix1.length)
+ a1 = copyOrUse(v1.prefix1, 0, WIDTH)
+ case 3 =>
+ val v2 = v.asInstanceOf[Vector2[_]]
+ val d2 = v2.data2
+ a1 = copyOrUse(v2.suffix1, 0, WIDTH)
+ depth = 2
+ offset = WIDTH - v2.len1
+ setLen(v2.length0 + offset)
+ a2 = new Arr2(WIDTH)
+ a2(0) = v2.prefix1
+ System.arraycopy(d2, 0, a2, 1, d2.length)
+ a2(d2.length+1) = a1
+ case 5 =>
+ val v3 = v.asInstanceOf[Vector3[_]]
+ val d3 = v3.data3
+ val s2 = v3.suffix2
+ a1 = copyOrUse(v3.suffix1, 0, WIDTH)
+ depth = 3
+ offset = WIDTH2 - v3.len12
+ setLen(v3.length0 + offset)
+ a3 = new Arr3(WIDTH)
+ a3(0) = copyPrepend(v3.prefix1, v3.prefix2)
+ System.arraycopy(d3, 0, a3, 1, d3.length)
+ a2 = copyOf(s2, WIDTH)
+ a3(d3.length+1) = a2
+ a2(s2.length) = a1
+ case 7 =>
+ val v4 = v.asInstanceOf[Vector4[_]]
+ val d4 = v4.data4
+ val s3 = v4.suffix3
+ val s2 = v4.suffix2
+ a1 = copyOrUse(v4.suffix1, 0, WIDTH)
+ depth = 4
+ offset = WIDTH3 - v4.len123
+ setLen(v4.length0 + offset)
+ a4 = new Arr4(WIDTH)
+ a4(0) = copyPrepend(copyPrepend(v4.prefix1, v4.prefix2), v4.prefix3)
+ System.arraycopy(d4, 0, a4, 1, d4.length)
+ a3 = copyOf(s3, WIDTH)
+ a2 = copyOf(s2, WIDTH)
+ a4(d4.length+1) = a3
+ a3(s3.length) = a2
+ a2(s2.length) = a1
+ case 9 =>
+ val v5 = v.asInstanceOf[Vector5[_]]
+ val d5 = v5.data5
+ val s4 = v5.suffix4
+ val s3 = v5.suffix3
+ val s2 = v5.suffix2
+ a1 = copyOrUse(v5.suffix1, 0, WIDTH)
+ depth = 5
+ offset = WIDTH4 - v5.len1234
+ setLen(v5.length0 + offset)
+ a5 = new Arr5(WIDTH)
+ a5(0) = copyPrepend(copyPrepend(copyPrepend(v5.prefix1, v5.prefix2), v5.prefix3), v5.prefix4)
+ System.arraycopy(d5, 0, a5, 1, d5.length)
+ a4 = copyOf(s4, WIDTH)
+ a3 = copyOf(s3, WIDTH)
+ a2 = copyOf(s2, WIDTH)
+ a5(d5.length+1) = a4
+ a4(s4.length) = a3
+ a3(s3.length) = a2
+ a2(s2.length) = a1
+ case 11 =>
+ val v6 = v.asInstanceOf[Vector6[_]]
+ val d6 = v6.data6
+ val s5 = v6.suffix5
+ val s4 = v6.suffix4
+ val s3 = v6.suffix3
+ val s2 = v6.suffix2
+ a1 = copyOrUse(v6.suffix1, 0, WIDTH)
+ depth = 6
+ offset = WIDTH5 - v6.len12345
+ setLen(v6.length0 + offset)
+ a6 = new Arr6(LASTWIDTH)
+ a6(0) = copyPrepend(copyPrepend(copyPrepend(copyPrepend(v6.prefix1, v6.prefix2), v6.prefix3), v6.prefix4), v6.prefix5)
+ System.arraycopy(d6, 0, a6, 1, d6.length)
+ a5 = copyOf(s5, WIDTH)
+ a4 = copyOf(s4, WIDTH)
+ a3 = copyOf(s3, WIDTH)
+ a2 = copyOf(s2, WIDTH)
+ a6(d6.length+1) = a5
+ a5(s5.length) = a4
+ a4(s4.length) = a3
+ a3(s3.length) = a2
+ a2(s2.length) = a1
+ }
+ if(len1 == 0 && lenRest > 0) {
+ // force advance() on next addition:
+ len1 = WIDTH
+ lenRest -= WIDTH
+ }
+ this
+ }
+
+ //TODO Make public; this method is only private for binary compatibility
+ private[collection] def alignTo(before: Int, bigVector: Vector[A]): this.type = {
+ if (len1 != 0 || lenRest != 0)
+      throw new UnsupportedOperationException("A non-empty VectorBuilder cannot be aligned retrospectively. Please call .clear() or use a new VectorBuilder.")
+ val (prefixLength, maxPrefixLength) = bigVector match {
+ case Vector0 => (0, 1)
+ case v1: Vector1[_] => (0, 1)
+ case v2: Vector2[_] => (v2.len1, WIDTH)
+ case v3: Vector3[_] => (v3.len12, WIDTH2)
+ case v4: Vector4[_] => (v4.len123, WIDTH3)
+ case v5: Vector5[_] => (v5.len1234, WIDTH4)
+ case v6: Vector6[_] => (v6.len12345, WIDTH5)
+ }
+    if (maxPrefixLength == 1) return this // aligning does not really make sense for a vector of <= 32 elements
+ val overallPrefixLength = (before + prefixLength) % maxPrefixLength
+ offset = (maxPrefixLength - overallPrefixLength) % maxPrefixLength
+ // pretend there are already `offset` elements added
+ advanceN(offset & ~MASK)
+ len1 = offset & MASK
+ prefixIsRightAligned = true
+ this
+ }
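+
+  // Hedged note: after alignment, later bulk additions of the big vector's slices land
+  // on array boundaries (lenRest % WIDTHn == 0), so addArrN can copy whole arrays
+  // instead of descending element by element.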
+
+ /**
+ * Removes `offset` leading `null`s in the prefix.
+ * This is needed after calling `alignTo` and subsequent additions,
+ * directly before the result is used for creating a new Vector.
+ * Note that the outermost array keeps its length to keep the
+ * Builder re-usable.
+ *
+   * Example:
+ * a2 = Array(null, ..., null, Array(null, .., null, 0, 1, .., x), Array(x+1, .., x+32), ...)
+ * becomes
+ * a2 = Array(Array(0, 1, .., x), Array(x+1, .., x+32), ..., ?, ..., ?)
+ */
+ private[this] def leftAlignPrefix(): Unit = {
+ @inline def shrinkOffsetIfToLarge(width: Int): Unit = {
+ val newOffset = offset % width
+ lenRest -= offset - newOffset
+ offset = newOffset
+ }
+ var a: Array[AnyRef] = null // the array we modify
+ var aParent: Array[AnyRef] = null // a's parent, so aParent(0) == a
+ if (depth >= 6) {
+ a = a6.asInstanceOf[Array[AnyRef]]
+ val i = offset >>> BITS5
+ if (i > 0) System.arraycopy(a, i, a, 0, LASTWIDTH - i)
+ shrinkOffsetIfToLarge(WIDTH5)
+ if ((lenRest >>> BITS5) == 0) depth = 5
+ aParent = a
+ a = a(0).asInstanceOf[Array[AnyRef]]
+ }
+ if (depth >= 5) {
+ if (a == null) a = a5.asInstanceOf[Array[AnyRef]]
+ val i = (offset >>> BITS4) & MASK
+ if (depth == 5) {
+ if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i)
+ a5 = a.asInstanceOf[Arr5]
+ shrinkOffsetIfToLarge(WIDTH4)
+ if ((lenRest >>> BITS4) == 0) depth = 4
+ } else {
+ if (i > 0) a = copyOfRange(a, i, WIDTH)
+ aParent(0) = a
+ }
+ aParent = a
+ a = a(0).asInstanceOf[Array[AnyRef]]
+ }
+ if (depth >= 4) {
+ if (a == null) a = a4.asInstanceOf[Array[AnyRef]]
+ val i = (offset >>> BITS3) & MASK
+ if (depth == 4) {
+ if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i)
+ a4 = a.asInstanceOf[Arr4]
+ shrinkOffsetIfToLarge(WIDTH3)
+ if ((lenRest >>> BITS3) == 0) depth = 3
+ } else {
+ if (i > 0) a = copyOfRange(a, i, WIDTH)
+ aParent(0) = a
+ }
+ aParent = a
+ a = a(0).asInstanceOf[Array[AnyRef]]
+ }
+ if (depth >= 3) {
+ if (a == null) a = a3.asInstanceOf[Array[AnyRef]]
+ val i = (offset >>> BITS2) & MASK
+ if (depth == 3) {
+ if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i)
+ a3 = a.asInstanceOf[Arr3]
+ shrinkOffsetIfToLarge(WIDTH2)
+ if ((lenRest >>> BITS2) == 0) depth = 2
+ } else {
+ if (i > 0) a = copyOfRange(a, i, WIDTH)
+ aParent(0) = a
+ }
+ aParent = a
+ a = a(0).asInstanceOf[Array[AnyRef]]
+ }
+ if (depth >= 2) {
+ if (a == null) a = a2.asInstanceOf[Array[AnyRef]]
+ val i = (offset >>> BITS) & MASK
+ if (depth == 2) {
+ if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i)
+ a2 = a.asInstanceOf[Arr2]
+ shrinkOffsetIfToLarge(WIDTH)
+ if ((lenRest >>> BITS) == 0) depth = 1
+ } else {
+ if (i > 0) a = copyOfRange(a, i, WIDTH)
+ aParent(0) = a
+ }
+ aParent = a
+ a = a(0).asInstanceOf[Array[AnyRef]]
+ }
+ if (depth >= 1) {
+ if (a == null) a = a1.asInstanceOf[Array[AnyRef]]
+ val i = offset & MASK
+ if (depth == 1) {
+ if (i > 0) System.arraycopy(a, i, a, 0, WIDTH - i)
+ a1 = a.asInstanceOf[Arr1]
+ len1 -= offset
+ offset = 0
+ } else {
+ if (i > 0) a = copyOfRange(a, i, WIDTH)
+ aParent(0) = a
+ }
+ }
+ prefixIsRightAligned = false
+ }
+
+ def addOne(elem: A): this.type = {
+ if(len1 == WIDTH) advance()
+ a1(len1) = elem.asInstanceOf[AnyRef]
+ len1 += 1
+ this
+ }
+
+ private[this] def addArr1(data: Arr1): Unit = {
+ val dl = data.length
+ if(dl > 0) {
+ if(len1 == WIDTH) advance()
+ val copy1 = mmin(WIDTH-len1, dl)
+ val copy2 = dl - copy1
+ System.arraycopy(data, 0, a1, len1, copy1)
+ len1 += copy1
+ if(copy2 > 0) {
+ advance()
+ System.arraycopy(data, copy1, a1, 0, copy2)
+ len1 += copy2
+ }
+ }
+ }
+
+ private[this] def addArrN(slice: Array[AnyRef], dim: Int): Unit = {
+// assert(dim >= 2)
+// assert(lenRest % WIDTH == 0)
+// assert(len1 == 0 || len1 == WIDTH)
+ if (slice.isEmpty) return
+ if (len1 == WIDTH) advance()
+ val sl = slice.length
+ (dim: @switch) match {
+ case 2 =>
+ // lenRest is always a multiple of WIDTH
+ val copy1 = mmin(((WIDTH2 - lenRest) >>> BITS) & MASK, sl)
+ val copy2 = sl - copy1
+ val destPos = (lenRest >>> BITS) & MASK
+ System.arraycopy(slice, 0, a2, destPos, copy1)
+ advanceN(WIDTH * copy1)
+ if (copy2 > 0) {
+ System.arraycopy(slice, copy1, a2, 0, copy2)
+ advanceN(WIDTH * copy2)
+ }
+ case 3 =>
+ if (lenRest % WIDTH2 != 0) {
+          // lenRest is not a multiple of WIDTH2, so this slice does not align; try the next lower dimension
+ slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 2))
+ return
+ }
+ val copy1 = mmin(((WIDTH3 - lenRest) >>> BITS2) & MASK, sl)
+ val copy2 = sl - copy1
+ val destPos = (lenRest >>> BITS2) & MASK
+ System.arraycopy(slice, 0, a3, destPos, copy1)
+ advanceN(WIDTH2 * copy1)
+ if (copy2 > 0) {
+ System.arraycopy(slice, copy1, a3, 0, copy2)
+ advanceN(WIDTH2 * copy2)
+ }
+ case 4 =>
+ if (lenRest % WIDTH3 != 0) {
+          // lenRest is not a multiple of WIDTH3, so this slice does not align; try the next lower dimension
+ slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 3))
+ return
+ }
+ val copy1 = mmin(((WIDTH4 - lenRest) >>> BITS3) & MASK, sl)
+ val copy2 = sl - copy1
+ val destPos = (lenRest >>> BITS3) & MASK
+ System.arraycopy(slice, 0, a4, destPos, copy1)
+ advanceN(WIDTH3 * copy1)
+ if (copy2 > 0) {
+ System.arraycopy(slice, copy1, a4, 0, copy2)
+ advanceN(WIDTH3 * copy2)
+ }
+ case 5 =>
+ if (lenRest % WIDTH4 != 0) {
+          // lenRest is not a multiple of WIDTH4, so this slice does not align; try the next lower dimension
+ slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 4))
+ return
+ }
+ val copy1 = mmin(((WIDTH5 - lenRest) >>> BITS4) & MASK, sl)
+ val copy2 = sl - copy1
+ val destPos = (lenRest >>> BITS4) & MASK
+ System.arraycopy(slice, 0, a5, destPos, copy1)
+ advanceN(WIDTH4 * copy1)
+ if (copy2 > 0) {
+ System.arraycopy(slice, copy1, a5, 0, copy2)
+ advanceN(WIDTH4 * copy2)
+ }
+ case 6 => // note width is now LASTWIDTH
+ if (lenRest % WIDTH5 != 0) {
+          // lenRest is not a multiple of WIDTH5, so this slice does not align; try the next lower dimension
+ slice.foreach(e => addArrN(e.asInstanceOf[Array[AnyRef]], 5))
+ return
+ }
+ val copy1 = sl
+ // there is no copy2 because there can't be another a6 to copy to
+ val destPos = lenRest >>> BITS5
+ if (destPos + copy1 > LASTWIDTH)
+ throw new IllegalArgumentException("exceeding 2^31 elements")
+ System.arraycopy(slice, 0, a6, destPos, copy1)
+ advanceN(WIDTH5 * copy1)
+ }
+ }
+
+ private[this] def addVector(xs: Vector[A]): this.type = {
+ val sliceCount = xs.vectorSliceCount
+ var sliceIdx = 0
+ while(sliceIdx < sliceCount) {
+ val slice = xs.vectorSlice(sliceIdx)
+ vectorSliceDim(sliceCount, sliceIdx) match {
+ case 1 => addArr1(slice.asInstanceOf[Arr1])
+ case n if len1 == WIDTH || len1 == 0 =>
+ addArrN(slice.asInstanceOf[Array[AnyRef]], n)
+ case n => foreachRec(n-2, slice, addArr1)
+ }
+ sliceIdx += 1
+ }
+ this
+ }
+
+ override def addAll(xs: IterableOnce[A]^): this.type = xs match {
+ case v: Vector[_] =>
+ if(len1 == 0 && lenRest == 0 && !prefixIsRightAligned) initFrom(v)
+ else addVector(v.asInstanceOf[Vector[A]])
+ case _ =>
+ super.addAll(xs)
+ }
+
+ private[this] def advance(): Unit = {
+ val idx = lenRest + WIDTH
+ val xor = idx ^ lenRest
+ lenRest = idx
+ len1 = 0
+ advance1(idx, xor)
+ }
+
+ private[this] def advanceN(n: Int): Unit = if (n > 0) {
+ // assert(n % 32 == 0)
+ val idx = lenRest + n
+ val xor = idx ^ lenRest
+ lenRest = idx
+ len1 = 0
+ advance1(idx, xor)
+ }
+
+ private[this] def advance1(idx: Int, xor: Int): Unit = {
+ if (xor <= 0) { // level = 6 or something very unexpected happened
+ throw new IllegalArgumentException(s"advance1($idx, $xor): a1=$a1, a2=$a2, a3=$a3, a4=$a4, a5=$a5, a6=$a6, depth=$depth")
+ } else if (xor < WIDTH2) { // level = 1
+ if (depth <= 1) { a2 = new Array(WIDTH); a2(0) = a1; depth = 2 }
+ a1 = new Array(WIDTH)
+ a2((idx >>> BITS) & MASK) = a1
+ } else if (xor < WIDTH3) { // level = 2
+ if (depth <= 2) { a3 = new Array(WIDTH); a3(0) = a2; depth = 3 }
+ a1 = new Array(WIDTH)
+ a2 = new Array(WIDTH)
+ a2((idx >>> BITS) & MASK) = a1
+ a3((idx >>> BITS2) & MASK) = a2
+ } else if (xor < WIDTH4) { // level = 3
+ if (depth <= 3) { a4 = new Array(WIDTH); a4(0) = a3; depth = 4 }
+ a1 = new Array(WIDTH)
+ a2 = new Array(WIDTH)
+ a3 = new Array(WIDTH)
+ a2((idx >>> BITS) & MASK) = a1
+ a3((idx >>> BITS2) & MASK) = a2
+ a4((idx >>> BITS3) & MASK) = a3
+ } else if (xor < WIDTH5) { // level = 4
+ if (depth <= 4) { a5 = new Array(WIDTH); a5(0) = a4; depth = 5 }
+ a1 = new Array(WIDTH)
+ a2 = new Array(WIDTH)
+ a3 = new Array(WIDTH)
+ a4 = new Array(WIDTH)
+ a2((idx >>> BITS) & MASK) = a1
+ a3((idx >>> BITS2) & MASK) = a2
+ a4((idx >>> BITS3) & MASK) = a3
+ a5((idx >>> BITS4) & MASK) = a4
+ } else { // level = 5
+ if (depth <= 5) { a6 = new Array(LASTWIDTH); a6(0) = a5; depth = 6 }
+ a1 = new Array(WIDTH)
+ a2 = new Array(WIDTH)
+ a3 = new Array(WIDTH)
+ a4 = new Array(WIDTH)
+ a5 = new Array(WIDTH)
+ a2((idx >>> BITS) & MASK) = a1
+ a3((idx >>> BITS2) & MASK) = a2
+ a4((idx >>> BITS3) & MASK) = a3
+ a5((idx >>> BITS4) & MASK) = a4
+ a6(idx >>> BITS5) = a5
+ }
+ }
+
+ def result(): Vector[A] = {
+ if (prefixIsRightAligned) leftAlignPrefix()
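+    // Carve the builder's left-aligned radix tree into Vector slices: at each level
+    // the first child becomes a prefix, the last child a suffix, and the middle the data.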
+ val len = len1 + lenRest
+ val realLen = len - offset
+ if(realLen == 0) Vector.empty
+ else if(len < 0) throw new IndexOutOfBoundsException(s"Vector cannot have negative size $len")
+ else if(len <= WIDTH) {
+ new Vector1(copyIfDifferentSize(a1, realLen))
+ } else if(len <= WIDTH2) {
+ val i1 = (len-1) & MASK
+ val i2 = (len-1) >>> BITS
+ val data = copyOfRange(a2, 1, i2)
+ val prefix1 = a2(0)
+ val suffix1 = copyIfDifferentSize(a2(i2), i1+1)
+ new Vector2(prefix1, WIDTH-offset, data, suffix1, realLen)
+ } else if(len <= WIDTH3) {
+ val i1 = (len-1) & MASK
+ val i2 = ((len-1) >>> BITS) & MASK
+ val i3 = ((len-1) >>> BITS2)
+ val data = copyOfRange(a3, 1, i3)
+ val prefix2 = copyTail(a3(0))
+ val prefix1 = a3(0)(0)
+ val suffix2 = copyOf(a3(i3), i2)
+ val suffix1 = copyIfDifferentSize(a3(i3)(i2), i1+1)
+ val len1 = prefix1.length
+ val len12 = len1 + prefix2.length*WIDTH
+ new Vector3(prefix1, len1, prefix2, len12, data, suffix2, suffix1, realLen)
+ } else if(len <= WIDTH4) {
+ val i1 = (len-1) & MASK
+ val i2 = ((len-1) >>> BITS) & MASK
+ val i3 = ((len-1) >>> BITS2) & MASK
+ val i4 = ((len-1) >>> BITS3)
+ val data = copyOfRange(a4, 1, i4)
+ val prefix3 = copyTail(a4(0))
+ val prefix2 = copyTail(a4(0)(0))
+ val prefix1 = a4(0)(0)(0)
+ val suffix3 = copyOf(a4(i4), i3)
+ val suffix2 = copyOf(a4(i4)(i3), i2)
+ val suffix1 = copyIfDifferentSize(a4(i4)(i3)(i2), i1+1)
+ val len1 = prefix1.length
+ val len12 = len1 + prefix2.length*WIDTH
+ val len123 = len12 + prefix3.length*WIDTH2
+ new Vector4(prefix1, len1, prefix2, len12, prefix3, len123, data, suffix3, suffix2, suffix1, realLen)
+ } else if(len <= WIDTH5) {
+ val i1 = (len-1) & MASK
+ val i2 = ((len-1) >>> BITS) & MASK
+ val i3 = ((len-1) >>> BITS2) & MASK
+ val i4 = ((len-1) >>> BITS3) & MASK
+ val i5 = ((len-1) >>> BITS4)
+ val data = copyOfRange(a5, 1, i5)
+ val prefix4 = copyTail(a5(0))
+ val prefix3 = copyTail(a5(0)(0))
+ val prefix2 = copyTail(a5(0)(0)(0))
+ val prefix1 = a5(0)(0)(0)(0)
+ val suffix4 = copyOf(a5(i5), i4)
+ val suffix3 = copyOf(a5(i5)(i4), i3)
+ val suffix2 = copyOf(a5(i5)(i4)(i3), i2)
+ val suffix1 = copyIfDifferentSize(a5(i5)(i4)(i3)(i2), i1+1)
+ val len1 = prefix1.length
+ val len12 = len1 + prefix2.length*WIDTH
+ val len123 = len12 + prefix3.length*WIDTH2
+ val len1234 = len123 + prefix4.length*WIDTH3
+ new Vector5(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, data, suffix4, suffix3, suffix2, suffix1, realLen)
+ } else {
+ val i1 = (len-1) & MASK
+ val i2 = ((len-1) >>> BITS) & MASK
+ val i3 = ((len-1) >>> BITS2) & MASK
+ val i4 = ((len-1) >>> BITS3) & MASK
+ val i5 = ((len-1) >>> BITS4) & MASK
+ val i6 = ((len-1) >>> BITS5)
+ val data = copyOfRange(a6, 1, i6)
+ val prefix5 = copyTail(a6(0))
+ val prefix4 = copyTail(a6(0)(0))
+ val prefix3 = copyTail(a6(0)(0)(0))
+ val prefix2 = copyTail(a6(0)(0)(0)(0))
+ val prefix1 = a6(0)(0)(0)(0)(0)
+ val suffix5 = copyOf(a6(i6), i5)
+ val suffix4 = copyOf(a6(i6)(i5), i4)
+ val suffix3 = copyOf(a6(i6)(i5)(i4), i3)
+ val suffix2 = copyOf(a6(i6)(i5)(i4)(i3), i2)
+ val suffix1 = copyIfDifferentSize(a6(i6)(i5)(i4)(i3)(i2), i1+1)
+ val len1 = prefix1.length
+ val len12 = len1 + prefix2.length*WIDTH
+ val len123 = len12 + prefix3.length*WIDTH2
+ val len1234 = len123 + prefix4.length*WIDTH3
+ val len12345 = len1234 + prefix5.length*WIDTH4
+ new Vector6(prefix1, len1, prefix2, len12, prefix3, len123, prefix4, len1234, prefix5, len12345, data, suffix5, suffix4, suffix3, suffix2, suffix1, realLen)
+ }
+ }
+
+ override def toString: String =
+ s"VectorBuilder(len1=$len1, lenRest=$lenRest, offset=$offset, depth=$depth)"
+
+ private[immutable] def getData: Array[Array[_]] = Array[Array[AnyRef]](
+ a1, a2.asInstanceOf[Array[AnyRef]], a3.asInstanceOf[Array[AnyRef]], a4.asInstanceOf[Array[AnyRef]],
+ a5.asInstanceOf[Array[AnyRef]], a6.asInstanceOf[Array[AnyRef]]
+ ).asInstanceOf[Array[Array[_]]]
+}
+
+
+/** Compile-time definitions for Vector. No references to this object should appear in bytecode. */
+private[immutable] object VectorInline {
+ // compile-time numeric constants
+ final val BITS = 5
+ final val WIDTH = 1 << BITS
+ final val MASK = WIDTH - 1
+ final val BITS2 = BITS * 2
+ final val WIDTH2 = 1 << BITS2
+ final val BITS3 = BITS * 3
+ final val WIDTH3 = 1 << BITS3
+ final val BITS4 = BITS * 4
+ final val WIDTH4 = 1 << BITS4
+ final val BITS5 = BITS * 5
+ final val WIDTH5 = 1 << BITS5
+  final val LASTWIDTH = WIDTH << 1 // 1 extra bit in the last level to go up to Int.MaxValue (2^31-1) instead of 2^30
+ final val Log2ConcatFaster = 5
+ final val AlignToFaster = 64
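+
+  // With BITS = 5: WIDTH = 32, WIDTH2 = 1024, WIDTH3 = 32768, WIDTH4 = 1048576,
+  // WIDTH5 = 33554432, LASTWIDTH = 64; a full 6-level tree thus addresses
+  // LASTWIDTH * WIDTH5 = 2^31 elements.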
+
+ type Arr1 = Array[AnyRef]
+ type Arr2 = Array[Array[AnyRef]]
+ type Arr3 = Array[Array[Array[AnyRef]]]
+ type Arr4 = Array[Array[Array[Array[AnyRef]]]]
+ type Arr5 = Array[Array[Array[Array[Array[AnyRef]]]]]
+ type Arr6 = Array[Array[Array[Array[Array[Array[AnyRef]]]]]]
+
+  /** Dimension of the slice at index `idx` in a vector with `count` slices */
+ @inline def vectorSliceDim(count: Int, idx: Int): Int = {
+ val c = count/2
+ c+1-abs(idx-c)
+ }
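+  // E.g. for a Vector6 (count = 11), idx = 0..10 yields dims 1,2,3,4,5,6,5,4,3,2,1,
+  // matching the prefix1..prefix5, data6, suffix5..suffix1 layout of vectorSlice.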
+
+ @inline def copyOrUse[T <: AnyRef](a: Array[T], start: Int, end: Int): Array[T] =
+ if(start == 0 && end == a.length) a else copyOfRange[T](a, start, end)
+
+ @inline final def copyTail[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 1, a.length)
+
+ @inline final def copyInit[T <: AnyRef](a: Array[T]): Array[T] = copyOfRange[T](a, 0, a.length-1)
+
+ @inline final def copyIfDifferentSize[T <: AnyRef](a: Array[T], len: Int): Array[T] =
+ if(a.length == len) a else copyOf[T](a, len)
+
+ @inline final def wrap1(x: Any ): Arr1 = { val a = new Arr1(1); a(0) = x.asInstanceOf[AnyRef]; a }
+ @inline final def wrap2(x: Arr1): Arr2 = { val a = new Arr2(1); a(0) = x; a }
+ @inline final def wrap3(x: Arr2): Arr3 = { val a = new Arr3(1); a(0) = x; a }
+ @inline final def wrap4(x: Arr3): Arr4 = { val a = new Arr4(1); a(0) = x; a }
+ @inline final def wrap5(x: Arr4): Arr5 = { val a = new Arr5(1); a(0) = x; a }
+
+ @inline final def copyUpdate(a1: Arr1, idx1: Int, elem: Any): Arr1 = {
+ val a1c = a1.clone()
+ a1c(idx1) = elem.asInstanceOf[AnyRef]
+ a1c
+ }
+
+ @inline final def copyUpdate(a2: Arr2, idx2: Int, idx1: Int, elem: Any): Arr2 = {
+ val a2c = a2.clone()
+ a2c(idx2) = copyUpdate(a2c(idx2), idx1, elem)
+ a2c
+ }
+
+ @inline final def copyUpdate(a3: Arr3, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr3 = {
+ val a3c = a3.clone()
+ a3c(idx3) = copyUpdate(a3c(idx3), idx2, idx1, elem)
+ a3c
+ }
+
+ @inline final def copyUpdate(a4: Arr4, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr4 = {
+ val a4c = a4.clone()
+ a4c(idx4) = copyUpdate(a4c(idx4), idx3, idx2, idx1, elem)
+ a4c
+ }
+
+ @inline final def copyUpdate(a5: Arr5, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr5 = {
+ val a5c = a5.clone()
+ a5c(idx5) = copyUpdate(a5c(idx5), idx4, idx3, idx2, idx1, elem)
+ a5c
+ }
+
+ @inline final def copyUpdate(a6: Arr6, idx6: Int, idx5: Int, idx4: Int, idx3: Int, idx2: Int, idx1: Int, elem: Any): Arr6 = {
+ val a6c = a6.clone()
+ a6c(idx6) = copyUpdate(a6c(idx6), idx5, idx4, idx3, idx2, idx1, elem)
+ a6c
+ }
+
+ @inline final def concatArrays[T <: AnyRef](a: Array[T], b: Array[T]): Array[T] = {
+ val dest = copyOf[T](a, a.length+b.length)
+ System.arraycopy(b, 0, dest, a.length, b.length)
+ dest
+ }
+}
+
+
+/** Helper methods and constants for Vector. */
+private object VectorStatics {
+
+ final def copyAppend1(a: Arr1, elem: Any): Arr1 = {
+ val alen = a.length
+ val ac = new Arr1(alen+1)
+ System.arraycopy(a, 0, ac, 0, alen)
+ ac(alen) = elem.asInstanceOf[AnyRef]
+ ac
+ }
+
+ final def copyAppend[T <: AnyRef](a: Array[T], elem: T): Array[T] = {
+ val ac = copyOf(a, a.length+1)
+ ac(ac.length-1) = elem
+ ac
+ }
+
+ final def copyPrepend1(elem: Any, a: Arr1): Arr1 = {
+ val ac = new Arr1(a.length+1)
+ System.arraycopy(a, 0, ac, 1, a.length)
+ ac(0) = elem.asInstanceOf[AnyRef]
+ ac
+ }
+
+ final def copyPrepend[T <: AnyRef](elem: T, a: Array[T]): Array[T] = {
+ val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length+1).asInstanceOf[Array[T]]
+ System.arraycopy(a, 0, ac, 1, a.length)
+ ac(0) = elem
+ ac
+ }
+
+ final val empty1: Arr1 = new Array(0)
+ final val empty2: Arr2 = new Array(0)
+ final val empty3: Arr3 = new Array(0)
+ final val empty4: Arr4 = new Array(0)
+ final val empty5: Arr5 = new Array(0)
+ final val empty6: Arr6 = new Array(0)
+
+ final def foreachRec[T <: AnyRef, A, U](level: Int, a: Array[T], f: A => U): Unit = {
+ var i = 0
+ val len = a.length
+ if(level == 0) {
+ while(i < len) {
+ f(a(i).asInstanceOf[A])
+ i += 1
+ }
+ } else {
+ val l = level-1
+ while(i < len) {
+ foreachRec(l, a(i).asInstanceOf[Array[AnyRef]], f)
+ i += 1
+ }
+ }
+ }
+
+ final def mapElems1[A, B](a: Arr1, f: A => B): Arr1 = {
+ var i = 0
+ while(i < a.length) {
+ val v1 = a(i).asInstanceOf[AnyRef]
+ val v2 = f(v1.asInstanceOf[A]).asInstanceOf[AnyRef]
+ if(v1 ne v2)
+ return mapElems1Rest(a, f, i, v2)
+ i += 1
+ }
+ a
+ }
+
+ final def mapElems1Rest[A, B](a: Arr1, f: A => B, at: Int, v2: AnyRef): Arr1 = {
+ val ac = new Arr1(a.length)
+ if(at > 0) System.arraycopy(a, 0, ac, 0, at)
+ ac(at) = v2
+ var i = at+1
+ while(i < a.length) {
+ ac(i) = f(a(i).asInstanceOf[A]).asInstanceOf[AnyRef]
+ i += 1
+ }
+ ac
+ }
+
+ final def mapElems[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B): Array[T] = {
+ if(n == 1)
+ mapElems1[A, B](a.asInstanceOf[Arr1], f).asInstanceOf[Array[T]]
+ else {
+ var i = 0
+ while(i < a.length) {
+ val v1 = a(i)
+ val v2 = mapElems(n-1, v1.asInstanceOf[Array[AnyRef]], f)
+ if(v1 ne v2)
+ return mapElemsRest(n, a, f, i, v2)
+ i += 1
+ }
+ a
+ }
+ }
+
+ final def mapElemsRest[A, B, T <: AnyRef](n: Int, a: Array[T], f: A => B, at: Int, v2: AnyRef): Array[T] = {
+ val ac = java.lang.reflect.Array.newInstance(a.getClass.getComponentType, a.length).asInstanceOf[Array[AnyRef]]
+ if(at > 0) System.arraycopy(a, 0, ac, 0, at)
+ ac(at) = v2
+ var i = at+1
+ while(i < a.length) {
+ ac(i) = mapElems(n-1, a(i).asInstanceOf[Array[AnyRef]], f)
+ i += 1
+ }
+ ac.asInstanceOf[Array[T]]
+ }
+
+ final def prepend1IfSpace(prefix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match {
+ case it: Iterable[_] =>
+ if(it.sizeCompare(WIDTH-prefix1.length) <= 0) {
+ it.size match {
+ case 0 => null
+ case 1 => copyPrepend(it.head.asInstanceOf[AnyRef], prefix1)
+ case s =>
+ val prefix1b = new Arr1(prefix1.length + s)
+ System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length)
+ it.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0)
+ prefix1b
+ }
+ } else null
+ case it =>
+ val s = it.knownSize
+ if(s > 0 && s <= WIDTH-prefix1.length) {
+ val prefix1b = new Arr1(prefix1.length + s)
+ System.arraycopy(prefix1, 0, prefix1b, s, prefix1.length)
+ it.iterator.copyToArray(prefix1b.asInstanceOf[Array[Any]], 0)
+ prefix1b
+ } else null
+ }
+
+ final def append1IfSpace(suffix1: Arr1, xs: IterableOnce[_]^): Arr1 = xs match {
+ case it: Iterable[_] =>
+ if(it.sizeCompare(WIDTH-suffix1.length) <= 0) {
+ it.size match {
+ case 0 => null
+ case 1 => copyAppend(suffix1, it.head.asInstanceOf[AnyRef])
+ case s =>
+ val suffix1b = copyOf(suffix1, suffix1.length + s)
+ it.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length)
+ suffix1b
+ }
+ } else null
+ case it =>
+ val s = it.knownSize
+ if(s > 0 && s <= WIDTH-suffix1.length) {
+ val suffix1b = copyOf(suffix1, suffix1.length + s)
+ it.iterator.copyToArray(suffix1b.asInstanceOf[Array[Any]], suffix1.length)
+ suffix1b
+ } else null
+ }
+}
+
+
+private final class NewVectorIterator[A](v: Vector[A], private[this] var totalLength: Int, private[this] val sliceCount: Int) extends Iterator[A] with java.lang.Cloneable {
+
+ private[this] var a1: Arr1 = v.prefix1
+ private[this] var a2: Arr2 = _
+ private[this] var a3: Arr3 = _
+ private[this] var a4: Arr4 = _
+ private[this] var a5: Arr5 = _
+ private[this] var a6: Arr6 = _
+ private[this] var a1len = a1.length
+ private[this] var i1 = 0 // current index in a1
+ private[this] var oldPos = 0
+ private[this] var len1 = totalLength // remaining length relative to a1
+
+ private[this] var sliceIdx = 0
+ private[this] var sliceDim = 1
+ private[this] var sliceStart = 0 // absolute position
+ private[this] var sliceEnd = a1len // absolute position
+
+ //override def toString: String =
+ // s"NewVectorIterator(v=$v, totalLength=$totalLength, sliceCount=$sliceCount): a1len=$a1len, len1=$len1, i1=$i1, sliceEnd=$sliceEnd"
+
+ @inline override def knownSize = len1 - i1
+
+ @inline def hasNext: Boolean = len1 > i1
+
+ def next(): A = {
+ if(i1 == a1len) advance()
+ val r = a1(i1)
+ i1 += 1
+ r.asInstanceOf[A]
+ }
+
+ private[this] def advanceSlice(): Unit = {
+ if(!hasNext) Iterator.empty.next()
+ sliceIdx += 1
+ var slice: Array[_ <: AnyRef] = v.vectorSlice(sliceIdx)
+ while(slice.length == 0) {
+ sliceIdx += 1
+ slice = v.vectorSlice(sliceIdx)
+ }
+ sliceStart = sliceEnd
+ sliceDim = vectorSliceDim(sliceCount, sliceIdx)
+ (sliceDim: @switch) match {
+ case 1 => a1 = slice.asInstanceOf[Arr1]
+ case 2 => a2 = slice.asInstanceOf[Arr2]
+ case 3 => a3 = slice.asInstanceOf[Arr3]
+ case 4 => a4 = slice.asInstanceOf[Arr4]
+ case 5 => a5 = slice.asInstanceOf[Arr5]
+ case 6 => a6 = slice.asInstanceOf[Arr6]
+ }
+ sliceEnd = sliceStart + slice.length * (1 << (BITS*(sliceDim-1)))
+ if(sliceEnd > totalLength) sliceEnd = totalLength
+ if(sliceDim > 1) oldPos = (1 << (BITS*sliceDim))-1
+ }
+
+ private[this] def advance(): Unit = {
+ val pos = i1-len1+totalLength
+ if(pos == sliceEnd) advanceSlice()
+ if(sliceDim > 1) {
+ val io = pos - sliceStart
+ val xor = oldPos ^ io
+ advanceA(io, xor)
+ oldPos = io
+ }
+ len1 -= i1
+ a1len = mmin(a1.length, len1)
+ i1 = 0
+ }
+
+ private[this] def advanceA(io: Int, xor: Int): Unit = {
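+    // `xor` (oldPos ^ io) marks the highest digit that changed since the last
+    // position, so only the arrays at and below that level need to be re-read.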
+ if(xor < WIDTH2) {
+ a1 = a2((io >>> BITS) & MASK)
+ } else if(xor < WIDTH3) {
+ a2 = a3((io >>> BITS2) & MASK)
+ a1 = a2(0)
+ } else if(xor < WIDTH4) {
+ a3 = a4((io >>> BITS3) & MASK)
+ a2 = a3(0)
+ a1 = a2(0)
+ } else if(xor < WIDTH5) {
+ a4 = a5((io >>> BITS4) & MASK)
+ a3 = a4(0)
+ a2 = a3(0)
+ a1 = a2(0)
+ } else {
+ a5 = a6(io >>> BITS5)
+ a4 = a5(0)
+ a3 = a4(0)
+ a2 = a3(0)
+ a1 = a2(0)
+ }
+ }
+
+ private[this] def setA(io: Int, xor: Int): Unit = {
+ if(xor < WIDTH2) {
+ a1 = a2((io >>> BITS) & MASK)
+ } else if(xor < WIDTH3) {
+ a2 = a3((io >>> BITS2) & MASK)
+ a1 = a2((io >>> BITS) & MASK)
+ } else if(xor < WIDTH4) {
+ a3 = a4((io >>> BITS3) & MASK)
+ a2 = a3((io >>> BITS2) & MASK)
+ a1 = a2((io >>> BITS) & MASK)
+ } else if(xor < WIDTH5) {
+ a4 = a5((io >>> BITS4) & MASK)
+ a3 = a4((io >>> BITS3) & MASK)
+ a2 = a3((io >>> BITS2) & MASK)
+ a1 = a2((io >>> BITS) & MASK)
+ } else {
+ a5 = a6(io >>> BITS5)
+ a4 = a5((io >>> BITS4) & MASK)
+ a3 = a4((io >>> BITS3) & MASK)
+ a2 = a3((io >>> BITS2) & MASK)
+ a1 = a2((io >>> BITS) & MASK)
+ }
+ }
+
+ override def drop(n: Int): Iterator[A] = {
+ if(n > 0) {
+ val oldpos = i1-len1+totalLength
+ val newpos = mmin(oldpos + n, totalLength)
+ if(newpos == totalLength) {
+ i1 = 0
+ len1 = 0
+ a1len = 0
+ } else {
+ while(newpos >= sliceEnd) advanceSlice()
+ val io = newpos - sliceStart
+ if(sliceDim > 1) {
+ val xor = oldPos ^ io
+ setA(io, xor)
+ oldPos = io
+ }
+ a1len = a1.length
+ i1 = io & MASK
+ len1 = i1 + (totalLength-newpos)
+ if(a1len > len1) a1len = len1
+ }
+ }
+ this
+ }
+
+ override def take(n: Int): Iterator[A] = {
+ if(n < knownSize) {
+ val trunc = knownSize - mmax(0, n)
+ totalLength -= trunc
+ len1 -= trunc
+ if(len1 < a1len) a1len = len1
+ if(totalLength < sliceEnd) sliceEnd = totalLength
+ }
+ this
+ }
+
+ override def slice(from: Int, until: Int): Iterator[A] = {
+ val _until =
+ if(from > 0) {
+ drop(from)
+ until - from
+ } else until
+ take(_until)
+ }
+
+ override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = {
+ val xsLen = xs.length
+ val total = IterableOnce.elemsToCopyToArray(knownSize, xsLen, start, len)
+ var copied = 0
+ val isBoxed = xs.isInstanceOf[Array[AnyRef]]
+ while(copied < total) {
+ if(i1 == a1len) advance()
+ val count = mmin(total-copied, a1.length-i1)
+ if(isBoxed) System.arraycopy(a1, i1, xs, start+copied, count)
+ else Array.copy(a1, i1, xs, start+copied, count)
+ i1 += count
+ copied += count
+ }
+ total
+ }
+
+ override def toVector: Vector[A] =
+ v.slice(i1-len1+totalLength, totalLength)
+
+ protected[immutable] def split(at: Int): NewVectorIterator[A] = {
+ val it2 = clone().asInstanceOf[NewVectorIterator[A]]
+ it2.take(at)
+ drop(at)
+ it2
+ }
+}
+
+
+private abstract class VectorStepperBase[A, Sub >: Null <: Stepper[A], Semi <: Sub](it: NewVectorIterator[A])
+ extends Stepper[A] with EfficientSplit {
+
+ protected[this] def build(it: NewVectorIterator[A]): Semi
+
+ final def hasStep: Boolean = it.hasNext
+
+ final def characteristics: Int = Spliterator.ORDERED + Spliterator.SIZED + Spliterator.SUBSIZED
+
+ final def estimateSize: Long = it.knownSize
+
+ def trySplit(): Sub = {
+ val len = it.knownSize
+ if(len > 1) build(it.split(len >>> 1))
+ else null
+ }
+
+ override final def iterator: Iterator[A] = it
+}
+
+private class AnyVectorStepper[A](it: NewVectorIterator[A])
+ extends VectorStepperBase[A, AnyStepper[A], AnyVectorStepper[A]](it) with AnyStepper[A] {
+ protected[this] def build(it: NewVectorIterator[A]) = new AnyVectorStepper(it)
+ def nextStep(): A = it.next()
+}
+
+private class DoubleVectorStepper(it: NewVectorIterator[Double])
+ extends VectorStepperBase[Double, DoubleStepper, DoubleVectorStepper](it) with DoubleStepper {
+ protected[this] def build(it: NewVectorIterator[Double]) = new DoubleVectorStepper(it)
+ def nextStep(): Double = it.next()
+}
+
+private class IntVectorStepper(it: NewVectorIterator[Int])
+ extends VectorStepperBase[Int, IntStepper, IntVectorStepper](it) with IntStepper {
+ protected[this] def build(it: NewVectorIterator[Int]) = new IntVectorStepper(it)
+ def nextStep(): Int = it.next()
+}
+
+private class LongVectorStepper(it: NewVectorIterator[Long])
+ extends VectorStepperBase[Long, LongStepper, LongVectorStepper](it) with LongStepper {
+ protected[this] def build(it: NewVectorIterator[Long]) = new LongVectorStepper(it)
+ def nextStep(): Long = it.next()
+}
+
+
+// The following definitions are needed for binary compatibility with ParVector
+private[collection] class VectorIterator[+A](_startIndex: Int, private[this] var endIndex: Int) extends AbstractIterator[A] {
+ private[immutable] var it: NewVectorIterator[A @uncheckedVariance @uncheckedCaptures] = _
+ def hasNext: Boolean = it.hasNext
+ def next(): A = it.next()
+ private[collection] def remainingElementCount: Int = it.size
+ private[collection] def remainingVector: Vector[A] = it.toVector
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/VectorMap.scala b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala
new file mode 100644
index 000000000000..0860a0b47f28
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/VectorMap.scala
@@ -0,0 +1,286 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package immutable
+
+import scala.annotation.tailrec
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** This class implements immutable maps using a vector/map-based data structure, which preserves insertion order.
+ *
+ * Unlike `ListMap`, `VectorMap` has amortized effectively constant lookup at the expense
+ * of using extra memory and generally lower performance for other operations.
+ *
+ * @tparam K the type of the keys contained in this vector map.
+ * @tparam V the type of the values associated with the keys in this vector map.
+ *
+ * @define coll immutable vector map
+ * @define Coll `immutable.VectorMap`
+ */
+final class VectorMap[K, +V] private (
+ private[immutable] val fields: Vector[Any],
+ private[immutable] val underlying: Map[K, (Int, V)], dropped: Int)
+ extends AbstractMap[K, V]
+ with SeqMap[K, V]
+ with StrictOptimizedMapOps[K, V, VectorMap, VectorMap[K, V]]
+ with MapFactoryDefaults[K, V, VectorMap, Iterable] {
+
+ import VectorMap._
+
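+  // A hedged usage sketch: iteration follows insertion order, even across removals, e.g.
+  //   VectorMap("a" -> 1, "b" -> 2, "c" -> 3).removed("b").toList == List(("a", 1), ("c", 3))
+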
+ override protected[this] def className: String = "VectorMap"
+
+ private[immutable] def this(fields: Vector[K], underlying: Map[K, (Int, V)]) = {
+ this(fields, underlying, 0)
+ }
+
+ override val size = underlying.size
+
+ override def knownSize: Int = size
+
+ override def isEmpty: Boolean = size == 0
+
+ def updated[V1 >: V](key: K, value: V1): VectorMap[K, V1] = {
+ underlying.get(key) match {
+ case Some((slot, _)) =>
+ new VectorMap(fields, underlying.updated[(Int, V1)](key, (slot, value)), dropped)
+ case None =>
+ new VectorMap(fields :+ key, underlying.updated[(Int, V1)](key, (fields.length + dropped, value)), dropped)
+ }
+ }
+
+ override def withDefault[V1 >: V](d: K -> V1): Map[K, V1] =
+ new Map.WithDefault(this, d)
+
+ override def withDefaultValue[V1 >: V](d: V1): Map[K, V1] =
+ new Map.WithDefault[K, V1](this, _ => d)
+
+ def get(key: K): Option[V] = underlying.get(key) match {
+ case Some(v) => Some(v._2)
+ case None => None
+ }
+
+ @tailrec
+ private def nextValidField(slot: Int): (Int, K) = {
+ if (slot >= fields.size) (-1, null.asInstanceOf[K])
+ else fields(slot) match {
+ case Tombstone(distance) =>
+ nextValidField(slot + distance)
+ case k =>
+ (slot, k.asInstanceOf[K])
+ }
+ }
+
+ def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] {
+ private[this] val fieldsLength = fields.length
+ private[this] var slot = -1
+ private[this] var key: K = null.asInstanceOf[K]
+
+ private[this] def advance(): Unit = {
+ val nextSlot = slot + 1
+ if (nextSlot >= fieldsLength) {
+ slot = fieldsLength
+ key = null.asInstanceOf[K]
+ } else {
+ nextValidField(nextSlot) match {
+ case (-1, _) =>
+ slot = fieldsLength
+ key = null.asInstanceOf[K]
+ case (s, k) =>
+ slot = s
+ key = k
+ }
+ }
+ }
+
+ advance()
+
+ override def hasNext: Boolean = slot < fieldsLength
+
+ override def next(): (K, V) = {
+ if (!hasNext) throw new NoSuchElementException("next called on depleted iterator")
+ val result = (key, underlying(key)._2)
+ advance()
+ result
+ }
+ }
+
+ // No-Op overrides to allow for more efficient steppers in a minor release.
+ // Refining the return type to `S with EfficientSplit` is binary compatible.
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S = super.stepper(shape)
+
+ override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S = super.keyStepper(shape)
+
+ override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S = super.valueStepper(shape)
+
+
+ def removed(key: K): VectorMap[K, V] = {
+ if (isEmpty) empty
+ else {
+ var fs = fields
+ val sz = fs.size
+ underlying.get(key) match {
+ case Some(_) if size == 1 => empty
+ case Some((slot, _)) =>
+ val s = slot - dropped
+
+ // Calculate next of kin
+ val next =
+ if (s < sz - 1) fs(s + 1) match {
+ case Tombstone(d) => s + d + 1
+ case _ => s + 1
+ } else s + 1
+
+ fs = fs.updated(s, Tombstone(next - s))
+
+ // Calculate first index of preceding tombstone sequence
+ val first =
+ if (s > 0) {
+ fs(s - 1) match {
+ case Tombstone(d) if d < 0 => if (s + d >= 0) s + d else 0
+ case Tombstone(d) if d == 1 => s - 1
+              case Tombstone(d) => throw new IllegalStateException("tombstone indicates a wrong position: " + d)
+ case _ => s
+ }
+            } else s
+ fs = fs.updated(first, Tombstone(next - first))
+
+ // Calculate last index of succeeding tombstone sequence
+ val last = next - 1
+ if (last != first) {
+ fs = fs.updated(last, Tombstone(first - 1 - last))
+ }
+ new VectorMap(fs, underlying - key, dropped)
+ case _ =>
+ this
+ }
+ }
+ }
+
+ override def mapFactory: MapFactory[VectorMap] = VectorMap
+
+ override def contains(key: K): Boolean = underlying.contains(key)
+
+ override def head: (K, V) = iterator.next()
+
+ override def last: (K, V) = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.last")
+ val lastSlot = fields.length - 1
+ val last = fields.last match {
+ case Tombstone(d) if d < 0 => fields(lastSlot + d).asInstanceOf[K]
+ case Tombstone(d) if d == 1 => fields(lastSlot - 1).asInstanceOf[K]
+      case Tombstone(d) => throw new IllegalStateException("tombstone indicates a wrong position: " + d)
+ case k => k.asInstanceOf[K]
+ }
+ (last, underlying(last)._2)
+ }
+
+ override def lastOption: Option[(K, V)] = {
+ if (isEmpty) None
+ else Some(last)
+ }
+
+ override def tail: VectorMap[K, V] = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.tail")
+ val (slot, key) = nextValidField(0)
+ new VectorMap(fields.drop(slot + 1), underlying - key, dropped + slot + 1)
+ }
+
+ override def init: VectorMap[K, V] = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.init")
+ val lastSlot = fields.size - 1
+ val (slot, key) = fields.last match {
+ case Tombstone(d) if d < 0 => (lastSlot + d, fields(lastSlot + d).asInstanceOf[K])
+ case Tombstone(d) if d == 1 => (lastSlot - 1, fields(lastSlot - 1).asInstanceOf[K])
+      case Tombstone(d) => throw new IllegalStateException("tombstone indicates a wrong position: " + d)
+ case k => (lastSlot, k.asInstanceOf[K])
+ }
+ new VectorMap(fields.dropRight(fields.size - slot), underlying - key, dropped)
+ }
+
+ override def keys: Vector[K] = keysIterator.toVector
+
+ override def values: Iterable[V] = new Iterable[V] with IterableFactoryDefaults[V, Iterable] {
+ override def iterator: Iterator[V] = keysIterator.map(underlying(_)._2)
+ }
+}
+
+object VectorMap extends MapFactory[VectorMap] {
+  // Class to mark deleted slots in 'fields'.
+  // When one or more consecutive slots are deleted, the 'distance' of the first 'Tombstone'
+  // is the distance to the next undeleted slot (or to one past the last slot in 'fields'
+  // if no such slot exists).
+  // When two or more consecutive slots are deleted, the 'distance' of the trailing 'Tombstone'
+  // is the distance to the previous undeleted slot (or to position -1 if none exists),
+  // multiplied by -1.
+  // For other deleted slots, it simply indicates that they have been deleted.
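+  // A worked example (slots 1, 2 and 3 of a six-slot 'fields' removed, in that order):
+  //   fields = Vector(k0, Tombstone(3), Tombstone(-2), Tombstone(-3), k4, k5)
+  // Tombstone(3) at slot 1 jumps to the next live slot 4; Tombstone(-3) at slot 3
+  // encodes the back-distance to the previous live slot 0, negated.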
+ private[VectorMap] final case class Tombstone(distance: Int)
+
+ private[this] final val EmptyMap: VectorMap[Nothing, Nothing] =
+ new VectorMap[Nothing, Nothing](Vector.empty[Nothing], HashMap.empty[Nothing, (Int, Nothing)])
+
+ def empty[K, V]: VectorMap[K, V] = EmptyMap.asInstanceOf[VectorMap[K, V]]
+
+ def from[K, V](it: collection.IterableOnce[(K, V)]^): VectorMap[K, V] =
+ it match {
+ case vm: VectorMap[K, V] => vm
+ case _ => (newBuilder[K, V] ++= it).result()
+ }
+
+ def newBuilder[K, V]: mutable.Builder[(K, V), VectorMap[K, V]] = new VectorMapBuilder[K, V]
+}
+
+private[immutable] final class VectorMapBuilder[K, V] extends mutable.Builder[(K, V), VectorMap[K, V]] {
+ private[this] val vectorBuilder = new VectorBuilder[K]
+ private[this] val mapBuilder = new MapBuilderImpl[K, (Int, V)]
+ private[this] var aliased: VectorMap[K, V] @uncheckedCaptures = _ // OK since VectorMapBuilder is private
+
+ override def clear(): Unit = {
+ vectorBuilder.clear()
+ mapBuilder.clear()
+ aliased = null
+ }
+
+ override def result(): VectorMap[K, V] = {
+ if (aliased eq null) {
+ aliased = new VectorMap(vectorBuilder.result(), mapBuilder.result())
+ }
+ aliased
+ }
+ def addOne(key: K, value: V): this.type = {
+ if (aliased ne null) {
+ aliased = aliased.updated(key, value)
+ } else {
+ mapBuilder.getOrElse(key, null) match {
+ case (slot, _) =>
+ mapBuilder.addOne(key, (slot, value))
+ case null =>
+ val vectorSize = vectorBuilder.size
+ vectorBuilder.addOne(key)
+ mapBuilder.addOne(key, (vectorSize, value))
+ }
+ }
+ this
+ }
+
+ override def addOne(elem: (K, V)): this.type = addOne(elem._1, elem._2)
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/WrappedString.scala b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala
new file mode 100644
index 000000000000..47fe769c81ef
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/WrappedString.scala
@@ -0,0 +1,146 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package immutable
+
+import scala.Predef.{wrapString => _, assert}
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.convert.impl.CharStringStepper
+import scala.collection.mutable.{Builder, StringBuilder}
+import language.experimental.captureChecking
+
+/**
+ * This class serves as a wrapper augmenting `String`s with all the operations
+ * found in indexed sequences.
+ *
+ * The difference between this class and `StringOps` is that calling transformer
+ * methods such as `filter` and `map` will yield an object of type `WrappedString`
+ * rather than a `String`.
+ *
+ * @param self a string contained within this wrapped string
+ *
+ * @define Coll `WrappedString`
+ * @define coll wrapped string
+ */
+@SerialVersionUID(3L)
+final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char]
+ with IndexedSeqOps[Char, IndexedSeq, WrappedString]
+ with Serializable
+ with Pure {
+
+ def apply(i: Int): Char = self.charAt(i)
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]^): WrappedString = WrappedString.fromSpecific(coll)
+ override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder
+ override def empty: WrappedString = WrappedString.empty
+
+ override def slice(from: Int, until: Int): WrappedString = {
+ val start = if (from < 0) 0 else from
+ if (until <= start || start >= self.length)
+ return WrappedString.empty
+
+ val end = if (until > length) length else until
+ new WrappedString(self.substring(start, end))
+ }
+ override def length = self.length
+ override def toString = self
+ override def view: StringView = new StringView(self)
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = {
+ val st = new CharStringStepper(self, 0, self.length)
+ val r =
+ if (shape.shape == StepperShape.CharShape) st
+ else {
+ assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape")
+ AnyStepper.ofParIntStepper(st)
+ }
+ r.asInstanceOf[S with EfficientSplit]
+ }
+
+ override def startsWith[B >: Char](that: IterableOnce[B]^, offset: Int = 0): Boolean =
+ that match {
+ case s: WrappedString => self.startsWith(s.self, offset)
+ case _ => super.startsWith(that, offset)
+ }
+
+ override def endsWith[B >: Char](that: collection.Iterable[B]^): Boolean =
+ that match {
+ case s: WrappedString => self.endsWith(s.self)
+ case _ => super.endsWith(that)
+ }
+
+ override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match {
+ case c: Char => self.indexOf(c, from)
+ case _ => super.indexOf(elem, from)
+ }
+
+ override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int =
+ elem match {
+ case c: Char => self.lastIndexOf(c, end)
+ case _ => super.lastIndexOf(elem, end)
+ }
+
+ override def copyToArray[sealed B >: Char](xs: Array[B], start: Int, len: Int): Int =
+ (xs: Any) match {
+ case chs: Array[Char] =>
+ val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len)
+ self.getChars(0, copied, chs, start)
+ copied
+ case _ => super.copyToArray(xs, start, len)
+ }
+
+ override def appendedAll[B >: Char](suffix: IterableOnce[B]^): IndexedSeq[B] =
+ suffix match {
+ case s: WrappedString => new WrappedString(self concat s.self)
+ case _ => super.appendedAll(suffix)
+ }
+
+ override def sameElements[B >: Char](o: IterableOnce[B]^) = o match {
+ case s: WrappedString => self == s.self
+ case _ => super.sameElements(o)
+ }
+
+ override protected[this] def className = "WrappedString"
+
+ override protected final def applyPreferredMaxLength: Int = Int.MaxValue
+ override def equals(other: Any): Boolean = other match {
+ case that: WrappedString =>
+ this.self == that.self
+ case _ =>
+ super.equals(other)
+ }
+}
+
+/** A companion object for wrapped strings.
+ */
+@SerialVersionUID(3L)
+object WrappedString extends SpecificIterableFactory[Char, WrappedString] {
+ def fromSpecific(it: IterableOnce[Char]^): WrappedString = {
+ val b = newBuilder
+ val s = it.knownSize
+ if(s >= 0) b.sizeHint(s)
+ b ++= it
+ b.result()
+ }
+ val empty: WrappedString = new WrappedString("")
+ def newBuilder: Builder[Char, WrappedString] =
+ new StringBuilder().mapResult(x => new WrappedString(x))
+
+ implicit class UnwrapOp(private val value: WrappedString) extends AnyVal {
+ def unwrap: String = value.self
+ }
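+
+  // Usage sketch (illustrative): unlike StringOps, transformers stay WrappedString,
+  // and `unwrap` recovers the underlying String:
+  //   val ws: WrappedString = new WrappedString("hello").filter(_ != 'l')  // "heo"
+  //   val s: String = ws.unwrap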
+}
diff --git a/tests/pos-special/stdlib/collection/immutable/package.scala b/tests/pos-special/stdlib/collection/immutable/package.scala
new file mode 100644
index 000000000000..985ef22859be
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/immutable/package.scala
@@ -0,0 +1,29 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+package object immutable {
+ type StringOps = scala.collection.StringOps
+ val StringOps = scala.collection.StringOps
+ type StringView = scala.collection.StringView
+ val StringView = scala.collection.StringView
+
+ @deprecated("Use Iterable instead of Traversable", "2.13.0")
+ type Traversable[+X] = Iterable[X]
+ @deprecated("Use Iterable instead of Traversable", "2.13.0")
+ val Traversable = Iterable
+
+ @deprecated("Use Map instead of DefaultMap", "2.13.0")
+ type DefaultMap[K, +V] = scala.collection.immutable.Map[K, V]
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala
new file mode 100644
index 000000000000..a6413649e219
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/AnyRefMap.scala
@@ -0,0 +1,603 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.annotation.nowarn
+import scala.collection.generic.DefaultSerializationProxy
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+
+
+/** This class implements mutable maps with `AnyRef` keys based on a hash table with open addressing.
+ *
+ * Basic map operations on single entries, including `contains` and `get`,
+ * are typically significantly faster with `AnyRefMap` than [[HashMap]].
+ * Note that numbers and characters are not handled specially in AnyRefMap;
+ * only plain `equals` and `hashCode` are used in comparisons.
+ *
+ * Methods that traverse or regenerate the map, including `foreach` and `map`,
+ * are not in general faster than with `HashMap`. The methods `foreachKey`,
+ * `foreachValue`, `mapValuesNow`, and `transformValues` are, however, faster
+ * than alternative ways to achieve the same functionality.
+ *
+ * Maps with open addressing may become less efficient at lookup after
+ * repeated addition/removal of elements. Although `AnyRefMap` makes a
+ * decent attempt to remain efficient regardless, calling `repack`
+ * on a map that will no longer have elements removed but will be
+ * used heavily may save both time and storage space.
+ *
+ * This map is not intended to contain more than 2^29^ entries (approximately
+ * 500 million). The maximum capacity is 2^30^, but performance will degrade
+ * rapidly as 2^30^ is approached.
+ *
+ */
+class AnyRefMap[K <: AnyRef, sealed V] private[collection] (defaultEntry: K -> V, initialBufferSize: Int, initBlank: Boolean)
+ extends AbstractMap[K, V]
+ with MapOps[K, V, Map, AnyRefMap[K, V]]
+ with StrictOptimizedIterableOps[(K, V), Iterable, AnyRefMap[K, V]]
+ with Serializable {
+
+ import AnyRefMap._
+ def this() = this(AnyRefMap.exceptionDefault, 16, true)
+
+ /** Creates a new `AnyRefMap` that returns default values according to a supplied key-value mapping. */
+ def this(defaultEntry: K -> V) = this(defaultEntry, 16, true)
+
+ /** Creates a new `AnyRefMap` with an initial buffer of specified size.
+ *
+ * An `AnyRefMap` can typically contain half as many elements as its buffer size
+ * before it requires resizing.
+ */
+ def this(initialBufferSize: Int) = this(AnyRefMap.exceptionDefault, initialBufferSize, true)
+
+ /** Creates a new `AnyRefMap` with specified default values and initial buffer size. */
+ def this(defaultEntry: K -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true)
+
+ private[this] var mask = 0
+ private[this] var _size = 0
+ private[this] var _vacant = 0
+ private[this] var _hashes: Array[Int] = null
+ private[this] var _keys: Array[AnyRef] = null
+ private[this] var _values: Array[AnyRef] = null
+
+ if (initBlank) defaultInitialize(initialBufferSize)
+
+ private[this] def defaultInitialize(n: Int): Unit = {
+ mask =
+ if (n<0) 0x7
+ else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7
+ _hashes = new Array[Int](mask+1)
+ _keys = new Array[AnyRef](mask+1)
+ _values = new Array[AnyRef](mask+1)
+ }
+
+ private[collection] def initializeTo(
+ m: Int, sz: Int, vc: Int, hz: Array[Int], kz: Array[AnyRef], vz: Array[AnyRef]
+ ): Unit = {
+ mask = m; _size = sz; _vacant = vc; _hashes = hz; _keys = kz; _values = vz
+ }
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): AnyRefMap[K,V] = {
+ var sz = coll.knownSize
+ if(sz < 0) sz = 4
+ val arm = new AnyRefMap[K, V](sz * 2)
+ coll.iterator.foreach{ case (k,v) => arm(k) = v }
+ if (arm.size < (sz>>3)) arm.repack()
+ arm
+ }
+ override protected def newSpecificBuilder: Builder[(K, V), AnyRefMap[K,V]] = new AnyRefMapBuilder
+
+ override def size: Int = _size
+ override def knownSize: Int = size
+ override def isEmpty: Boolean = _size == 0
+ override def empty: AnyRefMap[K,V] = new AnyRefMap(defaultEntry)
+
+ private def imbalanced: Boolean =
+ (_size + _vacant) > 0.5*mask || _vacant > _size
+
+ private def hashOf(key: K): Int = {
+ // Note: this method must not return 0 or Int.MinValue, as these indicate no element
+ if (key eq null) 0x41081989
+ else {
+ val h = key.hashCode
+ // Part of the MurmurHash3 32 bit finalizer
+ val i = (h ^ (h >>> 16)) * 0x85EBCA6B
+ val j = (i ^ (i >>> 13)) & 0x7FFFFFFF
+ if (j==0) 0x41081989 else j
+ }
+ }
+
+ private def seekEntry(h: Int, k: AnyRef): Int = {
+ var e = h & mask
+ var x = 0
+ var g = 0
+ val hashes = _hashes
+ val keys = _keys
+ while ({ g = hashes(e); g != 0}) {
+ if (g == h && { val q = keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e
+ x += 1
+ e = (e + 2*(x+1)*x - 3) & mask
+ }
+ e | MissingBit
+ }
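+
+  // Probing note (added for exposition): the step `e = (e + 2*(x+1)*x - 3) & mask`
+  // advances by increments 1, 9, 21, ... that grow quadratically in x rather than
+  // linearly, reducing primary clustering; `& mask` wraps within the power-of-two
+  // capacity, so the scan stays inside the table.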
+
+ @`inline` private def seekEntryOrOpen(h: Int, k: AnyRef): Int = {
+ var e = h & mask
+ var x = 0
+ var g = 0
+ var o = -1
+ while ({ g = _hashes(e); g != 0}) {
+ if (g == h && { val q = _keys(e); (q eq k) || ((q ne null) && (q equals k)) }) return e
+ else if (o == -1 && g+g == 0) o = e
+ x += 1
+ e = (e + 2*(x+1)*x - 3) & mask
+ }
+ if (o >= 0) o | MissVacant else e | MissingBit
+ }
+
+ override def contains(key: K): Boolean = seekEntry(hashOf(key), key) >= 0
+
+ override def get(key: K): Option[V] = {
+ val i = seekEntry(hashOf(key), key)
+ if (i < 0) None else Some(_values(i).asInstanceOf[V])
+ }
+
+ override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+ val i = seekEntry(hashOf(key), key)
+ if (i < 0) default else _values(i).asInstanceOf[V]
+ }
+
+ override def getOrElseUpdate(key: K, defaultValue: => V): V = {
+ val h = hashOf(key)
+ var i = seekEntryOrOpen(h, key)
+ if (i < 0) {
+ // It is possible that the default value computation was side-effecting
+ // Our hash table may have resized or even contain what we want now
+ // (but if it does, we'll replace it)
+ val value = {
+ val oh = _hashes
+ val ans = defaultValue
+ if (oh ne _hashes) {
+ i = seekEntryOrOpen(h, key)
+ if (i >= 0) _size -= 1
+ }
+ ans
+ }
+ _size += 1
+ val j = i & IndexMask
+ _hashes(j) = h
+ _keys(j) = key.asInstanceOf[AnyRef]
+ _values(j) = value.asInstanceOf[AnyRef]
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ value
+ }
+ else _values(i).asInstanceOf[V]
+ }
+
+ /** Retrieves the value associated with a key, or the default for that type if none exists
+ * (null for AnyRef, 0 for floats and integers).
+ *
+ * Note: this is the fastest way to retrieve a value that may or
+ * may not exist, if the default null/zero is acceptable. For key/value
+ * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
+ */
+ def getOrNull(key: K): V = {
+ val i = seekEntry(hashOf(key), key)
+ (if (i < 0) null else _values(i)).asInstanceOf[V]
+ }
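+
+  // Sketch (illustrative; `m`, `key` and `use` are placeholders), for AnyRef values
+  // where null never appears as a stored value:
+  //   val v = m.getOrNull(key)
+  //   if (v ne null) use(v)   // skips the Option allocation of `get`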
+
+ /** Retrieves the value associated with a key.
+ * If the key does not exist in the map, the `defaultEntry` for that key
+ * will be returned instead; an exception will be thrown if no
+ * `defaultEntry` was supplied.
+ */
+ override def apply(key: K): V = {
+ val i = seekEntry(hashOf(key), key)
+ if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
+ }
+
+ /** Defers to defaultEntry to find a default value for the key. Throws an
+ * exception if no other default behavior was specified.
+ */
+ override def default(key: K): V = defaultEntry(key)
+
+ private def repack(newMask: Int): Unit = {
+ val oh = _hashes
+ val ok = _keys
+ val ov = _values
+ mask = newMask
+ _hashes = new Array[Int](mask+1)
+ _keys = new Array[AnyRef](mask+1)
+ _values = new Array[AnyRef](mask+1)
+ _vacant = 0
+ var i = 0
+ while (i < oh.length) {
+ val h = oh(i)
+ if (h+h != 0) {
+ var e = h & mask
+ var x = 0
+ while (_hashes(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
+ _hashes(e) = h
+ _keys(e) = ok(i)
+ _values(e) = ov(i)
+ }
+ i += 1
+ }
+ }
+
+ /** Repacks the contents of this `AnyRefMap` for maximum efficiency of lookup.
+ *
+ * For maps that undergo a complex creation process with both addition and
+ * removal of keys, and then are used heavily with no further removal of
+ * elements, calling `repack` after the end of the creation can result in
+ * improved performance. Repacking takes time proportional to the number
+ * of entries in the map.
+ */
+ def repack(): Unit = {
+ var m = mask
+ if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
+ while (m > 8 && 8*_size < m) m = m >>> 1
+ repack(m)
+ }
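+
+  // Typical pattern (illustrative): build with mixed additions and removals, then
+  // repack once before a lookup-heavy phase:
+  //   val m = AnyRefMap.empty[String, String]
+  //   /* ... many m(k) = v and m.subtractOne(k) calls ... */
+  //   m.repack()   // one-off O(n) cost; lookups are faster afterwards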
+
+ override def put(key: K, value: V): Option[V] = {
+ val h = hashOf(key)
+ val i = seekEntryOrOpen(h, key)
+ if (i < 0) {
+ val j = i & IndexMask
+ _hashes(j) = h
+ _keys(j) = key
+ _values(j) = value.asInstanceOf[AnyRef]
+ _size += 1
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ None
+ }
+ else {
+ val ans = Some(_values(i).asInstanceOf[V])
+ _hashes(i) = h
+ _values(i) = value.asInstanceOf[AnyRef]
+ ans
+ }
+ }
+
+ /** Updates the map to include a new key-value pair.
+ *
+ * This is the fastest way to add an entry to an `AnyRefMap`.
+ */
+ override def update(key: K, value: V): Unit = {
+ val h = hashOf(key)
+ val i = seekEntryOrOpen(h, key)
+ if (i < 0) {
+ val j = i & IndexMask
+ _hashes(j) = h
+ _keys(j) = key
+ _values(j) = value.asInstanceOf[AnyRef]
+ _size += 1
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ }
+ else {
+ _hashes(i) = h
+ _values(i) = value.asInstanceOf[AnyRef]
+ }
+ }
+
+ /** Adds a new key/value pair to this map and returns the map. */
+ @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3")
+ def +=(key: K, value: V): this.type = { update(key, value); this }
+
+ /** Adds a new key/value pair to this map and returns the map. */
+ @inline final def addOne(key: K, value: V): this.type = { update(key, value); this }
+
+ @inline override final def addOne(kv: (K, V)): this.type = { update(kv._1, kv._2); this }
+
+ def subtractOne(key: K): this.type = {
+ val i = seekEntry(hashOf(key), key)
+ if (i >= 0) {
+ _size -= 1
+ _vacant += 1
+ _hashes(i) = Int.MinValue
+ _keys(i) = null
+ _values(i) = null
+ }
+ this
+ }
+
+ def iterator: Iterator[(K, V)] = new AnyRefMapIterator[(K, V)] {
+ protected def nextResult(k: K, v: V) = (k, v)
+ }
+ override def keysIterator: Iterator[K] = new AnyRefMapIterator[K] {
+ protected def nextResult(k: K, v: V) = k
+ }
+ override def valuesIterator: Iterator[V] = new AnyRefMapIterator[V] {
+ protected def nextResult(k: K, v: V) = v
+ }
+
+ private abstract class AnyRefMapIterator[A] extends AbstractIterator[A] {
+ private[this] val hz = _hashes
+ private[this] val kz = _keys
+ private[this] val vz = _values
+
+ private[this] var index = 0
+
+    def hasNext: Boolean = index < hz.length && {
+      var h = hz(index)
+      while (h+h == 0) {
+        index += 1
+        if (index >= hz.length) return false
+        h = hz(index)
+      }
+      true
+    }
+
+ def next(): A = {
+ if (hasNext) {
+ val ans = nextResult(kz(index).asInstanceOf[K], vz(index).asInstanceOf[V])
+ index += 1
+ ans
+ }
+ else throw new NoSuchElementException("next")
+ }
+
+ protected def nextResult(k: K, v: V): A
+ }
+
+
+ override def foreach[U](f: ((K,V)) => U): Unit = {
+ var i = 0
+ var e = _size
+ while (e > 0) {
+ while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1
+ if (i < _hashes.length) {
+ f((_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V]))
+ i += 1
+ e -= 1
+ }
+ else return
+ }
+ }
+
+ override def foreachEntry[U](f: (K,V) => U): Unit = {
+ var i = 0
+ var e = _size
+ while (e > 0) {
+ while(i < _hashes.length && { val h = _hashes(i); h+h == 0 && i < _hashes.length}) i += 1
+ if (i < _hashes.length) {
+ f(_keys(i).asInstanceOf[K], _values(i).asInstanceOf[V])
+ i += 1
+ e -= 1
+ }
+ else return
+ }
+ }
+
+ override def clone(): AnyRefMap[K, V] = {
+ val hz = java.util.Arrays.copyOf(_hashes, _hashes.length)
+ val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+ val vz = java.util.Arrays.copyOf(_values, _values.length)
+ val arm = new AnyRefMap[K, V](defaultEntry, 1, false)
+ arm.initializeTo(mask, _size, _vacant, hz, kz, vz)
+ arm
+ }
+
+ @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0")
+ override def + [sealed V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv))
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ override def + [sealed V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = {
+ val m = this + elem1 + elem2
+ if(elems.isEmpty) m else m.concat(elems)
+ }
+
+ override def concat[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = {
+ val arm = clone().asInstanceOf[AnyRefMap[K, V2]]
+ xs.iterator.foreach(kv => arm += kv)
+ arm
+ }
+
+ override def ++[sealed V2 >: V](xs: scala.collection.IterableOnce[(K, V2)]^): AnyRefMap[K, V2] = concat(xs)
+
+ @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0")
+ override def updated[sealed V1 >: V](key: K, value: V1): AnyRefMap[K, V1] =
+ clone().asInstanceOf[AnyRefMap[K, V1]].addOne(key, value)
+
+ private[this] def foreachElement[A,B](elems: Array[AnyRef], f: A => B): Unit = {
+ var i,j = 0
+ while (i < _hashes.length & j < _size) {
+ val h = _hashes(i)
+ if (h+h != 0) {
+ j += 1
+ f(elems(i).asInstanceOf[A])
+ }
+ i += 1
+ }
+ }
+
+ /** Applies a function to all keys of this map. */
+ def foreachKey[A](f: K => A): Unit = foreachElement[K,A](_keys, f)
+
+ /** Applies a function to all values of this map. */
+ def foreachValue[A](f: V => A): Unit = foreachElement[V,A](_values, f)
+
+ /** Creates a new `AnyRefMap` with different values.
+ * Unlike `mapValues`, this method generates a new
+ * collection immediately.
+ */
+ def mapValuesNow[sealed V1](f: V => V1): AnyRefMap[K, V1] = {
+ val arm = new AnyRefMap[K,V1](AnyRefMap.exceptionDefault, 1, false)
+ val hz = java.util.Arrays.copyOf(_hashes, _hashes.length)
+ val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+ val vz = new Array[AnyRef](_values.length)
+ var i,j = 0
+ while (i < _hashes.length & j < _size) {
+ val h = _hashes(i)
+ if (h+h != 0) {
+ j += 1
+ vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+ }
+ i += 1
+ }
+ arm.initializeTo(mask, _size, _vacant, hz, kz, vz)
+ arm
+ }
+
+ /** Applies a transformation function to all values stored in this map.
+ * Note: the default, if any, is not transformed.
+ */
+ @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0")
+ @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f)
+
+ /** Applies a transformation function to all values stored in this map.
+ * Note: the default, if any, is not transformed.
+ */
+ def transformValuesInPlace(f: V => V): this.type = {
+ var i,j = 0
+ while (i < _hashes.length & j < _size) {
+ val h = _hashes(i)
+ if (h+h != 0) {
+ j += 1
+ _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+ }
+ i += 1
+ }
+ this
+ }
+
+ // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override)
+ def map[K2 <: AnyRef, sealed V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] =
+ AnyRefMap.from(new View.Map(this, f))
+ def flatMap[K2 <: AnyRef, sealed V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] =
+ AnyRefMap.from(new View.FlatMap(this, f))
+ def collect[K2 <: AnyRef, sealed V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] =
+ strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf)
+
+ override def clear(): Unit = {
+ import java.util.Arrays.fill
+ fill(_keys, null)
+ fill(_values, null)
+ fill(_hashes, 0)
+ _size = 0
+ _vacant = 0
+ }
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(AnyRefMap.toFactory[K, V](AnyRefMap), this)
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "AnyRefMap"
+}
+
+object AnyRefMap {
+ private final val IndexMask = 0x3FFFFFFF
+ private final val MissingBit = 0x80000000
+ private final val VacantBit = 0x40000000
+ private final val MissVacant = 0xC0000000
+
+ private class ExceptionDefault extends (Any -> Nothing) with Serializable {
+ def apply(k: Any): Nothing = throw new NoSuchElementException(if (k == null) "(null)" else k.toString)
+ }
+ private val exceptionDefault = new ExceptionDefault
+
+ /** A builder for instances of `AnyRefMap`.
+ *
+ * This builder can be reused to create multiple instances.
+ */
+ final class AnyRefMapBuilder[K <: AnyRef, sealed V] extends ReusableBuilder[(K, V), AnyRefMap[K, V]] {
+ private[collection] var elems: AnyRefMap[K, V] = new AnyRefMap[K, V]
+ def addOne(entry: (K, V)): this.type = {
+ elems += entry
+ this
+ }
+ def clear(): Unit = elems = new AnyRefMap[K, V]
+ def result(): AnyRefMap[K, V] = elems
+ override def knownSize: Int = elems.knownSize
+ }
+
+ /** Creates a new `AnyRefMap` with zero or more key/value pairs. */
+ def apply[K <: AnyRef, sealed V](elems: (K, V)*): AnyRefMap[K, V] = buildFromIterableOnce(elems)
+
+ def newBuilder[K <: AnyRef, sealed V]: ReusableBuilder[(K, V), AnyRefMap[K, V]] = new AnyRefMapBuilder[K, V]
+
+ private def buildFromIterableOnce[K <: AnyRef, sealed V](elems: IterableOnce[(K, V)]^): AnyRefMap[K, V] = {
+ var sz = elems.knownSize
+ if(sz < 0) sz = 4
+ val arm = new AnyRefMap[K, V](sz * 2)
+ elems.iterator.foreach{ case (k,v) => arm(k) = v }
+ if (arm.size < (sz>>3)) arm.repack()
+ arm
+ }
+
+ /** Creates a new empty `AnyRefMap`. */
+ def empty[K <: AnyRef, sealed V]: AnyRefMap[K, V] = new AnyRefMap[K, V]
+
+ /** Creates a new empty `AnyRefMap` with the supplied default */
+ def withDefault[K <: AnyRef, sealed V](default: K -> V): AnyRefMap[K, V] = new AnyRefMap[K, V](default)
+
+ /** Creates a new `AnyRefMap` from an existing source collection. A source collection
+ * which is already an `AnyRefMap` gets cloned.
+ *
+ * @param source Source collection
+ * @tparam K the type of the keys
+ * @tparam V the type of the values
+ * @return a new `AnyRefMap` with the elements of `source`
+ */
+ def from[K <: AnyRef, sealed V](source: IterableOnce[(K, V)]^): AnyRefMap[K, V] = source match {
+ case source: AnyRefMap[_, _] => source.clone().asInstanceOf[AnyRefMap[K, V]]
+ case _ => buildFromIterableOnce(source)
+ }
+
+ /** Creates a new `AnyRefMap` from arrays of keys and values.
+ * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`.
+ */
+ def fromZip[K <: AnyRef, sealed V](keys: Array[K], values: Array[V]): AnyRefMap[K, V] = {
+ val sz = math.min(keys.length, values.length)
+ val arm = new AnyRefMap[K, V](sz * 2)
+ var i = 0
+ while (i < sz) { arm(keys(i)) = values(i); i += 1 }
+ if (arm.size < (sz>>3)) arm.repack()
+ arm
+ }
+
+ /** Creates a new `AnyRefMap` from keys and values.
+ * Equivalent to but more efficient than `AnyRefMap((keys zip values): _*)`.
+ */
+ def fromZip[K <: AnyRef, sealed V](keys: Iterable[K]^, values: Iterable[V]^): AnyRefMap[K, V] = {
+ val sz = math.min(keys.size, values.size)
+ val arm = new AnyRefMap[K, V](sz * 2)
+ val ki = keys.iterator
+ val vi = values.iterator
+ while (ki.hasNext && vi.hasNext) arm(ki.next()) = vi.next()
+ if (arm.size < (sz >> 3)) arm.repack()
+ arm
+ }
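+
+  // Sketch (illustrative): AnyRefMap.fromZip(Array("a", "b"), Array("x", "y"))
+  // builds the same map as AnyRefMap("a" -> "x", "b" -> "y") without
+  // materializing intermediate tuples.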
+
+ implicit def toFactory[K <: AnyRef, sealed V](dummy: AnyRefMap.type): Factory[(K, V), AnyRefMap[K, V]] = ToFactory.asInstanceOf[Factory[(K, V), AnyRefMap[K, V]]]
+
+ @SerialVersionUID(3L)
+ private[this] object ToFactory extends Factory[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] with Serializable {
+ def fromSpecific(it: IterableOnce[(AnyRef, AnyRef)]^): AnyRefMap[AnyRef, AnyRef] = AnyRefMap.from[AnyRef, AnyRef](it)
+ def newBuilder: Builder[(AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] = AnyRefMap.newBuilder[AnyRef, AnyRef]
+ }
+
+ implicit def toBuildFrom[K <: AnyRef, sealed V](factory: AnyRefMap.type): BuildFrom[Any, (K, V), AnyRefMap[K, V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (K, V), AnyRefMap[K, V]]]
+ private[this] object ToBuildFrom extends BuildFrom[Any, (AnyRef, AnyRef), AnyRefMap[AnyRef, AnyRef]] {
+ def fromSpecific(from: Any)(it: IterableOnce[(AnyRef, AnyRef)]^) = AnyRefMap.from(it)
+ def newBuilder(from: Any) = AnyRefMap.newBuilder[AnyRef, AnyRef]
+ }
+
+ implicit def iterableFactory[K <: AnyRef, sealed V]: Factory[(K, V), AnyRefMap[K, V]] = toFactory[K, V](this)
+ implicit def buildFromAnyRefMap[K <: AnyRef, sealed V]: BuildFrom[AnyRefMap[_, _], (K, V), AnyRefMap[K, V]] = toBuildFrom(this)
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala
new file mode 100644
index 000000000000..8fa1e6edd566
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuffer.scala
@@ -0,0 +1,406 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import java.util.Arrays
+
+import scala.annotation.nowarn
+import scala.annotation.tailrec
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
+
+/** An implementation of the `Buffer` class using an array to
+ * represent the assembled sequence internally. Append, update and random
+ * access take constant time (amortized time). Prepends and removes are
+ * linear in the buffer size.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]]
+ * section on `Array Buffers` for more information.
+ *
+ *
+ * @tparam A the type of this arraybuffer's elements.
+ *
+ * @define Coll `mutable.ArrayBuffer`
+ * @define coll array buffer
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@SerialVersionUID(-1582447879429021880L)
+class ArrayBuffer[sealed A] private (initialElements: Array[AnyRef], initialSize: Int)
+ extends AbstractBuffer[A]
+ with IndexedBuffer[A]
+ with IndexedSeqOps[A, ArrayBuffer, ArrayBuffer[A]]
+ with StrictOptimizedSeqOps[A, ArrayBuffer, ArrayBuffer[A]]
+ with IterableFactoryDefaults[A, ArrayBuffer]
+ with DefaultSerializable {
+
+ def this() = this(new Array[AnyRef](ArrayBuffer.DefaultInitialSize), 0)
+
+ def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0)
+
+ @transient private[this] var mutationCount: Int = 0
+
+ // needs to be `private[collection]` or `protected[collection]` for parallel-collections
+ protected[collection] var array: Array[AnyRef] = initialElements
+ protected var size0 = initialSize
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+ import scala.collection.convert.impl._
+ shape.parUnbox(new ObjectArrayStepper(array, 0, length).asInstanceOf[AnyStepper[A] with EfficientSplit])
+ }
+
+ override def knownSize: Int = super[IndexedSeqOps].knownSize
+
+ /** Ensure that the internal array has at least `n` cells. */
+ protected def ensureSize(n: Int): Unit = {
+ array = ArrayBuffer.ensureSize(array, size0, n)
+ }
+
+ // TODO 3.T: should be `protected`, perhaps `protected[this]`
+ /** Ensure that the internal array has at least `n` additional cells more than `size0`. */
+ private[mutable] def ensureAdditionalSize(n: Int): Unit = {
+ // `.toLong` to ensure `Long` arithmetic is used and prevent `Int` overflow
+ array = ArrayBuffer.ensureSize(array, size0, size0.toLong + n)
+ }
+
+ def sizeHint(size: Int): Unit =
+ if(size > length && size >= 1) ensureSize(size)
+
+ /** Reduce length to `n`, nulling out all dropped elements */
+ private def reduceToSize(n: Int): Unit = {
+ mutationCount += 1
+ Arrays.fill(array, n, size0, null)
+ size0 = n
+ }
+
+ /** Trims the ArrayBuffer to an appropriate size for the current
+ * number of elements (rounding up to the next natural size),
+ * which may replace the array by a shorter one.
+ * This allows releasing some unused memory.
+ */
+ def trimToSize(): Unit = {
+ resize(length)
+ }
+
+ /** Trims the `array` buffer size down to either a power of 2
+ * or Int.MaxValue while keeping first `requiredLength` elements.
+ */
+ private def resize(requiredLength: Int): Unit =
+ array = ArrayBuffer.downsize(array, requiredLength)
+
+ @inline private def checkWithinBounds(lo: Int, hi: Int) = {
+ if (lo < 0) throw new IndexOutOfBoundsException(s"$lo is out of bounds (min 0, max ${size0 - 1})")
+ if (hi > size0) throw new IndexOutOfBoundsException(s"${hi - 1} is out of bounds (min 0, max ${size0 - 1})")
+ }
+
+ def apply(n: Int): A = {
+ checkWithinBounds(n, n + 1)
+ array(n).asInstanceOf[A]
+ }
+
+ def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = {
+ checkWithinBounds(index, index + 1)
+ mutationCount += 1
+ array(index) = elem.asInstanceOf[AnyRef]
+ }
+
+ def length = size0
+
+ // TODO: return `IndexedSeqView` rather than `ArrayBufferView`
+ override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount)
+
+ override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer
+
+ /** Note: This does not actually resize the internal representation.
+ * See clearAndShrink if you want to also resize internally
+ */
+ def clear(): Unit = reduceToSize(0)
+
+  /**
+   * Clears this buffer and shrinks the internal array to `size`
+   * (rounding up to the next natural size).
+   * @param size the size to shrink the internal array down to
+   */
+ def clearAndShrink(size: Int = ArrayBuffer.DefaultInitialSize): this.type = {
+ clear()
+ resize(size)
+ this
+ }
+
+ def addOne(elem: A): this.type = {
+ mutationCount += 1
+ ensureAdditionalSize(1)
+ val oldSize = size0
+ size0 = oldSize + 1
+ this(oldSize) = elem
+ this
+ }
+
+ // Overridden to use array copying for efficiency where possible.
+ override def addAll(elems: IterableOnce[A]^): this.type = {
+ elems match {
+ case elems: ArrayBuffer[_] =>
+ val elemsLength = elems.size0
+ if (elemsLength > 0) {
+ mutationCount += 1
+ ensureAdditionalSize(elemsLength)
+ Array.copy(elems.array, 0, array, length, elemsLength)
+ size0 = length + elemsLength
+ }
+ case _ => super.addAll(elems)
+ }
+ this
+ }
+
+ def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = {
+ checkWithinBounds(index, index)
+ mutationCount += 1
+ ensureAdditionalSize(1)
+ Array.copy(array, index, array, index + 1, size0 - index)
+ size0 += 1
+ this(index) = elem
+ }
+
+ def prepend(elem: A): this.type = {
+ insert(0, elem)
+ this
+ }
+
+ def insertAll(@deprecatedName("n", "2.13.0") index: Int, elems: IterableOnce[A]^): Unit = {
+ checkWithinBounds(index, index)
+ elems match {
+ case elems: collection.Iterable[A] =>
+ val elemsLength = elems.size
+ if (elemsLength > 0) {
+ mutationCount += 1
+ ensureAdditionalSize(elemsLength)
+ val len = size0
+ Array.copy(array, index, array, index + elemsLength, len - index)
+ // if `elems eq this`, this copy is safe because
+ // - `elems.array eq this.array`
+ // - we didn't overwrite the values being inserted after moving them in
+ // the previous line
+ // - `copyElemsToArray` will call `System.arraycopy`
+ // - `System.arraycopy` will effectively "read" all the values before
+      //   overwriting any of them when two arrays are the same reference
+ val actual = IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength)
+ if (actual != elemsLength) throw new IllegalStateException(s"Copied $actual of $elemsLength")
+ size0 = len + elemsLength // update size AFTER the copy, in case we're inserting a proxy
+ }
+ case _ => insertAll(index, ArrayBuffer.from(elems))
+ }
+ }
+
+ /** Note: This does not actually resize the internal representation.
+ * See trimToSize if you want to also resize internally
+ */
+ def remove(@deprecatedName("n", "2.13.0") index: Int): A = {
+ checkWithinBounds(index, index + 1)
+ val res = this(index)
+ Array.copy(array, index + 1, array, index, size0 - (index + 1))
+ reduceToSize(size0 - 1)
+ res
+ }
+
+ /** Note: This does not actually resize the internal representation.
+ * See trimToSize if you want to also resize internally
+ */
+ def remove(@deprecatedName("n", "2.13.0") index: Int, count: Int): Unit =
+ if (count > 0) {
+ checkWithinBounds(index, index + count)
+ Array.copy(array, index + count, array, index, size0 - (index + count))
+ reduceToSize(size0 - count)
+ } else if (count < 0) {
+ throw new IllegalArgumentException("removing negative number of elements: " + count)
+ }
+
+ @deprecated("Use 'this' instance instead", "2.13.0")
+ @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0")
+ @inline def result(): this.type = this
+
+ @deprecated("Use 'new GrowableBuilder(this).mapResult(f)' instead", "2.13.0")
+ @deprecatedOverriding("ArrayBuffer[A] no longer extends Builder[A, ArrayBuffer[A]]", "2.13.0")
+ @inline def mapResult[NewTo](f: (ArrayBuffer[A]) => NewTo): Builder[A, NewTo]^{f} = new GrowableBuilder[A, ArrayBuffer[A]](this).mapResult(f)
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "ArrayBuffer"
+
+ override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = {
+ val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len)
+ if(copied > 0) {
+ Array.copy(array, 0, xs, start, copied)
+ }
+ copied
+ }
+
+ /** Sorts this $coll in place according to an Ordering.
+ *
+ * @see [[scala.collection.mutable.IndexedSeqOps.sortInPlace]]
+ * @param ord the ordering to be used to compare elements.
+ * @return modified input $coll sorted according to the ordering `ord`.
+ */
+ override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = {
+ if (length > 1) {
+ mutationCount += 1
+ scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]], 0, length)
+ }
+ this
+ }
+
+ @tailrec private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B =
+ if (start == end) z
+ else foldl(start + 1, end, op(z, array(start).asInstanceOf[A]), op)
+
+ @tailrec private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B =
+ if (start == end) z
+ else foldr(start, end - 1, op(array(end - 1).asInstanceOf[A], z), op)
+
+ override def foldLeft[B](z: B)(op: (B, A) => B): B = foldl(0, length, z, op)
+
+ override def foldRight[B](z: B)(op: (A, B) => B): B = foldr(0, length, z, op)
+
+ override def reduceLeft[B >: A](op: (B, A) => B): B = if (length > 0) foldl(1, length, array(0).asInstanceOf[B], op) else super.reduceLeft(op)
+
+ override def reduceRight[B >: A](op: (A, B) => B): B = if (length > 0) foldr(0, length - 1, array(length - 1).asInstanceOf[B], op) else super.reduceRight(op)
+}
+
+/**
+ * Factory object for the `ArrayBuffer` class.
+ *
+ * $factoryInfo
+ *
+ * @define coll array buffer
+ * @define Coll `mutable.ArrayBuffer`
+ */
+@SerialVersionUID(3L)
+object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] {
+ final val DefaultInitialSize = 16
+ private[this] val emptyArray = new Array[AnyRef](0)
+
+ def from[sealed B](coll: collection.IterableOnce[B]^): ArrayBuffer[B] = {
+ val k = coll.knownSize
+ if (k >= 0) {
+ // Avoid reallocation of buffer if length is known
+ val array = ensureSize(emptyArray, 0, k) // don't duplicate sizing logic, and check VM array size limit
+ val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]])
+ if (actual != k) throw new IllegalStateException(s"Copied $actual of $k")
+ new ArrayBuffer[B](array, k)
+ }
+ else new ArrayBuffer[B] ++= coll
+ }
+
+ def newBuilder[sealed A]: Builder[A, ArrayBuffer[A]] =
+ new GrowableBuilder[A, ArrayBuffer[A]](empty) {
+ override def sizeHint(size: Int): Unit = elems.ensureSize(size)
+ }
+
+ def empty[sealed A]: ArrayBuffer[A] = new ArrayBuffer[A]()
+
+ /**
+ * @param arrayLen the length of the backing array
+ * @param targetLen the minimum length to resize up to
+ * @return -1 if no resizing is needed, or the size for the new array otherwise
+ */
+ private def resizeUp(arrayLen: Long, targetLen: Long): Int = {
+ if (targetLen <= arrayLen) -1
+ else {
+ if (targetLen > Int.MaxValue) throw new Exception(s"Collections cannot have more than ${Int.MaxValue} elements")
+      IterableOnce.checkArraySizeWithinVMLimit(targetLen.toInt) // safe because `targetLen <= Int.MaxValue`
+
+ val newLen = math.max(targetLen, math.max(arrayLen * 2, DefaultInitialSize))
+ math.min(newLen, scala.runtime.PStatics.VM_MaxArraySize).toInt
+ }
+ }
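+  // Worked example (added for exposition): arrayLen = 16, targetLen = 33 gives
+  // newLen = max(33, max(16 * 2, DefaultInitialSize)) = 33, then capped at the VM
+  // array-size limit; growing by at least 2x keeps repeated appends amortized O(1).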
+ // if necessary, copy (curSize elements of) the array to a new array of capacity n.
+ // Should use Array.copyOf(array, resizeEnsuring(array.length))?
+ private def ensureSize(array: Array[AnyRef], curSize: Int, targetSize: Long): Array[AnyRef] = {
+ val newLen = resizeUp(array.length, targetSize)
+ if (newLen < 0) array
+ else {
+ val res = new Array[AnyRef](newLen)
+ System.arraycopy(array, 0, res, 0, curSize)
+ res
+ }
+ }
+
+ /**
+ * @param arrayLen the length of the backing array
+ * @param targetLen the length to resize down to, if smaller than `arrayLen`
+ * @return -1 if no resizing is needed, or the size for the new array otherwise
+ */
+ private def resizeDown(arrayLen: Int, targetLen: Int): Int =
+ if (targetLen >= arrayLen) -1 else math.max(targetLen, 0)
+ private def downsize(array: Array[AnyRef], targetSize: Int): Array[AnyRef] = {
+ val newLen = resizeDown(array.length, targetSize)
+ if (newLen < 0) array
+ else if (newLen == 0) emptyArray
+ else {
+ val res = new Array[AnyRef](newLen)
+ System.arraycopy(array, 0, res, 0, targetSize)
+ res
+ }
+ }
+}
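+
+// Usage sketch (illustrative):
+//   val buf = ArrayBuffer(1, 2, 3)
+//   buf += 4               // amortized O(1) append
+//   buf.insert(0, 0)       // O(n): shifts every existing element right
+//   buf.trimToSize()       // release slack capacity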
+
+// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view`
+final class ArrayBufferView[sealed A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () -> Int)
+ extends AbstractIndexedSeqView[A], Pure {
+ /* Removed since it poses problems for capture checking
+ @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7")
+ def this(array: Array[AnyRef], length: Int) = {
+ // this won't actually track mutation, but it would be a pain to have the implementation
+ // check if we have a method to get the current mutation count or not on every method and
+ // change what it does based on that. hopefully no one ever calls this.
+ this({
+ val _array: Array[Object] = array
+ val _length = length
+ new ArrayBuffer[A](0) {
+ this.array = _array
+ this.size0 = _length
+ }: ArrayBuffer[A]
+ }, () => 0)
+ }*/
+
+ @deprecated("never intended to be public", since = "2.13.7")
+ def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]]
+
+ @throws[IndexOutOfBoundsException]
+ def apply(n: Int): A = underlying(n)
+ def length: Int = underlying.length
+ override protected[this] def className = "ArrayBufferView"
+
+ // we could inherit all these from `CheckedIndexedSeqView`, except this class is public
+ override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount())
+ override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount())
+
+ override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount)
+ override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount)
+ override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount)
+ override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount)
+ override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount)
+ override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount)
+ override def map[B](f: A => B): IndexedSeqView[B]^{f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount)
+ override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount)
+ override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount)
+ override def tapEach[U](f: A => U): IndexedSeqView[A]^{f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount)
+
+ override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount)
+ override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount)
+ override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount)
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala
new file mode 100644
index 000000000000..0620d3d23061
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/ArrayBuilder.scala
@@ -0,0 +1,523 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import language.experimental.captureChecking
+import scala.reflect.ClassTag
+
+/** A builder class for arrays.
+ *
+ * @tparam T the type of the elements for the builder.
+ */
+@SerialVersionUID(3L)
+sealed abstract class ArrayBuilder[sealed T]
+ extends ReusableBuilder[T, Array[T]]
+ with Serializable {
+ protected[this] var capacity: Int = 0
+ protected[this] def elems: Array[T]
+ protected var size: Int = 0
+
+ def length: Int = size
+
+ override def knownSize: Int = size
+
+ protected[this] final def ensureSize(size: Int): Unit = {
+ if (capacity < size || capacity == 0) {
+ var newsize = if (capacity == 0) 16 else capacity * 2
+ while (newsize < size) newsize *= 2
+ resize(newsize)
+ }
+ }
+
+ override final def sizeHint(size: Int): Unit =
+ if (capacity < size) resize(size)
+
+ def clear(): Unit = size = 0
+
+ protected[this] def resize(size: Int): Unit
+
+ /** Add all elements of an array */
+ def addAll(xs: Array[_ <: T]): this.type = addAll(xs, 0, xs.length)
+
+ /** Add a slice of an array */
+ def addAll(xs: Array[_ <: T], offset: Int, length: Int): this.type = {
+ ensureSize(this.size + length)
+ Array.copy(xs, offset, elems, this.size, length)
+ size += length
+ this
+ }
+
+ override def addAll(xs: IterableOnce[T]^): this.type = {
+ val k = xs.knownSize
+ if (k > 0) {
+ ensureSize(this.size + k)
+ val actual = IterableOnce.copyElemsToArray(xs, elems, this.size)
+ if (actual != k) throw new IllegalStateException(s"Copied $actual of $k")
+ size += k
+ } else if (k < 0) super.addAll(xs)
+ this
+ }
+}
+
+/** A companion object for array builders.
+ */
+object ArrayBuilder {
+
+ /** Creates a new arraybuilder of type `T`.
+ *
+ * @tparam T type of the elements for the array builder, with a `ClassTag` context bound.
+ * @return a new empty array builder.
+ */
+ @inline def make[T: ClassTag]: ArrayBuilder[T] = {
+ val tag = implicitly[ClassTag[T]]
+ tag.runtimeClass match {
+ case java.lang.Byte.TYPE => new ArrayBuilder.ofByte().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Short.TYPE => new ArrayBuilder.ofShort().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Character.TYPE => new ArrayBuilder.ofChar().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Integer.TYPE => new ArrayBuilder.ofInt().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Long.TYPE => new ArrayBuilder.ofLong().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Float.TYPE => new ArrayBuilder.ofFloat().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Double.TYPE => new ArrayBuilder.ofDouble().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Boolean.TYPE => new ArrayBuilder.ofBoolean().asInstanceOf[ArrayBuilder[T]]
+ case java.lang.Void.TYPE => new ArrayBuilder.ofUnit().asInstanceOf[ArrayBuilder[T]]
+ case _ => new ArrayBuilder.ofRef[T with AnyRef]()(tag.asInstanceOf[ClassTag[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]]
+ }
+ }
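+
+  // Usage sketch (illustrative): the ClassTag picks the unboxed builder variant:
+  //   val b = ArrayBuilder.make[Int]   // dispatches to ArrayBuilder.ofInt
+  //   b += 1; b += 2
+  //   val arr: Array[Int] = b.result()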
+
+ /** A class for array builders for arrays of reference types.
+ *
+ * This builder can be reused.
+ *
+ * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound.
+ */
+ @SerialVersionUID(3L)
+ final class ofRef[T <: AnyRef](implicit ct: ClassTag[T]) extends ArrayBuilder[T] {
+
+ protected var elems: Array[T] = _
+
+ private def mkArray(size: Int): Array[T] = {
+ if (capacity == size && capacity > 0) elems
+ else if (elems eq null) new Array[T](size)
+ else java.util.Arrays.copyOf[T](elems, size)
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: T): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[T] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def clear(): Unit = {
+ super.clear()
+ if(elems ne null) java.util.Arrays.fill(elems.asInstanceOf[Array[AnyRef]], null)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofRef[_] => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofRef"
+ }
+
+ /** A class for array builders for arrays of `byte`s. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofByte extends ArrayBuilder[Byte] {
+
+ protected var elems: Array[Byte] = _
+
+ private def mkArray(size: Int): Array[Byte] = {
+ val newelems = new Array[Byte](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Byte): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Byte] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofByte => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofByte"
+ }
+
+ /** A class for array builders for arrays of `short`s. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofShort extends ArrayBuilder[Short] {
+
+ protected var elems: Array[Short] = _
+
+ private def mkArray(size: Int): Array[Short] = {
+ val newelems = new Array[Short](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Short): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Short] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofShort => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofShort"
+ }
+
+ /** A class for array builders for arrays of `char`s. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofChar extends ArrayBuilder[Char] {
+
+ protected var elems: Array[Char] = _
+
+ private def mkArray(size: Int): Array[Char] = {
+ val newelems = new Array[Char](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Char): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Char] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofChar => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofChar"
+ }
+
+ /** A class for array builders for arrays of `int`s. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofInt extends ArrayBuilder[Int] {
+
+ protected var elems: Array[Int] = _
+
+ private def mkArray(size: Int): Array[Int] = {
+ val newelems = new Array[Int](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Int): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Int] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofInt => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofInt"
+ }
+
+ /** A class for array builders for arrays of `long`s. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofLong extends ArrayBuilder[Long] {
+
+ protected var elems: Array[Long] = _
+
+ private def mkArray(size: Int): Array[Long] = {
+ val newelems = new Array[Long](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Long): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Long] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofLong => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofLong"
+ }
+
+ /** A class for array builders for arrays of `float`s. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofFloat extends ArrayBuilder[Float] {
+
+ protected var elems: Array[Float] = _
+
+ private def mkArray(size: Int): Array[Float] = {
+ val newelems = new Array[Float](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Float): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Float] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofFloat => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofFloat"
+ }
+
+ /** A class for array builders for arrays of `double`s. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofDouble extends ArrayBuilder[Double] {
+
+ protected var elems: Array[Double] = _
+
+ private def mkArray(size: Int): Array[Double] = {
+ val newelems = new Array[Double](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Double): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Double] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofDouble => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofDouble"
+ }
+
+ /** A class for array builders for arrays of `boolean`s. It can be reused. */
+ @SerialVersionUID(3L)
+ class ofBoolean extends ArrayBuilder[Boolean] {
+
+ protected var elems: Array[Boolean] = _
+
+ private def mkArray(size: Int): Array[Boolean] = {
+ val newelems = new Array[Boolean](size)
+ if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size)
+ newelems
+ }
+
+ protected[this] def resize(size: Int): Unit = {
+ elems = mkArray(size)
+ capacity = size
+ }
+
+ def addOne(elem: Boolean): this.type = {
+ ensureSize(size + 1)
+ elems(size) = elem
+ size += 1
+ this
+ }
+
+ def result(): Array[Boolean] = {
+ if (capacity != 0 && capacity == size) {
+ capacity = 0
+ val res = elems
+ elems = null
+ res
+ }
+ else mkArray(size)
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofBoolean => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofBoolean"
+ }
+
+ /** A class for array builders for arrays of `Unit` type. It can be reused. */
+ @SerialVersionUID(3L)
+ final class ofUnit extends ArrayBuilder[Unit] {
+
+ protected def elems: Array[Unit] = throw new UnsupportedOperationException()
+
+ def addOne(elem: Unit): this.type = {
+ size += 1
+ this
+ }
+
+ override def addAll(xs: IterableOnce[Unit]^): this.type = {
+ size += xs.iterator.size
+ this
+ }
+
+ override def addAll(xs: Array[_ <: Unit], offset: Int, length: Int): this.type = {
+ size += length
+ this
+ }
+
+ def result() = {
+ val ans = new Array[Unit](size)
+ var i = 0
+ while (i < size) { ans(i) = (); i += 1 }
+ ans
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofUnit => (size == x.size)
+ case _ => false
+ }
+
+ protected[this] def resize(size: Int): Unit = ()
+
+ override def toString = "ArrayBuilder.ofUnit"
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala
new file mode 100644
index 000000000000..f22aacec65c5
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/ArrayDeque.scala
@@ -0,0 +1,646 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.annotation.nowarn
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.reflect.ClassTag
+import language.experimental.captureChecking
+
+/** An implementation of a double-ended queue that internally uses a resizable circular buffer.
+ *
+ * Append, prepend, removeHead, removeLast and random-access (indexed-lookup and indexed-replacement)
+ * take amortized constant time. In general, removals and insertions at i-th index are O(min(i, n-i))
+ * and thus insertions and removals from end/beginning are fast.
+ *
+ * @note Subclasses ''must'' override the `ofArray` protected method to return a more specific type.
+ *
+ * @tparam A the type of this ArrayDeque's elements.
+ *
+ * @define Coll `mutable.ArrayDeque`
+ * @define coll array deque
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+class ArrayDeque[sealed A] protected (
+ protected var array: Array[AnyRef],
+ private[ArrayDeque] var start: Int,
+ private[ArrayDeque] var end: Int
+) extends AbstractBuffer[A]
+ with IndexedBuffer[A]
+ with IndexedSeqOps[A, ArrayDeque, ArrayDeque[A]]
+ with StrictOptimizedSeqOps[A, ArrayDeque, ArrayDeque[A]]
+ with IterableFactoryDefaults[A, ArrayDeque]
+ with ArrayDequeOps[A, ArrayDeque, ArrayDeque[A]]
+ with Cloneable[ArrayDeque[A]]
+ with DefaultSerializable {
+
+ reset(array, start, end)
+
+ private[this] def reset(array: Array[AnyRef], start: Int, end: Int) = {
+    assert((array.length & (array.length - 1)) == 0, "Array.length must be a power of 2")
+ requireBounds(idx = start, until = array.length)
+ requireBounds(idx = end, until = array.length)
+ this.array = array
+ this.start = start
+ this.end = end
+ }
+
+ def this(initialSize: Int = ArrayDeque.DefaultInitialSize) = this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
+
+ override def knownSize: Int = super[IndexedSeqOps].knownSize
+
+  // No-op override to allow for a more efficient stepper in a minor release.
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = super.stepper(shape)
+
+ def apply(idx: Int): A = {
+ requireBounds(idx)
+ _get(idx)
+ }
+
+ def update(idx: Int, elem: A): Unit = {
+ requireBounds(idx)
+ _set(idx, elem)
+ }
+
+ def addOne(elem: A): this.type = {
+ ensureSize(length + 1)
+ appendAssumingCapacity(elem)
+ }
+
+ def prepend(elem: A): this.type = {
+ ensureSize(length + 1)
+ prependAssumingCapacity(elem)
+ }
+
+ @inline private[ArrayDeque] def appendAssumingCapacity(elem: A): this.type = {
+ array(end) = elem.asInstanceOf[AnyRef]
+ end = end_+(1)
+ this
+ }
+
+ @inline private[ArrayDeque] def prependAssumingCapacity(elem: A): this.type = {
+ start = start_-(1)
+ array(start) = elem.asInstanceOf[AnyRef]
+ this
+ }
+
+ override def prependAll(elems: IterableOnce[A]^): this.type = {
+ val it = elems.iterator
+ if (it.nonEmpty) {
+ val n = length
+ // The following code resizes the current collection at most once and traverses elems at most twice
+ elems.knownSize match {
+        // Size is too expensive to compute AND we can traverse it only once, so retry with an IndexedSeq
+ case srcLength if srcLength < 0 => prependAll(it.to(IndexedSeq: Factory[A, IndexedSeq[A]] /* type ascription needed by Dotty */))
+
+ // We know for sure we need to resize to hold everything, might as well resize and memcopy upfront
+ case srcLength if mustGrow(srcLength + n) =>
+ val finalLength = srcLength + n
+ val array2 = ArrayDeque.alloc(finalLength)
+ it.copyToArray(array2.asInstanceOf[Array[A]])
+ copySliceToArray(srcStart = 0, dest = array2, destStart = srcLength, maxItems = n)
+ reset(array = array2, start = 0, end = finalLength)
+
+ // Just fill up from (start - srcLength) to (start - 1) and move back start
+ case srcLength =>
+ // Optimized version of `elems.zipWithIndex.foreach((elem, i) => _set(i - srcLength, elem))`
+ var i = 0
+ while(i < srcLength) {
+ _set(i - srcLength, it.next())
+ i += 1
+ }
+ start = start_-(srcLength)
+ }
+ }
+ this
+ }
+
+ override def addAll(elems: IterableOnce[A]^): this.type = {
+ elems.knownSize match {
+ case srcLength if srcLength > 0 =>
+ ensureSize(srcLength + length)
+ elems.iterator.foreach(appendAssumingCapacity)
+ case _ => elems.iterator.foreach(+=)
+ }
+ this
+ }
+
+ def insert(idx: Int, elem: A): Unit = {
+ requireBounds(idx, length+1)
+ val n = length
+ if (idx == 0) {
+ prepend(elem)
+ } else if (idx == n) {
+ addOne(elem)
+ } else {
+ val finalLength = n + 1
+ if (mustGrow(finalLength)) {
+ val array2 = ArrayDeque.alloc(finalLength)
+ copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx)
+ array2(idx) = elem.asInstanceOf[AnyRef]
+ copySliceToArray(srcStart = idx, dest = array2, destStart = idx + 1, maxItems = n)
+ reset(array = array2, start = 0, end = finalLength)
+ } else if (n <= idx * 2) {
+ var i = n - 1
+ while(i >= idx) {
+ _set(i + 1, _get(i))
+ i -= 1
+ }
+ end = end_+(1)
+ i += 1
+ _set(i, elem)
+ } else {
+ var i = 0
+ while(i < idx) {
+ _set(i - 1, _get(i))
+ i += 1
+ }
+ start = start_-(1)
+ _set(i, elem)
+ }
+ }
+ }
+
+ def insertAll(idx: Int, elems: IterableOnce[A]^): Unit = {
+ requireBounds(idx, length+1)
+ val n = length
+ if (idx == 0) {
+ prependAll(elems)
+ } else if (idx == n) {
+ addAll(elems)
+ } else {
+ // Get both an iterator and the length of the source (by copying the source to an IndexedSeq if needed)
+ val (it, srcLength) = {
+ val _srcLength = elems.knownSize
+ if (_srcLength >= 0) (elems.iterator, _srcLength)
+ else {
+ val indexed = IndexedSeq.from(elems)
+ (indexed.iterator, indexed.size)
+ }
+ }
+ if (it.nonEmpty) {
+ val finalLength = srcLength + n
+ // Either we resize right away or move prefix left or suffix right
+ if (mustGrow(finalLength)) {
+ val array2 = ArrayDeque.alloc(finalLength)
+ copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx)
+ it.copyToArray(array2.asInstanceOf[Array[A]], idx)
+ copySliceToArray(srcStart = idx, dest = array2, destStart = idx + srcLength, maxItems = n)
+ reset(array = array2, start = 0, end = finalLength)
+ } else if (2*idx >= n) { // Cheaper to shift the suffix right
+ var i = n - 1
+ while(i >= idx) {
+ _set(i + srcLength, _get(i))
+ i -= 1
+ }
+ end = end_+(srcLength)
+ while(it.hasNext) {
+ i += 1
+ _set(i, it.next())
+ }
+ } else { // Cheaper to shift prefix left
+ var i = 0
+ while(i < idx) {
+ _set(i - srcLength, _get(i))
+ i += 1
+ }
+ start = start_-(srcLength)
+ while(it.hasNext) {
+ _set(i, it.next())
+ i += 1
+ }
+ }
+ }
+ }
+ }
+
+ def remove(idx: Int, count: Int): Unit = {
+ if (count > 0) {
+ requireBounds(idx)
+ val n = length
+ val removals = Math.min(n - idx, count)
+ val finalLength = n - removals
+ val suffixStart = idx + removals
+ // If we know we can resize after removing, do it right away using arrayCopy
+ // Else, choose the shorter: either move the prefix (0 until idx) right OR the suffix (idx+removals until n) left
+ if (shouldShrink(finalLength)) {
+ val array2 = ArrayDeque.alloc(finalLength)
+ copySliceToArray(srcStart = 0, dest = array2, destStart = 0, maxItems = idx)
+ copySliceToArray(srcStart = suffixStart, dest = array2, destStart = idx, maxItems = n)
+ reset(array = array2, start = 0, end = finalLength)
+ } else if (2*idx <= finalLength) { // Cheaper to move the prefix right
+ var i = suffixStart - 1
+ while(i >= removals) {
+ _set(i, _get(i - removals))
+ i -= 1
+ }
+ while(i >= 0) {
+ _set(i, null.asInstanceOf[A])
+ i -= 1
+ }
+ start = start_+(removals)
+ } else { // Cheaper to move the suffix left
+ var i = idx
+ while(i < finalLength) {
+ _set(i, _get(i + removals))
+ i += 1
+ }
+ while(i < n) {
+ _set(i, null.asInstanceOf[A])
+ i += 1
+ }
+ end = end_-(removals)
+ }
+ } else {
+ require(count == 0, s"removing negative number of elements: $count")
+ }
+ }
+
+ def remove(idx: Int): A = {
+ val elem = this(idx)
+ remove(idx, 1)
+ elem
+ }
+
+ override def subtractOne(elem: A): this.type = {
+ val idx = indexOf(elem)
+    if (idx >= 0) remove(idx, 1) // TODO: SeqOps should be a fluent API
+ this
+ }
+
+ /**
+   * Removes and returns the first element, if any.
+   *
+   * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while
+   * @return the removed element wrapped in `Some`, or `None` if this collection is empty
+ */
+ def removeHeadOption(resizeInternalRepr: Boolean = false): Option[A] =
+ if (isEmpty) None else Some(removeHeadAssumingNonEmpty(resizeInternalRepr))
+
+ /**
+   * Unsafely removes the first element (throws an exception when empty).
+   * See also `removeHeadOption()`.
+   *
+   * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while
+   * @throws NoSuchElementException when empty
+   * @return the removed element
+ */
+ def removeHead(resizeInternalRepr: Boolean = false): A =
+    if (isEmpty) throw new NoSuchElementException("empty collection") else removeHeadAssumingNonEmpty(resizeInternalRepr)
+
+ @inline private[this] def removeHeadAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = {
+ val elem = array(start)
+ array(start) = null
+ start = start_+(1)
+ if (resizeInternalRepr) resize(length)
+ elem.asInstanceOf[A]
+ }
+
+ /**
+   * Removes and returns the last element, if any.
+   *
+   * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while
+   * @return the removed element wrapped in `Some`, or `None` if this collection is empty
+ */
+ def removeLastOption(resizeInternalRepr: Boolean = false): Option[A] =
+ if (isEmpty) None else Some(removeLastAssumingNonEmpty(resizeInternalRepr))
+
+ /**
+   * Unsafely removes the last element (throws an exception when empty).
+   * See also `removeLastOption()`.
+   *
+   * @param resizeInternalRepr If this is set, resize the internal representation to reclaim space once in a while
+   * @throws NoSuchElementException when empty
+   * @return the removed element
+ */
+ def removeLast(resizeInternalRepr: Boolean = false): A =
+    if (isEmpty) throw new NoSuchElementException("empty collection") else removeLastAssumingNonEmpty(resizeInternalRepr)
+
+ @`inline` private[this] def removeLastAssumingNonEmpty(resizeInternalRepr: Boolean = false): A = {
+ end = end_-(1)
+ val elem = array(end)
+ array(end) = null
+ if (resizeInternalRepr) resize(length)
+ elem.asInstanceOf[A]
+ }
+
+ /**
+   * Removes all elements from this collection and returns them in insertion order, emptying this data structure.
+   * @return the removed elements
+ */
+ def removeAll(): scala.collection.immutable.Seq[A] = {
+ val elems = scala.collection.immutable.Seq.newBuilder[A]
+ elems.sizeHint(length)
+ while(nonEmpty) {
+ elems += removeHeadAssumingNonEmpty()
+ }
+ elems.result()
+ }
+
+ /**
+   * Removes all elements from this collection and returns them in reverse order, emptying this data structure.
+   * @return the removed elements
+ */
+ def removeAllReverse(): scala.collection.immutable.Seq[A] = {
+ val elems = scala.collection.immutable.Seq.newBuilder[A]
+ elems.sizeHint(length)
+ while(nonEmpty) {
+ elems += removeLastAssumingNonEmpty()
+ }
+ elems.result()
+ }
+
+ /**
+ * Returns and removes all elements from the left of this queue which satisfy the given predicate
+ *
+ * @param f the predicate used for choosing elements
+   * @return the removed elements
+ */
+ def removeHeadWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = {
+ val elems = scala.collection.immutable.Seq.newBuilder[A]
+ while(headOption.exists(f)) {
+ elems += removeHeadAssumingNonEmpty()
+ }
+ elems.result()
+ }
+
+ /**
+ * Returns and removes all elements from the right of this queue which satisfy the given predicate
+ *
+ * @param f the predicate used for choosing elements
+   * @return the removed elements
+ */
+ def removeLastWhile(f: A => Boolean): scala.collection.immutable.Seq[A] = {
+ val elems = scala.collection.immutable.Seq.newBuilder[A]
+ while(lastOption.exists(f)) {
+ elems += removeLastAssumingNonEmpty()
+ }
+ elems.result()
+ }
+
+ /** Returns the first element which satisfies the given predicate after or at some start index
+   * and removes this element from the collection
+ *
+ * @param p the predicate used for choosing the first element
+ * @param from the start index
+ * @return the first element of the queue for which p yields true
+ */
+ def removeFirst(p: A => Boolean, from: Int = 0): Option[A] = {
+ val i = indexWhere(p, from)
+ if (i < 0) None else Some(remove(i))
+ }
+
+ /** Returns all elements in this collection which satisfy the given predicate
+   * and removes those elements from this collection.
+ *
+ * @param p the predicate used for choosing elements
+ * @return a sequence of all elements in the queue for which
+ * p yields true.
+ */
+ def removeAll(p: A => Boolean): scala.collection.immutable.Seq[A] = {
+ val res = scala.collection.immutable.Seq.newBuilder[A]
+ var i, j = 0
+ while (i < size) {
+ if (p(this(i))) {
+ res += this(i)
+ } else {
+ if (i != j) {
+ this(j) = this(i)
+ }
+ j += 1
+ }
+ i += 1
+ }
+ if (i != j) takeInPlace(j)
+ res.result()
+ }
+
+ @inline def ensureSize(hint: Int) = if (hint > length && mustGrow(hint)) resize(hint)
+
+ def length = end_-(start)
+
+ override def isEmpty = start == end
+
+ override protected def klone(): ArrayDeque[A] = new ArrayDeque(array.clone(), start = start, end = end)
+
+ override def iterableFactory: SeqFactory[ArrayDeque] = ArrayDeque
+
+ /**
+ * Note: This does not actually resize the internal representation.
+   * See `clearAndShrink` if you also want to resize the internal representation.
+ */
+ def clear(): Unit = {
+ while(nonEmpty) {
+ removeHeadAssumingNonEmpty()
+ }
+ }
+
+ /**
+   * Clears this buffer and shrinks the internal representation to the given size.
+   *
+   * @param size the size to shrink the internal representation to
+   * @return this collection
+ */
+ def clearAndShrink(size: Int = ArrayDeque.DefaultInitialSize): this.type = {
+ reset(array = ArrayDeque.alloc(size), start = 0, end = 0)
+ this
+ }
+
+ protected def ofArray(array: Array[AnyRef], end: Int): ArrayDeque[A] =
+ new ArrayDeque[A](array, start = 0, end)
+
+ override def copyToArray[sealed B >: A](dest: Array[B], destStart: Int, len: Int): Int = {
+ val copied = IterableOnce.elemsToCopyToArray(length, dest.length, destStart, len)
+ if (copied > 0) {
+ copySliceToArray(srcStart = 0, dest = dest, destStart = destStart, maxItems = len)
+ }
+ copied
+ }
+
+ override def toArray[sealed B >: A: ClassTag]: Array[B] =
+ copySliceToArray(srcStart = 0, dest = new Array[B](length), destStart = 0, maxItems = length)
+
+ /**
+   * Trims the capacity of this ArrayDeque instance down to the current size.
+ */
+ def trimToSize(): Unit = resize(length)
+
+ // Utils for common modular arithmetic:
+ @inline protected def start_+(idx: Int) = (start + idx) & (array.length - 1)
+ @inline private[this] def start_-(idx: Int) = (start - idx) & (array.length - 1)
+ @inline private[this] def end_+(idx: Int) = (end + idx) & (array.length - 1)
+ @inline private[this] def end_-(idx: Int) = (end - idx) & (array.length - 1)
+
+ // Note: here be overflow dragons! This is used for int overflow
+  // assumptions in resize(). Use caution when changing.
+ @inline private[this] def mustGrow(len: Int) = {
+ len >= array.length
+ }
+
+ // Assumes that 0 <= len < array.length!
+ @inline private[this] def shouldShrink(len: Int) = {
+ // To avoid allocation churn, only shrink when array is large
+ // and less than 2/5 filled.
+ array.length > ArrayDeque.StableSize && array.length - len - (len >> 1) > len
+ }
+
+ // Assumes that 0 <= len < array.length!
+ @inline private[this] def canShrink(len: Int) = {
+ array.length > ArrayDeque.DefaultInitialSize && array.length - len > len
+ }
+
+ @inline private[this] def _get(idx: Int): A = array(start_+(idx)).asInstanceOf[A]
+
+ @inline private[this] def _set(idx: Int, elem: A) = array(start_+(idx)) = elem.asInstanceOf[AnyRef]
+
+ // Assumes that 0 <= len.
+ private[this] def resize(len: Int) = if (mustGrow(len) || canShrink(len)) {
+ val n = length
+ val array2 = copySliceToArray(srcStart = 0, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = n)
+ reset(array = array2, start = 0, end = n)
+ }
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "ArrayDeque"
+}
+
+/**
+ * $factoryInfo
+ * @define coll array deque
+ * @define Coll `ArrayDeque`
+ */
+@SerialVersionUID(3L)
+object ArrayDeque extends StrictOptimizedSeqFactory[ArrayDeque] {
+
+ def from[sealed B](coll: collection.IterableOnce[B]^): ArrayDeque[B] = {
+ val s = coll.knownSize
+ if (s >= 0) {
+ val array = alloc(s)
+ val actual = IterableOnce.copyElemsToArray(coll, array.asInstanceOf[Array[Any]])
+ if (actual != s) throw new IllegalStateException(s"Copied $actual of $s")
+ new ArrayDeque[B](array, start = 0, end = s)
+ } else new ArrayDeque[B]() ++= coll
+ }
+
+ def newBuilder[sealed A]: Builder[A, ArrayDeque[A]] =
+ new GrowableBuilder[A, ArrayDeque[A]](empty) {
+ override def sizeHint(size: Int): Unit = {
+ elems.ensureSize(size)
+ }
+ }
+
+ def empty[sealed A]: ArrayDeque[A] = new ArrayDeque[A]()
+
+ final val DefaultInitialSize = 16
+
+ /**
+   * We try not to repeatedly resize arrays smaller than this.
+ */
+ private[ArrayDeque] final val StableSize = 128
+
+ /**
+   * Allocates an array whose size is the next power of 2 strictly greater than `len`
+   * (bounded below by `DefaultInitialSize`). The largest possible `len` is (1 << 30) - 1.
+   *
+   * @param len the number of elements the array must be able to hold
+   * @return a fresh array whose length is a power of 2
+ */
+ private[mutable] def alloc(len: Int) = {
+    require(len >= 0, "Non-negative array size required")
+ val size = (1 << 31) >>> java.lang.Integer.numberOfLeadingZeros(len) << 1
+ require(size >= 0, s"ArrayDeque too big - cannot allocate ArrayDeque of length $len")
+ new Array[AnyRef](Math.max(size, DefaultInitialSize))
+ }
+}
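
`alloc` keeps every backing array a power of two so that the modular index helpers in `ArrayDequeOps` below (`start_+`, `end_-`, ...) can mask with `array.length - 1` instead of computing a remainder. A standalone sketch of the rounding trick; the helper name here is hypothetical and simply mirrors the private `alloc` above:

```scala
object NextPowerOfTwoSketch {
  // Mirrors ArrayDeque.alloc: round len up to the next power of 2 strictly
  // greater than len, with 16 (DefaultInitialSize) as the lower bound.
  def nextPowerOfTwo(len: Int): Int = {
    require(len >= 0, "non-negative size required")
    val size = (1 << 31) >>> java.lang.Integer.numberOfLeadingZeros(len) << 1
    math.max(size, 16)
  }

  def main(args: Array[String]): Unit = {
    println(Seq(0, 15, 16, 1000).map(nextPowerOfTwo)) // List(16, 16, 32, 1024)
    val n = nextPowerOfTwo(1000)
    println((1027 & (n - 1), 1027 % n))               // (3,3): masking == remainder
  }
}
```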
+
+trait ArrayDequeOps[A, +CC[_], +C <: AnyRef] extends StrictOptimizedSeqOps[A, CC, C] {
+ protected def array: Array[AnyRef]
+
+ final override def clone(): C = klone()
+
+ protected def klone(): C
+
+ protected def ofArray(array: Array[AnyRef], end: Int): C
+
+ protected def start_+(idx: Int): Int
+
+ @inline protected final def requireBounds(idx: Int, until: Int = length): Unit =
+ if (idx < 0 || idx >= until) throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${until-1})")
+
+ /**
+   * This is a more general version of `copyToArray`: unlike `copyToArray`, it also accepts a `srcStart`.
+   * It copies up to `maxItems` elements, starting at this collection's `srcStart`, into `dest` starting at `destStart`.
+   * If the end of either collection is reached before `maxItems` elements have been copied, copying simply stops.
+   *
+   * @param srcStart the index in this collection to start copying from
+   * @param dest the destination array
+   * @param destStart the index in `dest` to start copying to
+   * @param maxItems the maximum number of elements to copy
+ */
+ def copySliceToArray(srcStart: Int, dest: Array[_], destStart: Int, maxItems: Int): dest.type = {
+ requireBounds(destStart, dest.length+1)
+ val toCopy = Math.min(maxItems, Math.min(length - srcStart, dest.length - destStart))
+ if (toCopy > 0) {
+ requireBounds(srcStart)
+ val startIdx = start_+(srcStart)
+ val block1 = Math.min(toCopy, array.length - startIdx)
+ Array.copy(src = array, srcPos = startIdx, dest = dest, destPos = destStart, length = block1)
+ val block2 = toCopy - block1
+ if (block2 > 0) Array.copy(src = array, srcPos = 0, dest = dest, destPos = destStart + block1, length = block2)
+ }
+ dest
+ }
+
+ override def reverse: C = {
+ val n = length
+ val arr = ArrayDeque.alloc(n)
+ var i = 0
+ while(i < n) {
+ arr(i) = this(n - i - 1).asInstanceOf[AnyRef]
+ i += 1
+ }
+ ofArray(arr, n)
+ }
+
+ override def slice(from: Int, until: Int): C = {
+ val n = length
+ val left = Math.max(0, Math.min(n, from))
+ val right = Math.max(0, Math.min(n, until))
+ val len = right - left
+ if (len <= 0) {
+ empty
+ } else if (len >= n) {
+ klone()
+ } else {
+ val array2 = copySliceToArray(srcStart = left, dest = ArrayDeque.alloc(len), destStart = 0, maxItems = len)
+ ofArray(array2, len)
+ }
+ }
+
+ override def sliding(window: Int, step: Int): Iterator[C] = {
+ require(window > 0 && step > 0, s"window=$window and step=$step, but both must be positive")
+ length match {
+ case 0 => Iterator.empty
+ case n if n <= window => Iterator.single(slice(0, length))
+ case n =>
+ val lag = if (window > step) window - step else 0
+ Iterator.range(start = 0, end = n - lag, step = step).map(i => slice(i, i + window))
+ }
+ }
+
+ override def grouped(n: Int): Iterator[C] = sliding(n, n)
+}
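
A short usage sketch for the deque above, using only public API from this file: constant-time work at both ends, plus predicate-based removal from either side.

```scala
import scala.collection.mutable.ArrayDeque

object ArrayDequeDemo {
  def main(args: Array[String]): Unit = {
    val dq = ArrayDeque(2, 3, 4)
    dq.prepend(1).addOne(5)             // amortized O(1) at both ends
    println(dq)                         // ArrayDeque(1, 2, 3, 4, 5)
    println(dq.removeHeadWhile(_ < 3))  // List(1, 2), removed from the front
    println(dq.removeLast())            // 5
    println(dq)                         // ArrayDeque(3, 4)
  }
}
```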
diff --git a/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala
new file mode 100644
index 000000000000..bd3a208a94c0
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/ArraySeq.scala
@@ -0,0 +1,351 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import java.util.Arrays
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.convert.impl._
+import scala.reflect.ClassTag
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+import annotation.unchecked.uncheckedCaptures
+
+/**
+ * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same
+ * underlying `Array`, therefore it is not growable or shrinkable.
+ *
+ * @tparam T type of the elements in this wrapped array.
+ *
+ * @define Coll `ArraySeq`
+ * @define coll wrapped array
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@SerialVersionUID(3L)
+sealed abstract class ArraySeq[sealed T]
+ extends AbstractSeq[T]
+ with IndexedSeq[T]
+ with IndexedSeqOps[T, ArraySeq, ArraySeq[T]]
+ with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]]
+ with Serializable
+ with Pure {
+
+ override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = {
+ val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]]
+ val s = coll.knownSize
+ if(s > 0) b.sizeHint(s)
+ b ++= coll
+ ArraySeq.make(b.result())
+ }
+ override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] =
+ ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]]
+ override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]])
+
+ /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive
+ * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype
+ * or subtype of the element type. */
+ def elemTag: ClassTag[_]
+
+ /** Update element at given index */
+ def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit
+
+ /** The underlying array. Its element type does not have to be equal to the element type of this ArraySeq. A primitive
+ * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype
+ * or subtype of the element type. */
+ def array: Array[_]
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit
+
+ override protected[this] def className = "ArraySeq"
+
+ /** Clones this object, including the underlying Array. */
+ override def clone(): ArraySeq[T] = ArraySeq.make[T](array.clone().asInstanceOf[Array[T]])
+
+ override def copyToArray[sealed B >: T](xs: Array[B], start: Int, len: Int): Int = {
+ val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len)
+ if(copied > 0) {
+ Array.copy(array, 0, xs, start, copied)
+ }
+ copied
+ }
+
+ override def equals(other: Any): Boolean = other match {
+ case that: ArraySeq[_] if this.array.length != that.array.length =>
+ false
+ case _ =>
+ super.equals(other)
+ }
+
+ override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] =
+ ArraySeq.make(array.asInstanceOf[Array[T]].sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]]
+
+ override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = {
+ if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B @uncheckedCaptures]])
+ this
+ }
+}
+
+/** A companion object used to create instances of `ArraySeq`.
+ */
+@SerialVersionUID(3L)
+object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self =>
+ val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self)
+
+ // This is reused for all calls to empty.
+ private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0))
+ def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]]
+
+ def from[sealed A : ClassTag](it: scala.collection.IterableOnce[A]^): ArraySeq[A] = make(Array.from[A](it))
+
+ def newBuilder[sealed A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make)
+
+ /**
+   * Wrap an existing `Array` into an `ArraySeq` of the proper primitive specialization type
+ * without copying.
+ *
+   * Note that an array containing boxed primitives can be converted to an `ArraySeq` without
+ * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime,
+ * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast:
+ * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still
+ * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing
+ * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException`
+ * at runtime.
+ */
+ def make[sealed T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match {
+ case null => null
+ case x: Array[AnyRef] => new ofRef[AnyRef](x)
+ case x: Array[Int] => new ofInt(x)
+ case x: Array[Double] => new ofDouble(x)
+ case x: Array[Long] => new ofLong(x)
+ case x: Array[Float] => new ofFloat(x)
+ case x: Array[Char] => new ofChar(x)
+ case x: Array[Byte] => new ofByte(x)
+ case x: Array[Short] => new ofShort(x)
+ case x: Array[Boolean] => new ofBoolean(x)
+ case x: Array[Unit] => new ofUnit(x)
+ }).asInstanceOf[ArraySeq[T]]
+
+ @SerialVersionUID(3L)
+ final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] {
+ def elemTag = ClassTag[T](array.getClass.getComponentType)
+ def length: Int = array.length
+ def apply(index: Int): T = array(index)
+ def update(index: Int, elem: T): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofRef[_] =>
+ Array.equals(
+ this.array.asInstanceOf[Array[AnyRef]],
+ that.array.asInstanceOf[Array[AnyRef]])
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ new ObjectArrayStepper(array, 0, array.length)
+ else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit])
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] {
+ def elemTag = ClassTag.Byte
+ def length: Int = array.length
+ def apply(index: Int): Byte = array(index)
+ def update(index: Int, elem: Byte): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofByte => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length))
+ else new WidenedByteArrayStepper(array, 0, array.length)
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofShort(val array: Array[Short]) extends ArraySeq[Short] {
+ def elemTag = ClassTag.Short
+ def length: Int = array.length
+ def apply(index: Int): Short = array(index)
+ def update(index: Int, elem: Short): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofShort => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length))
+ else new WidenedShortArrayStepper(array, 0, array.length)
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofChar(val array: Array[Char]) extends ArraySeq[Char] {
+ def elemTag = ClassTag.Char
+ def length: Int = array.length
+ def apply(index: Int): Char = array(index)
+ def update(index: Int, elem: Char): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofChar => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length))
+ else new WidenedCharArrayStepper(array, 0, array.length)
+ ).asInstanceOf[S with EfficientSplit]
+
+ override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = {
+ val jsb = sb.underlying
+ if (start.length != 0) jsb.append(start)
+ val len = array.length
+ if (len != 0) {
+ if (sep.isEmpty) jsb.append(array)
+ else {
+ jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length)
+ jsb.append(array(0))
+ var i = 1
+ while (i < len) {
+ jsb.append(sep)
+ jsb.append(array(i))
+ i += 1
+ }
+ }
+ }
+ if (end.length != 0) jsb.append(end)
+ sb
+ }
+ }
+
+ @SerialVersionUID(3L)
+ final class ofInt(val array: Array[Int]) extends ArraySeq[Int] {
+ def elemTag = ClassTag.Int
+ def length: Int = array.length
+ def apply(index: Int): Int = array(index)
+ def update(index: Int, elem: Int): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofInt => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length))
+ else new IntArrayStepper(array, 0, array.length)
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofLong(val array: Array[Long]) extends ArraySeq[Long] {
+ def elemTag = ClassTag.Long
+ def length: Int = array.length
+ def apply(index: Int): Long = array(index)
+ def update(index: Int, elem: Long): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofLong => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParLongStepper(new LongArrayStepper(array, 0, array.length))
+ else new LongArrayStepper(array, 0, array.length)
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] {
+ def elemTag = ClassTag.Float
+ def length: Int = array.length
+ def apply(index: Int): Float = array(index)
+ def update(index: Int, elem: Float): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofFloat => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length))
+ else new WidenedFloatArrayStepper(array, 0, array.length)
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] {
+ def elemTag = ClassTag.Double
+ def length: Int = array.length
+ def apply(index: Int): Double = array(index)
+ def update(index: Int, elem: Double): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofDouble => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = (
+ if(shape.shape == StepperShape.ReferenceShape)
+ AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length))
+ else new DoubleArrayStepper(array, 0, array.length)
+ ).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] {
+ def elemTag = ClassTag.Boolean
+ def length: Int = array.length
+ def apply(index: Int): Boolean = array(index)
+ def update(index: Int, elem: Boolean): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofBoolean => Arrays.equals(array, that.array)
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit =
+ new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit]
+ }
+
+ @SerialVersionUID(3L)
+ final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] {
+ def elemTag = ClassTag.Unit
+ def length: Int = array.length
+ def apply(index: Int): Unit = array(index)
+ def update(index: Int, elem: Unit): Unit = { array(index) = elem }
+ override def hashCode = MurmurHash3.arraySeqHash(array)
+ override def equals(that: Any) = that match {
+ case that: ofUnit => array.length == that.array.length
+ case _ => super.equals(that)
+ }
+ override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array)
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit =
+ new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S with EfficientSplit]
+ }
+}
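
The `make` doc above stresses that wrapping is copy-free; in particular the wrapper and the original array share storage, so writes through either side are visible to both. A small sketch:

```scala
import scala.collection.mutable.ArraySeq

object ArraySeqMakeDemo {
  def main(args: Array[String]): Unit = {
    val arr = Array(1, 2, 3)
    val seq = ArraySeq.make(arr)   // ofInt wrapper; no copying happens
    arr(0) = 42
    println(seq(0))                // 42: writes to the array show through the wrapper
    seq(1) = 7
    println(arr(1))                // 7: and vice versa
  }
}
```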
diff --git a/tests/pos-special/stdlib/collection/mutable/BitSet.scala b/tests/pos-special/stdlib/collection/mutable/BitSet.scala
new file mode 100644
index 000000000000..dcb8a157389b
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/BitSet.scala
@@ -0,0 +1,393 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.collection.immutable.Range
+import BitSetOps.{LogWL, MaxSize}
+import scala.annotation.implicitNotFound
+import language.experimental.captureChecking
+
+/**
+ * A class for mutable bitsets.
+ *
+ * $bitsetinfo
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]]
+ * section on `Mutable Bitsets` for more information.
+ *
+ * @define Coll `BitSet`
+ * @define coll bitset
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+class BitSet(protected[collection] final var elems: Array[Long])
+ extends AbstractSet[Int]
+ with SortedSet[Int]
+ with SortedSetOps[Int, SortedSet, BitSet]
+ with StrictOptimizedIterableOps[Int, Set, BitSet]
+ with StrictOptimizedSortedSetOps[Int, SortedSet, BitSet]
+ with collection.BitSet
+ with collection.BitSetOps[BitSet]
+ with Serializable {
+
+ def this(initSize: Int) = this(new Array[Long](math.max((initSize + 63) >> 6, 1)))
+
+ def this() = this(0)
+
+ override protected def fromSpecific(coll: IterableOnce[Int]^): BitSet = bitSetFactory.fromSpecific(coll)
+ override protected def newSpecificBuilder: Builder[Int, BitSet] = bitSetFactory.newBuilder
+ override def empty: BitSet = bitSetFactory.empty
+
+ def bitSetFactory = BitSet
+
+ override def unsorted: Set[Int] = this
+
+ protected[collection] final def nwords: Int = elems.length
+
+ protected[collection] final def word(idx: Int): Long =
+ if (idx < nwords) elems(idx) else 0L
+
+ protected[collection] def fromBitMaskNoCopy(elems: Array[Long]): BitSet =
+ if (elems.length == 0) empty
+ else new BitSet(elems)
+
+ def addOne(elem: Int): this.type = {
+ require(elem >= 0)
+ if (!contains(elem)) {
+ val idx = elem >> LogWL
+ updateWord(idx, word(idx) | (1L << elem))
+ }
+ this
+ }
+
+ def subtractOne(elem: Int): this.type = {
+ require(elem >= 0)
+ if (contains(elem)) {
+ val idx = elem >> LogWL
+ updateWord(idx, word(idx) & ~(1L << elem))
+ }
+ this
+ }
+
+ def clear(): Unit = {
+ elems = new Array[Long](elems.length)
+ }
+
+ protected final def updateWord(idx: Int, w: Long): Unit = {
+ ensureCapacity(idx)
+ elems(idx) = w
+ }
+
+ protected final def ensureCapacity(idx: Int): Unit = {
+ require(idx < MaxSize)
+ if (idx >= nwords) {
+ var newlen = nwords
+ while (idx >= newlen) newlen = math.min(newlen * 2, MaxSize)
+ val elems1 = new Array[Long](newlen)
+ Array.copy(elems, 0, elems1, 0, nwords)
+ elems = elems1
+ }
+ }
+
+ def unconstrained: collection.Set[Int] = this
+
+ /** Updates this bitset to the union with another bitset by performing a bitwise "or".
+ *
+ * @param other the bitset to form the union with.
+ * @return the bitset itself.
+ */
+ def |= (other: collection.BitSet): this.type = {
+ ensureCapacity(other.nwords - 1)
+ var i = 0
+ val othernwords = other.nwords
+ while (i < othernwords) {
+ elems(i) = elems(i) | other.word(i)
+ i += 1
+ }
+ this
+ }
+ /** Updates this bitset to the intersection with another bitset by performing a bitwise "and".
+ *
+ * @param other the bitset to form the intersection with.
+ * @return the bitset itself.
+ */
+ def &= (other: collection.BitSet): this.type = {
+ // Different from other operations: no need to ensure capacity because
+ // anything beyond the capacity is 0. Since we use other.word which is 0
+ // off the end, we also don't need to make sure we stay in bounds there.
+ var i = 0
+ val thisnwords = nwords
+ while (i < thisnwords) {
+ elems(i) = elems(i) & other.word(i)
+ i += 1
+ }
+ this
+ }
+ /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor".
+ *
+ * @param other the bitset to form the symmetric difference with.
+ * @return the bitset itself.
+ */
+ def ^= (other: collection.BitSet): this.type = {
+ ensureCapacity(other.nwords - 1)
+ var i = 0
+ val othernwords = other.nwords
+ while (i < othernwords) {
+      elems(i) = elems(i) ^ other.word(i)
+ i += 1
+ }
+ this
+ }
+ /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not".
+ *
+ * @param other the bitset to form the difference with.
+ * @return the bitset itself.
+ */
+ def &~= (other: collection.BitSet): this.type = {
+ var i = 0
+ val max = Math.min(nwords, other.nwords)
+ while (i < max) {
+ elems(i) = elems(i) & ~other.word(i)
+ i += 1
+ }
+ this
+ }
+
+ override def clone(): BitSet = new BitSet(java.util.Arrays.copyOf(elems, elems.length))
+
+ def toImmutable: immutable.BitSet = immutable.BitSet.fromBitMask(elems)
+
+ override def map(f: Int => Int): BitSet = strictOptimizedMap(newSpecificBuilder, f)
+ override def map[B](f: Int => B)(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] =
+ super[StrictOptimizedSortedSetOps].map(f)
+
+ override def flatMap(f: Int => IterableOnce[Int]): BitSet = strictOptimizedFlatMap(newSpecificBuilder, f)
+ override def flatMap[B](f: Int => IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] =
+ super[StrictOptimizedSortedSetOps].flatMap(f)
+
+ override def collect(pf: PartialFunction[Int, Int]): BitSet = strictOptimizedCollect(newSpecificBuilder, pf)
+ override def collect[B](pf: scala.PartialFunction[Int, B])(implicit @implicitNotFound(collection.BitSet.ordMsg) ev: Ordering[B]): SortedSet[B] =
+ super[StrictOptimizedSortedSetOps].collect(pf)
+
+ // necessary for disambiguation
+ override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(collection.BitSet.zipOrdMsg) ev: Ordering[(Int, B)]): SortedSet[(Int, B)] =
+ super.zip(that)
+
+ override def addAll(xs: IterableOnce[Int]^): this.type = xs match {
+ case bs: collection.BitSet =>
+ this |= bs
+ case range: Range =>
+ if (range.nonEmpty) {
+ val start = range.min
+ if (start >= 0) {
+ val end = range.max
+ val endIdx = end >> LogWL
+ ensureCapacity(endIdx)
+
+ if (range.step == 1 || range.step == -1) {
+ val startIdx = start >> LogWL
+ val wordStart = startIdx * BitSetOps.WordLength
+ val wordMask = -1L << (start - wordStart)
+
+ if (endIdx > startIdx) {
+ elems(startIdx) |= wordMask
+ java.util.Arrays.fill(elems, startIdx + 1, endIdx, -1L)
+ elems(endIdx) |= -1L >>> (BitSetOps.WordLength - (end - endIdx * BitSetOps.WordLength) - 1)
+ } else elems(endIdx) |= (wordMask & (-1L >>> (BitSetOps.WordLength - (end - wordStart) - 1)))
+ } else super.addAll(range)
+ } else super.addAll(range)
+ }
+ this
+
+ case sorted: collection.SortedSet[Int] =>
+ // if `sorted` is using the regular Int ordering, ensure capacity for the largest
+ // element up front to avoid multiple resizing allocations
+ if (sorted.nonEmpty) {
+ val ord = sorted.ordering
+ if (ord eq Ordering.Int) {
+ ensureCapacity(sorted.lastKey >> LogWL)
+ } else if (ord eq Ordering.Int.reverse) {
+ ensureCapacity(sorted.firstKey >> LogWL)
+ }
+ val iter = sorted.iterator
+ while (iter.hasNext) {
+ addOne(iter.next())
+ }
+ }
+
+ this
+
+ case other =>
+ super.addAll(other)
+ }
+
+ override def subsetOf(that: collection.Set[Int]): Boolean = that match {
+ case bs: collection.BitSet =>
+ val thisnwords = this.nwords
+ val bsnwords = bs.nwords
+ val minWords = Math.min(thisnwords, bsnwords)
+
+ // if any bits are set to `1` in words out of range of `bs`, then this is not a subset. Start there
+ var i = bsnwords
+ while (i < thisnwords) {
+ if (word(i) != 0L) return false
+ i += 1
+ }
+
+ // the higher range of `this` is all `0`s, fall back to lower range
+ var j = 0
+ while (j < minWords) {
+ if ((word(j) & ~bs.word(j)) != 0L) return false
+ j += 1
+ }
+
+ true
+ case other =>
+ super.subsetOf(other)
+ }
+
+ override def subtractAll(xs: IterableOnce[Int]^): this.type = xs match {
+ case bs: collection.BitSet => this &~= bs
+ case other => super.subtractAll(other)
+ }
+
+ protected[this] def writeReplace(): AnyRef = new BitSet.SerializationProxy(this)
+
+ override def diff(that: collection.Set[Int]): BitSet = that match {
+ case bs: collection.BitSet =>
+ /*
+ * Algorithm:
+ *
+ * We iterate, word-by-word, backwards from the shortest of the two bitsets (this, or bs) i.e. the one with
+ * the fewer words.
+ *
+ * Array Shrinking:
+ * If `this` is not longer than `bs`, then since we must iterate through the full array of words,
+ * we can track the new highest index word which is non-zero, at little additional cost. At the end, the new
+ * Array[Long] allocated for the returned BitSet will only be of size `maxNonZeroIndex + 1`
+ */
+
+ val bsnwords = bs.nwords
+ val thisnwords = nwords
+ if (bsnwords >= thisnwords) {
+ // here, we may have opportunity to shrink the size of the array
+ // so, track the highest index which is non-zero. That ( + 1 ) will be our new array length
+ var i = thisnwords - 1
+ var currentWord = 0L
+
+ while (i >= 0 && currentWord == 0L) {
+ val oldWord = word(i)
+ currentWord = oldWord & ~bs.word(i)
+ i -= 1
+ }
+
+ if (i < 0) {
+ fromBitMaskNoCopy(Array(currentWord))
+ } else {
+ val minimumNonZeroIndex: Int = i + 1
+ val newArray = elems.take(minimumNonZeroIndex + 1)
+ newArray(i + 1) = currentWord
+ while (i >= 0) {
+ newArray(i) = word(i) & ~bs.word(i)
+ i -= 1
+ }
+ fromBitMaskNoCopy(newArray)
+ }
+ } else {
+ // here, there is no opportunity to shrink the array size, no use in tracking highest non-zero index
+ val newElems = elems.clone()
+ var i = bsnwords - 1
+ while (i >= 0) {
+ newElems(i) = word(i) & ~bs.word(i)
+ i -= 1
+ }
+ fromBitMaskNoCopy(newElems)
+ }
+ case _ => super.diff(that)
+ }
+
+ override def filterImpl(pred: Int => Boolean, isFlipped: Boolean): BitSet = {
+ // We filter the BitSet from highest to lowest, so we can determine exactly the highest non-zero word
+ // index which lets us avoid:
+ // * over-allocating -- the resulting array will be exactly the right size
+ // * multiple resizing allocations -- the array is allocated one time, not log(n) times.
+ var i = nwords - 1
+ var newArray: Array[Long] = null
+ while (i >= 0) {
+ val w = BitSetOps.computeWordForFilter(pred, isFlipped, word(i), i)
+ if (w != 0L) {
+ if (newArray eq null) {
+ newArray = new Array(i + 1)
+ }
+ newArray(i) = w
+ }
+ i -= 1
+ }
+ if (newArray eq null) {
+ empty
+ } else {
+ fromBitMaskNoCopy(newArray)
+ }
+ }
+
+ override def filterInPlace(p: Int => Boolean): this.type = {
+ val thisnwords = nwords
+ var i = 0
+ while (i < thisnwords) {
+ elems(i) = BitSetOps.computeWordForFilter(p, isFlipped = false, elems(i), i)
+ i += 1
+ }
+ this
+ }
+
+ override def toBitMask: Array[Long] = elems.clone()
+}
+
+@SerialVersionUID(3L)
+object BitSet extends SpecificIterableFactory[Int, BitSet] {
+
+ def fromSpecific(it: scala.collection.IterableOnce[Int]^): BitSet = Growable.from(empty, it)
+
+ def empty: BitSet = new BitSet()
+
+ def newBuilder: Builder[Int, BitSet] = new GrowableBuilder(empty)
+
+ /** A bitset containing all the bits in an array */
+ def fromBitMask(elems: Array[Long]): BitSet = {
+ val len = elems.length
+ if (len == 0) empty
+ else {
+ val a = java.util.Arrays.copyOf(elems, len)
+ new BitSet(a)
+ }
+ }
+
+ /** A bitset containing all the bits in an array, wrapping the existing
+ * array without copying.
+ */
+ def fromBitMaskNoCopy(elems: Array[Long]): BitSet = {
+ val len = elems.length
+ if (len == 0) empty
+ else new BitSet(elems)
+ }
+
+ @SerialVersionUID(3L)
+ private final class SerializationProxy(coll: BitSet) extends scala.collection.BitSet.SerializationProxy(coll) {
+ protected[this] def readResolve(): Any = BitSet.fromBitMaskNoCopy(elems)
+ }
+}
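
`addOne` and `subtractOne` above locate a bit with `elem >> LogWL` (the word index, with `LogWL == 6`) and `1L << elem` (the JVM masks a `Long` shift count to six bits, so this is `1L << (elem & 63)`). A worked sketch of the addressing, plus the doubling growth performed by `ensureCapacity`:

```scala
object BitSetIndexingSketch {
  def main(args: Array[String]): Unit = {
    val elem = 130
    println(elem >> 6)                                         // 2: word index
    println(java.lang.Long.numberOfTrailingZeros(1L << elem))  // 2: bit within the word

    val bs = scala.collection.mutable.BitSet(1, 64, 130)
    // ensureCapacity doubles the word array as elements arrive: 1 -> 2 -> 4 words
    println(bs.toBitMask.length)                               // 4
  }
}
```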
diff --git a/tests/pos-special/stdlib/collection/mutable/Buffer.scala b/tests/pos-special/stdlib/collection/mutable/Buffer.scala
index 0a70c75bac0c..0f472dc9ac82 100644
--- a/tests/pos-special/stdlib/collection/mutable/Buffer.scala
+++ b/tests/pos-special/stdlib/collection/mutable/Buffer.scala
@@ -15,10 +15,11 @@ package mutable
import scala.annotation.nowarn
import language.experimental.captureChecking
+import scala.annotation.unchecked.uncheckedCaptures
/** A `Buffer` is a growable and shrinkable `Seq`. */
-trait Buffer[A]
+trait Buffer[sealed A]
extends Seq[A]
with SeqOps[A, Buffer, Buffer[A]]
with Growable[A]
@@ -185,7 +186,7 @@ trait IndexedBuffer[A] extends IndexedSeq[A]
// There's scope for a better implementation which copies elements in place.
var i = 0
val s = size
- val newElems = new Array[IterableOnce[A]^](s)
+ val newElems = new Array[(IterableOnce[A]^) @uncheckedCaptures](s)
while (i < s) { newElems(i) = f(this(i)); i += 1 }
clear()
i = 0
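
For context, the patched line sits in `flatMapInPlace`, which stages each `f(this(i))` result in that array before clearing and refilling the buffer. A usage sketch with `ArrayBuffer`, which is an `IndexedBuffer`:

```scala
import scala.collection.mutable.ArrayBuffer

object FlatMapInPlaceDemo {
  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(1, 2, 3)
    buf.flatMapInPlace(x => Seq(x, x * 10)) // results are staged, then written back
    println(buf)                            // ArrayBuffer(1, 10, 2, 20, 3, 30)
  }
}
```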
diff --git a/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala
new file mode 100644
index 000000000000..152b6cc9ffc7
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/CheckedIndexedSeqView.scala
@@ -0,0 +1,120 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+import language.experimental.captureChecking
+
+private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] {
+ this: CheckedIndexedSeqView[A]^ =>
+
+ protected val mutationCount: () => Int
+
+ override def iterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount())
+ override def reverseIterator: Iterator[A]^{this} = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount())
+
+ override def appended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount)
+ override def prepended[B >: A](elem: B): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount)
+ override def take(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Take(this, n)(mutationCount)
+ override def takeRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount)
+ override def drop(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Drop(this, n)(mutationCount)
+ override def dropRight(n: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount)
+ override def map[B](f: A => B): IndexedSeqView[B]^{this, f} = new CheckedIndexedSeqView.Map(this, f)(mutationCount)
+ override def reverse: IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Reverse(this)(mutationCount)
+ override def slice(from: Int, until: Int): IndexedSeqView[A]^{this} = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount)
+ override def tapEach[U](f: A => U): IndexedSeqView[A]^{this, f} = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount)
+
+ override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount)
+ override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount)
+ override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B]^{this} = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount)
+}
+
+private[mutable] object CheckedIndexedSeqView {
+ import IndexedSeqView.SomeIndexedSeqOps
+
+ @SerialVersionUID(3L)
+ private[mutable] class CheckedIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int)
+ extends IndexedSeqView.IndexedSeqViewIterator[A](self) {
+ private[this] val expectedCount = mutationCount
+ override def hasNext: Boolean = {
+ MutationTracker.checkMutationsForIteration(expectedCount, mutationCount)
+ super.hasNext
+ }
+ }
+
+ @SerialVersionUID(3L)
+ private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A]^, mutationCount: => Int)
+ extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) {
+ private[this] val expectedCount = mutationCount
+ override def hasNext: Boolean = {
+ MutationTracker.checkMutationsForIteration(expectedCount, mutationCount)
+ super.hasNext
+ }
+ }
+
+ @SerialVersionUID(3L)
+ class Id[+A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Appended[+A](underlying: SomeIndexedSeqOps[A]^, elem: A)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Concat[A](prefix: SomeIndexedSeqOps[A]^, suffix: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Take[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class TakeRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Drop[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class DropRight[A](underlying: SomeIndexedSeqOps[A]^, n: Int)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A]
+
+ @SerialVersionUID(3L)
+ class Map[A, B](underlying: SomeIndexedSeqOps[A]^, f: A => B)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B]
+
+ @SerialVersionUID(3L)
+ class Reverse[A](underlying: SomeIndexedSeqOps[A]^)(protected val mutationCount: () => Int)
+ extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] {
+ override def reverse: IndexedSeqView[A] = underlying match {
+ case x: IndexedSeqView[A] => x
+ case _ => super.reverse
+ }
+ }
+
+ @SerialVersionUID(3L)
+ class Slice[A](underlying: SomeIndexedSeqOps[A]^, from: Int, until: Int)(protected val mutationCount: () => Int)
+ extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] {
+ protected val lo = from max 0
+ protected val hi = (until max 0) min underlying.length
+ protected val len = (hi - lo) max 0
+ @throws[IndexOutOfBoundsException]
+ def apply(i: Int): A = underlying(lo + i)
+ def length: Int = len
+ }
+}
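
These checked views snapshot a mutation count when an iterator is created and re-check it on every `hasNext`. Assuming `ArrayBuffer` routes its iterators through these views (as the 2.13 library does), iteration fails fast after a structural change:

```scala
import scala.collection.mutable.ArrayBuffer
import java.util.ConcurrentModificationException

object FailFastIterationDemo {
  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer(1, 2, 3)
    val it = buf.iterator
    buf += 4                            // bumps the buffer's mutation count
    try it.next()                       // next() consults hasNext, which re-checks the count
    catch { case _: ConcurrentModificationException => println("fail-fast") }
  }
}
```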
diff --git a/tests/pos-special/stdlib/collection/mutable/Cloneable.scala b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala
new file mode 100644
index 000000000000..39149e98cbf0
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Cloneable.scala
@@ -0,0 +1,22 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+import language.experimental.captureChecking
+
+/** A trait for cloneable collections.
+ *
+ *  @tparam C Type of the collection, covariant and with reference types as upper bound.
+ */
+trait Cloneable[+C <: AnyRef] extends scala.Cloneable {
+ override def clone(): C = super.clone().asInstanceOf[C]
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala
new file mode 100644
index 000000000000..2b27efb6eac1
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/CollisionProofHashMap.scala
@@ -0,0 +1,889 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.{unchecked => uc}
+import scala.annotation.{implicitNotFound, tailrec, unused}
+import scala.annotation.unchecked.uncheckedVariance
+import scala.collection.generic.DefaultSerializationProxy
+import scala.runtime.Statics
+import language.experimental.captureChecking
+
+/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good
+ * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality
+ * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality
+ * of numeric types is not supported (similar to `AnyRefMap`).
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]]
+ * section on `Hash Tables` for more information.
+ *
+ * @define Coll `mutable.CollisionProofHashMap`
+ * @define coll mutable collision-proof hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+final class CollisionProofHashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K])
+ extends AbstractMap[K, V]
+ with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //--
+ with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]]
+ with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //--
+
+ private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap
+
+ def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering)
+
+ import CollisionProofHashMap.Node
+ private[this] type RBNode = CollisionProofHashMap.RBNode[K, V]
+ private[this] type LLNode = CollisionProofHashMap.LLNode[K, V]
+
+ /** The actual hash table. */
+ private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity))
+
+ /** The next size value at which to resize (capacity * load factor). */
+ private[this] var threshold: Int = newThreshold(table.length)
+
+ private[this] var contentSize = 0
+
+ override def size: Int = contentSize
+
+ @`inline` private[this] final def computeHash(o: K): Int = {
+ val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode
+ h ^ (h >>> 16)
+ }
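+ // Worked example for computeHash (illustrative): h = 0x12340000 gives
+ // h >>> 16 == 0x00001234, so the result is 0x12341234; entropy from the high
+ // bits is folded into the low bits that `index` actually uses.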
+
+ @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1)
+
+ override protected def fromSpecific(coll: (IterableOnce[(K, V)]^) @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll)
+ override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V]
+
+ override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V]
+
+ override def contains(key: K): Boolean = findNode(key) ne null
+
+ def get(key: K): Option[V] = findNode(key) match {
+ case null => None
+ case nd => Some(nd match {
+ case nd: LLNode @uc => nd.value
+ case nd: RBNode @uc => nd.value
+ })
+ }
+
+ @throws[NoSuchElementException]
+ override def apply(key: K): V = findNode(key) match {
+ case null => default(key)
+ case nd => nd match {
+ case nd: LLNode @uc => nd.value
+ case nd: RBNode @uc => nd.value
+ }
+ }
+
+ override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+ val nd = findNode(key)
+ if (nd eq null) default else nd match {
+ case nd: LLNode @uc => nd.value
+ case n => n.asInstanceOf[RBNode].value
+ }
+ }
+
+ @`inline` private[this] def findNode(elem: K): Node = {
+ val hash = computeHash(elem)
+ table(index(hash)) match {
+ case null => null
+ case n: LLNode @uc => n.getNode(elem, hash)
+ case n => n.asInstanceOf[RBNode].getNode(elem, hash)
+ }
+ }
+
+ override def sizeHint(size: Int): Unit = {
+ val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt)
+ if(target > table.length) {
+ if(size == 0) reallocTable(target)
+ else growTable(target)
+ }
+ }
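+ // Worked example (illustrative): sizeHint(100) with loadFactor = 0.75 computes
+ // target = tableSizeFor((101 / 0.75).toInt) = tableSizeFor(134) = 256, growing
+ // the table once up front instead of doubling repeatedly as elements arrive.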
+
+ override def update(key: K, value: V): Unit = put0(key, value, false)
+
+ override def put(key: K, value: V): Option[V] = put0(key, value, true) match {
+ case null => None
+ case sm => sm
+ }
+
+ def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this }
+
+ @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = {
+ if(contentSize + 1 >= threshold) growTable(table.length * 2)
+ val hash = computeHash(key)
+ val idx = index(hash)
+ put0(key, value, getOld, hash, idx)
+ }
+
+ private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = {
+ val res = table(idx) match {
+ case n: RBNode @uc =>
+ insert(n, idx, key, hash, value)
+ case _old =>
+ val old: LLNode = _old.asInstanceOf[LLNode]
+ if(old eq null) {
+ table(idx) = new LLNode(key, hash, value, null)
+ } else {
+ var remaining = CollisionProofHashMap.treeifyThreshold
+ var prev: LLNode = null
+ var n = old
+ while((n ne null) && n.hash <= hash && remaining > 0) {
+ if(n.hash == hash && key == n.key) {
+ val old = n.value
+ n.value = value
+ return (if(getOld) Some(old) else null)
+ }
+ prev = n
+ n = n.next
+ remaining -= 1
+ }
+ if(remaining == 0) {
+ treeify(old, idx)
+ return put0(key, value, getOld, hash, idx)
+ }
+ if(prev eq null) table(idx) = new LLNode(key, hash, value, old)
+ else prev.next = new LLNode(key, hash, value, prev.next)
+ }
+ true
+ }
+ if(res) contentSize += 1
+ if(res) Some(null.asInstanceOf[V]) else null //TODO
+ }
+
+ private[this] def treeify(old: LLNode, idx: Int): Unit = {
+ table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null)
+ var n: LLNode = old.next
+ while(n ne null) {
+ val root = table(idx).asInstanceOf[RBNode]
+ insertIntoExisting(root, idx, n.key, n.hash, n.value, root)
+ n = n.next
+ }
+ }
+
+ override def addAll(xs: IterableOnce[(K, V)]^): this.type = {
+ val k = xs.knownSize
+ if(k > 0) sizeHint(contentSize + k)
+ super.addAll(xs)
+ }
+
+ // returns the old value or Statics.pfMarker if not found
+ private[this] def remove0(elem: K) : Any = {
+ val hash = computeHash(elem)
+ val idx = index(hash)
+ table(idx) match {
+ case null => Statics.pfMarker
+ case t: RBNode @uc =>
+ val v = delete(t, idx, elem, hash)
+ if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1
+ v
+ case nd: LLNode @uc if nd.hash == hash && nd.key == elem =>
+ // first element matches
+ table(idx) = nd.next
+ contentSize -= 1
+ nd.value
+ case nd: LLNode @uc =>
+ // find an element that matches
+ var prev = nd
+ var next = nd.next
+ while((next ne null) && next.hash <= hash) {
+ if(next.hash == hash && next.key == elem) {
+ prev.next = next.next
+ contentSize -= 1
+ return next.value
+ }
+ prev = next
+ next = next.next
+ }
+ Statics.pfMarker
+ }
+ }
+
+ private[this] abstract class MapIterator[R] extends AbstractIterator[R] {
+ protected[this] def extract(node: LLNode): R
+ protected[this] def extract(node: RBNode): R
+
+ private[this] var i = 0
+ private[this] var node: Node = null
+ private[this] val len = table.length
+
+ def hasNext: Boolean = {
+ if(node ne null) true
+ else {
+ while(i < len) {
+ val n = table(i)
+ i += 1
+ n match {
+ case null =>
+ case n: RBNode @uc =>
+ node = CollisionProofHashMap.minNodeNonNull(n)
+ return true
+ case n: LLNode @uc =>
+ node = n
+ return true
+ }
+ }
+ false
+ }
+ }
+
+ def next(): R =
+ if(!hasNext) Iterator.empty.next()
+ else node match {
+ case n: RBNode @uc =>
+ val r = extract(n)
+ node = CollisionProofHashMap.successor(n)
+ r
+ case n: LLNode @uc =>
+ val r = extract(n)
+ node = n.next
+ r
+ }
+ }
+
+ override def keysIterator: Iterator[K] = {
+ if (isEmpty) Iterator.empty
+ else new MapIterator[K] {
+ protected[this] def extract(node: LLNode) = node.key
+ protected[this] def extract(node: RBNode) = node.key
+ }
+ }
+
+ override def iterator: Iterator[(K, V)] = {
+ if (isEmpty) Iterator.empty
+ else new MapIterator[(K, V)] {
+ protected[this] def extract(node: LLNode) = (node.key, node.value)
+ protected[this] def extract(node: RBNode) = (node.key, node.value)
+ }
+ }
+
+ private[this] def growTable(newlen: Int) = {
+ var oldlen = table.length
+ table = java.util.Arrays.copyOf(table, newlen)
+ threshold = newThreshold(table.length)
+ while(oldlen < newlen) {
+ var i = 0
+ while (i < oldlen) {
+ val old = table(i)
+ if(old ne null) splitBucket(old, i, i + oldlen, oldlen)
+ i += 1
+ }
+ oldlen *= 2
+ }
+ }
+
+ @`inline` private[this] def reallocTable(newlen: Int) = {
+ table = new Array(newlen)
+ threshold = newThreshold(table.length)
+ }
+
+ @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match {
+ case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask)
+ case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask)
+ }
+
+ private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = {
+ val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null)
+ val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null)
+ //preLow.next = null
+ //preHigh.next = null
+ var lastLow: LLNode = preLow
+ var lastHigh: LLNode = preHigh
+ var n = list
+ while(n ne null) {
+ val next = n.next
+ if((n.hash & mask) == 0) { // keep low
+ lastLow.next = n
+ lastLow = n
+ } else { // move to high
+ lastHigh.next = n
+ lastHigh = n
+ }
+ n = next
+ }
+ lastLow.next = null
+ if(list ne preLow.next) table(lowBucket) = preLow.next
+ if(preHigh.next ne null) {
+ table(highBucket) = preHigh.next
+ lastHigh.next = null
+ }
+ }
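+ // Worked example (illustrative): when the table grows from 8 to 16 buckets,
+ // mask = 8 (the old length); a node with hash 0b0101 (bit 3 clear) stays in
+ // bucket 5, while one with hash 0b1101 (bit 3 set) moves to bucket 5 + 8 = 13.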
+
+ private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = {
+ var lowCount, highCount = 0
+ tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1)
+ if(highCount != 0) {
+ if(lowCount == 0) {
+ table(lowBucket) = null
+ table(highBucket) = tree
+ } else {
+ table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), lowCount)
+ table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount)
+ }
+ }
+ }
+
+ private[this] def tableSizeFor(capacity: Int) =
+ (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30)
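+ // Worked examples (illustrative): tableSizeFor(1) = 8, tableSizeFor(16) = 16,
+ // tableSizeFor(17) = 32; the result is always a power of two, capped at 1 << 30.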
+
+ private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt
+
+ override def clear(): Unit = {
+ java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null)
+ contentSize = 0
+ }
+
+ override def remove(key: K): Option[V] = {
+ val v = remove0(key)
+ if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V])
+ }
+
+ def subtractOne(elem: K): this.type = { remove0(elem); this }
+
+ override def knownSize: Int = size
+
+ override def isEmpty: Boolean = size == 0
+
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ val len = table.length
+ var i = 0
+ while(i < len) {
+ val n = table(i)
+ if(n ne null) n match {
+ case n: LLNode @uc => n.foreach(f)
+ case n: RBNode @uc => n.foreach(f)
+ }
+ i += 1
+ }
+ }
+
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ val len = table.length
+ var i = 0
+ while(i < len) {
+ val n = table(i)
+ if(n ne null) n match {
+ case n: LLNode @uc => n.foreachEntry(f)
+ case n: RBNode @uc => n.foreachEntry(f)
+ }
+ i += 1
+ }
+ }
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this)
+
+ override protected[this] def className = "CollisionProofHashMap"
+
+ override def getOrElseUpdate(key: K, defaultValue: => V): V = {
+ val hash = computeHash(key)
+ val idx = index(hash)
+ table(idx) match {
+ case null => ()
+ case n: LLNode @uc =>
+ val nd = n.getNode(key, hash)
+ if(nd != null) return nd.value
+ case n =>
+ val nd = n.asInstanceOf[RBNode].getNode(key, hash)
+ if(nd != null) return nd.value
+ }
+ val table0 = table
+ val default = defaultValue
+ if(contentSize + 1 >= threshold) growTable(table.length * 2)
+ // Avoid recomputing the index if neither evaluating `defaultValue` nor the growth check above resized the table.
+ val newIdx = if (table0 eq table) idx else index(hash)
+ put0(key, default, false, hash, newIdx)
+ default
+ }
+
+ ///////////////////// Overrides code from SortedMapOps
+
+ /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll.
+ *
+ * @param f the function to apply to each element.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
+ */
+ def map[K2, V2](f: ((K, V)) => (K2, V2))
+ (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] =
+ sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f))
+
+ /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll
+ * and using the elements of the resulting collections.
+ *
+ * @param f the function to apply to each element.
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ */
+ def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])
+ (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] =
+ sortedMapFactory.from(new View.FlatMap(this, f))
+
+ /** Builds a new `CollisionProofHashMap` by applying a partial function to all elements of this $coll
+ * on which the function is defined.
+ *
+ * @param pf the partial function which filters and maps the $coll.
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
+ */
+ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])
+ (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] =
+ sortedMapFactory.from(new View.Collect(this, pf))
+
+ override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match {
+ case it: Iterable[(K, V2)] => new View.Concat(this, it)
+ case _ => iterator.concat(suffix.iterator)
+ })
+
+ /** Alias for `concat` */
+ @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CollisionProofHashMap[K, V2] = concat(xs)
+
+ @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0")
+ override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] =
+ sortedMapFactory.from(new View.Appended(this, kv))
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] =
+ sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))
+
+ ///////////////////// RedBlackTree code derived from mutable.RedBlackTree:
+
+ @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red
+ @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red
+
+ @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = {
+ val i = hash - node.hash
+ if(i != 0) i else ordering.compare(key, node.key)
+ }
+
+ @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = {
+ /*val i = hash - node.hash
+ if(i != 0) i else*/ ordering.compare(key, node.key)
+ }
+
+ // ---- insertion ----
+
+ @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = {
+ val cmp = compare(key, hash, x)
+ if(cmp == 0) {
+ x.value = value
+ false
+ } else {
+ val next = if(cmp < 0) x.left else x.right
+ if(next eq null) {
+ val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x)
+ if (cmp < 0) x.left = z else x.right = z
+ table(bucket) = fixAfterInsert(_root, z)
+ return true
+ }
+ else insertIntoExisting(_root, bucket, key, hash, value, next)
+ }
+ }
+
+ private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = {
+ if(tree eq null) {
+ table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null)
+ true
+ } else insertIntoExisting(tree, bucket, key, hash, value, tree)
+ }
+
+ private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = {
+ var root = _root
+ var z = node
+ while (isRed(z.parent)) {
+ if (z.parent eq z.parent.parent.left) {
+ val y = z.parent.parent.right
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.right) {
+ z = z.parent
+ root = rotateLeft(root, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ root = rotateRight(root, z.parent.parent)
+ }
+ } else { // symmetric cases
+ val y = z.parent.parent.left
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.left) {
+ z = z.parent
+ root = rotateRight(root, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ root = rotateLeft(root, z.parent.parent)
+ }
+ }
+ }
+ root.red = false
+ root
+ }
+
+ // ---- deletion ----
+
+ // returns the old value or Statics.pfMarker if not found
+ private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = {
+ var root = _root
+ val z = root.getNode(key, hash: Int)
+ if (z ne null) {
+ val oldValue = z.value
+ var y = z
+ var yIsRed = y.red
+ var x: RBNode = null
+ var xParent: RBNode = null
+
+ if (z.left eq null) {
+ x = z.right
+ root = transplant(root, z, z.right)
+ xParent = z.parent
+ }
+ else if (z.right eq null) {
+ x = z.left
+ root = transplant(root, z, z.left)
+ xParent = z.parent
+ }
+ else {
+ y = CollisionProofHashMap.minNodeNonNull(z.right)
+ yIsRed = y.red
+ x = y.right
+
+ if (y.parent eq z) xParent = y
+ else {
+ xParent = y.parent
+ root = transplant(root, y, y.right)
+ y.right = z.right
+ y.right.parent = y
+ }
+ root = transplant(root, z, y)
+ y.left = z.left
+ y.left.parent = y
+ y.red = z.red
+ }
+
+ if (!yIsRed) root = fixAfterDelete(root, x, xParent)
+ if(root ne _root) table(bucket) = root
+ oldValue
+ } else Statics.pfMarker
+ }
+
+ private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = {
+ var root = _root
+ var x = node
+ var xParent = parent
+ while ((x ne root) && isBlack(x)) {
+ if (x eq xParent.left) {
+ var w = xParent.right
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ root = rotateLeft(root, xParent)
+ w = xParent.right
+ }
+ if (isBlack(w.left) && isBlack(w.right)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.right)) {
+ w.left.red = false
+ w.red = true
+ root = rotateRight(root, w)
+ w = xParent.right
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.right.red = false
+ root = rotateLeft(root, xParent)
+ x = root
+ }
+ } else { // symmetric cases
+ var w = xParent.left
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ root = rotateRight(root, xParent)
+ w = xParent.left
+ }
+ if (isBlack(w.right) && isBlack(w.left)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.left)) {
+ w.right.red = false
+ w.red = true
+ root = rotateLeft(root, w)
+ w = xParent.left
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.left.red = false
+ root = rotateRight(root, xParent)
+ x = root
+ }
+ }
+ xParent = x.parent
+ }
+ if (x ne null) x.red = false
+ root
+ }
+
+ // ---- helpers ----
+
+ @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = {
+ var root = _root
+ val y = x.right
+ x.right = y.left
+
+ val xp = x.parent
+ if (y.left ne null) y.left.parent = x
+ y.parent = xp
+
+ if (xp eq null) root = y
+ else if (x eq xp.left) xp.left = y
+ else xp.right = y
+
+ y.left = x
+ x.parent = y
+ root
+ }
+
+ @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = {
+ var root = _root
+ val y = x.left
+ x.left = y.right
+
+ val xp = x.parent
+ if (y.right ne null) y.right.parent = x
+ y.parent = xp
+
+ if (xp eq null) root = y
+ else if (x eq xp.right) xp.right = y
+ else xp.left = y
+
+ y.right = x
+ x.parent = y
+ root
+ }
+
+ /**
+ * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous
+ * parent and setting `from`'s parent to `to`'s previous parent. The children of `from` are left unchanged.
+ */
+ private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = {
+ var root = _root
+ if (to.parent eq null) root = from
+ else if (to eq to.parent.left) to.parent.left = from
+ else to.parent.right = from
+ if (from ne null) from.parent = to.parent
+ root
+ }
+
+ // building
+
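+ // Worked example for fromNodes (illustrative): with size = 6,
+ // maxUsedDepth = 32 - numberOfLeadingZeros(6) = 3, so only nodes placed at the
+ // deepest level (3) are colored red; all shallower nodes stay black, which keeps
+ // the black height uniform across every path.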
+ def fromNodes(xs: Iterator[Node], size: Int): RBNode = {
+ val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+ def f(level: Int, size: Int): RBNode = size match {
+ case 0 => null
+ case 1 =>
+ val nn = xs.next()
+ val (key, hash, value) = nn match {
+ case nn: LLNode @uc => (nn.key, nn.hash, nn.value)
+ case nn: RBNode @uc => (nn.key, nn.hash, nn.value)
+ }
+ new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null)
+ case n =>
+ val leftSize = (size-1)/2
+ val left = f(level+1, leftSize)
+ val nn = xs.next()
+ val right = f(level+1, size-1-leftSize)
+ val (key, hash, value) = nn match {
+ case nn: LLNode @uc => (nn.key, nn.hash, nn.value)
+ case nn: RBNode @uc => (nn.key, nn.hash, nn.value)
+ }
+ val n = new RBNode(key, hash, value, false, left, right, null)
+ if(left ne null) left.parent = n
+ right.parent = n
+ n
+ }
+ f(1, size)
+ }
+}
+
+/**
+ * $factoryInfo
+ * @define Coll `mutable.CollisionProofHashMap`
+ * @define coll mutable collision-proof hash map
+ */
+@SerialVersionUID(3L)
+object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] {
+ private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`."
+
+ def from[sealed K : Ordering, sealed V](it: scala.collection.IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = {
+ val k = it.knownSize
+ val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity
+ new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it
+ }
+
+ def empty[sealed K : Ordering, sealed V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V]
+
+ def newBuilder[sealed K : Ordering, sealed V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor)
+
+ def newBuilder[sealed K : Ordering, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] =
+ new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) {
+ override def sizeHint(size: Int) = elems.sizeHint(size)
+ }
+
+ /** The default load factor for the hash table */
+ final def defaultLoadFactor: Double = 0.75
+
+ /** The default initial capacity for the hash table */
+ final def defaultInitialCapacity: Int = 16
+
+ @SerialVersionUID(3L)
+ private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable {
+ def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it
+ def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering)
+ }
+
+ @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = {
+ val i = hash - node.hash
+ if(i != 0) i else ord.compare(key, node.key)
+ }
+
+ @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = {
+ /*val i = hash - node.hash
+ if(i != 0) i else*/ ord.compare(key, node.key)
+ }
+
+ private final val treeifyThreshold = 8
+
+ // Superclass of RBNode and LLNode that helps the JIT optimize instance checks; it has no shared fields.
+ // Keeping calls monomorphic where possible and dispatching manually where needed is faster.
+ sealed abstract class Node
+
+ /////////////////////////// Red-Black Tree Node
+
+ final class RBNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node {
+ override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")"
+
+ @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = {
+ val cmp = compare(k, h, this)
+ if (cmp < 0) {
+ if(left ne null) left.getNode(k, h) else null
+ } else if (cmp > 0) {
+ if(right ne null) right.getNode(k, h) else null
+ } else this
+ }
+
+ def foreach[U](f: ((K, V)) => U): Unit = {
+ if(left ne null) left.foreach(f)
+ f((key, value))
+ if(right ne null) right.foreach(f)
+ }
+
+ def foreachEntry[U](f: (K, V) => U): Unit = {
+ if(left ne null) left.foreachEntry(f)
+ f(key, value)
+ if(right ne null) right.foreachEntry(f)
+ }
+
+ def foreachNode[U](f: RBNode[K, V] => U): Unit = {
+ if(left ne null) left.foreachNode(f)
+ f(this)
+ if(right ne null) right.foreachNode(f)
+ }
+ }
+
+ @`inline` private def leaf[sealed A, sealed B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] =
+ new RBNode(key, hash, value, red, null, null, parent)
+
+ @tailrec private def minNodeNonNull[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] =
+ if (node.left eq null) node else minNodeNonNull(node.left)
+
+ /**
+ * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is,
+ * therefore, the last node), this method returns `null`.
+ */
+ private def successor[sealed A, sealed B](node: RBNode[A, B]): RBNode[A, B] = {
+ if (node.right ne null) minNodeNonNull(node.right)
+ else {
+ var x = node
+ var y = x.parent
+ while ((y ne null) && (x eq y.right)) {
+ x = y
+ y = y.parent
+ }
+ y
+ }
+ }
+
+ private final class RBNodesIterator[sealed A, sealed B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] {
+ private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree)
+
+ def hasNext: Boolean = nextNode ne null
+
+ @throws[NoSuchElementException]
+ def next(): RBNode[A, B] = nextNode match {
+ case null => Iterator.empty.next()
+ case node =>
+ nextNode = successor(node)
+ node
+ }
+ }
+
+ /////////////////////////// Linked List Node
+
+ private final class LLNode[sealed K, sealed V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node {
+ override def toString = s"LLNode($key, $value, $hash) -> $next"
+
+ private[this] def eq(a: Any, b: Any): Boolean =
+ if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b)
+
+ @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = {
+ if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this
+ else if((next eq null) || (hash > h)) null
+ else next.getNode(k, h)
+ }
+
+ @tailrec def foreach[U](f: ((K, V)) => U): Unit = {
+ f((key, value))
+ if(next ne null) next.foreach(f)
+ }
+
+ @tailrec def foreachEntry[U](f: (K, V) => U): Unit = {
+ f(key, value)
+ if(next ne null) next.foreachEntry(f)
+ }
+
+ @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = {
+ f(this)
+ if(next ne null) next.foreachNode(f)
+ }
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala
new file mode 100644
index 000000000000..4d6f989e6f3d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/GrowableBuilder.scala
@@ -0,0 +1,37 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.mutable
+import language.experimental.captureChecking
+
+/** The canonical builder for collections that are growable, i.e. that support an
+ * efficient `+=` method which adds an element to the collection.
+ *
+ * GrowableBuilders can produce only a single instance of the collection they are growing.
+ *
+ * @define Coll `GrowableBuilder`
+ * @define coll growing builder
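+ *
+ * A minimal usage sketch (illustrative, not from the upstream scaladoc):
+ * {{{
+ * import scala.collection.mutable.{GrowableBuilder, ListBuffer}
+ *
+ * val b = new GrowableBuilder(ListBuffer.empty[Int])
+ * b += 1 += 2
+ * b.result()   // ListBuffer(1, 2)
+ * }}}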
+ */
+class GrowableBuilder[Elem, To <: Growable[Elem]](protected val elems: To)
+ extends Builder[Elem, To] {
+
+ def clear(): Unit = elems.clear()
+
+ def result(): To = elems
+
+ def addOne(elem: Elem): this.type = { elems += elem; this }
+
+ override def addAll(xs: IterableOnce[Elem]^): this.type = { elems.addAll(xs); this }
+
+ override def knownSize: Int = elems.knownSize
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/HashMap.scala b/tests/pos-special/stdlib/collection/mutable/HashMap.scala
new file mode 100644
index 000000000000..ab45e7ffc73d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/HashMap.scala
@@ -0,0 +1,655 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.{nowarn, tailrec}
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializationProxy
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+
+/** This class implements mutable maps using a hashtable.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]]
+ * section on `Hash Tables` for more information.
+ *
+ * @tparam K the type of the keys contained in this hash map.
+ * @tparam V the type of the values assigned to keys in this hash map.
+ *
+ * @define Coll `mutable.HashMap`
+ * @define coll mutable hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
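+ *
+ * A minimal usage sketch (illustrative, not part of the upstream scaladoc):
+ * {{{
+ * import scala.collection.mutable.HashMap
+ *
+ * val m = HashMap("a" -> 1)
+ * m("b") = 2                    // insert via update
+ * m.getOrElseUpdate("c", 3)     // 3, and "c" -> 3 is now stored
+ * m.remove("a")                 // Some(1)
+ * }}}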
+ */
+@deprecatedInheritance("HashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.0")
+class HashMap[sealed K, sealed V](initialCapacity: Int, loadFactor: Double)
+ extends AbstractMap[K, V]
+ with MapOps[K, V, HashMap, HashMap[K, V]]
+ with StrictOptimizedIterableOps[(K, V), Iterable, HashMap[K, V]]
+ with StrictOptimizedMapOps[K, V, HashMap, HashMap[K, V]]
+ with MapFactoryDefaults[K, V, HashMap, Iterable]
+ with Serializable {
+
+ /* The HashMap class holds the following invariants:
+ * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i.
+ * - Every bucket is sorted in ascending hash order
+ * - The sum of the lengths of all buckets is equal to contentSize.
+ */
+ def this() = this(HashMap.defaultInitialCapacity, HashMap.defaultLoadFactor)
+
+ import HashMap.Node
+
+ /** The actual hash table. */
+ private[this] var table = new Array[Node[K, V]](tableSizeFor(initialCapacity))
+
+ /** The next size value at which to resize (capacity * load factor). */
+ private[this] var threshold: Int = newThreshold(table.length)
+
+ private[this] var contentSize = 0
+
+ override def size: Int = contentSize
+
+ /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash. */
+ @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash)
+
+ /** Computes the improved hash of an original (`any.##`) hash. */
+ @`inline` private[this] def improveHash(originalHash: Int): Int = {
+ // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the
+ // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement
+ // algorithm as in java.util.HashMap.
+ //
+ // This function is also its own inverse. That is, for all ints i, improveHash(improveHash(i)) = i
+ // this allows us to retrieve the original hash when we need it, for instance when appending to an immutable.HashMap
+ // and that is why unimproveHash simply forwards to this method
+ originalHash ^ (originalHash >>> 16)
+ }
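+ // Worked example (illustrative): improveHash(0xABCD1234) = 0xABCD1234 ^ 0x0000ABCD
+ // = 0xABCDB9F9, and improveHash(0xABCDB9F9) restores 0xABCD1234, which is why
+ // unimproveHash can simply forward to this method.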
+
+ /** Computes the improved hash of this key */
+ @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##)
+
+ @`inline` private[this] def index(hash: Int) = hash & (table.length - 1)
+
+ override def contains(key: K): Boolean = findNode(key) ne null
+
+ @`inline` private[this] def findNode(key: K): Node[K, V] = {
+ val hash = computeHash(key)
+ table(index(hash)) match {
+ case null => null
+ case nd => nd.findNode(key, hash)
+ }
+ }
+
+ override def sizeHint(size: Int): Unit = {
+ val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt)
+ if(target > table.length) growTable(target)
+ }
+
+ override def addAll(xs: IterableOnce[(K, V)]^): this.type = {
+ sizeHint(xs.knownSize)
+
+ xs match {
+ case hm: immutable.HashMap[K, V] =>
+ hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false))
+ this
+ case hm: mutable.HashMap[K, V] =>
+ val iter = hm.nodeIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ put0(next.key, next.value, next.hash, getOld = false)
+ }
+ this
+ case lhm: mutable.LinkedHashMap[K, V] =>
+ val iter = lhm.entryIterator
+ while (iter.hasNext) {
+ val entry = iter.next()
+ put0(entry.key, entry.value, entry.hash, getOld = false)
+ }
+ this
+ case thatMap: Map[K, V] =>
+ thatMap.foreachEntry { (key: K, value: V) =>
+ put0(key, value, improveHash(key.##), getOld = false)
+ }
+ this
+ case _ =>
+ super.addAll(xs)
+ }
+ }
+
+ // Override updateWith for performance, so we can do the update while hashing
+ // the input key only once and performing one lookup into the hash table
+ override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
+ if (getClass != classOf[HashMap[_, _]]) {
+ // subclasses of HashMap might customise `get` ...
+ super.updateWith(key)(remappingFunction)
+ } else {
+ val hash = computeHash(key)
+ val indexedHash = index(hash)
+
+ var foundNode: Node[K, V] = null
+ var previousNode: Node[K, V] = null
+ table(indexedHash) match {
+ case null =>
+ case nd =>
+ @tailrec
+ def findNode(prev: Node[K, V], nd: Node[K, V], k: K, h: Int): Unit = {
+ if (h == nd.hash && k == nd.key) {
+ previousNode = prev
+ foundNode = nd
+ }
+ else if ((nd.next eq null) || (nd.hash > h)) ()
+ else findNode(nd, nd.next, k, h)
+ }
+
+ findNode(null, nd, key, hash)
+ }
+
+ val previousValue = foundNode match {
+ case null => None
+ case nd => Some(nd.value)
+ }
+
+ val nextValue = remappingFunction(previousValue)
+
+ (previousValue, nextValue) match {
+ case (None, None) => // do nothing
+
+ case (Some(_), None) =>
+ if (previousNode != null) previousNode.next = foundNode.next
+ else table(indexedHash) = foundNode.next
+ contentSize -= 1
+
+ case (None, Some(value)) =>
+ val newIndexedHash =
+ if (contentSize + 1 >= threshold) {
+ growTable(table.length * 2)
+ index(hash)
+ } else indexedHash
+ put0(key, value, false, hash, newIndexedHash)
+
+ case (Some(_), Some(newValue)) => foundNode.value = newValue
+ }
+ nextValue
+ }
+ }
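+ // Usage sketch (illustrative): increment a counter while hashing the key and
+ // walking the bucket only once:
+ //   counts.updateWith("key") {
+ //     case Some(n) => Some(n + 1)
+ //     case None    => Some(1)
+ //   }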
+
+ override def subtractAll(xs: IterableOnce[K]^): this.type = {
+ if (size == 0) {
+ return this
+ }
+
+ xs match {
+ case hs: immutable.HashSet[K] =>
+ hs.foreachWithHashWhile { (k, h) =>
+ remove0(k, improveHash(h))
+ size > 0
+ }
+ this
+ case hs: mutable.HashSet[K] =>
+ val iter = hs.nodeIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ remove0(next.key, next.hash)
+ if (size == 0) return this
+ }
+ this
+ case lhs: mutable.LinkedHashSet[K] =>
+ val iter = lhs.entryIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ remove0(next.key, next.hash)
+ if (size == 0) return this
+ }
+ this
+ case _ => super.subtractAll(xs)
+ }
+ }
+
+ /** Adds a key-value pair to this map
+ *
+ * @param key the key to add
+ * @param value the value to add
+ * @param hash the **improved** hashcode of `key` (see computeHash)
+ * @param getOld if true, the previous value for `key` (if any) is returned in a `Some`; otherwise the result is `null`
+ */
+ private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = {
+ if(contentSize + 1 >= threshold) growTable(table.length * 2)
+ val idx = index(hash)
+ put0(key, value, getOld, hash, idx)
+ }
+
+ private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = {
+ if(contentSize + 1 >= threshold) growTable(table.length * 2)
+ val hash = computeHash(key)
+ val idx = index(hash)
+ put0(key, value, getOld, hash, idx)
+ }
+
+
+ private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = {
+ table(idx) match {
+ case null =>
+ table(idx) = new Node[K, V](key, hash, value, null)
+ case old =>
+ var prev: Node[K, V] = null
+ var n = old
+ while((n ne null) && n.hash <= hash) {
+ if(n.hash == hash && key == n.key) {
+ val old = n.value
+ n.value = value
+ return if(getOld) Some(old) else null
+ }
+ prev = n
+ n = n.next
+ }
+ if(prev eq null) table(idx) = new Node(key, hash, value, old)
+ else prev.next = new Node(key, hash, value, prev.next)
+ }
+ contentSize += 1
+ null
+ }
+
+ private def remove0(elem: K) : Node[K, V] = remove0(elem, computeHash(elem))
+
+ /** Removes a key from this map if it exists
+ *
+ * @param elem the element to remove
+ * @param hash the **improved** hashcode of `element` (see computeHash)
+ * @return the node that contained element if it was present, otherwise null
+ */
+ private[this] def remove0(elem: K, hash: Int) : Node[K, V] = {
+ val idx = index(hash)
+ table(idx) match {
+ case null => null
+ case nd if nd.hash == hash && nd.key == elem =>
+ // first element matches
+ table(idx) = nd.next
+ contentSize -= 1
+ nd
+ case nd =>
+ // find an element that matches
+ var prev = nd
+ var next = nd.next
+ while((next ne null) && next.hash <= hash) {
+ if(next.hash == hash && next.key == elem) {
+ prev.next = next.next
+ contentSize -= 1
+ return next
+ }
+ prev = next
+ next = next.next
+ }
+ null
+ }
+ }
+
+ private[this] abstract class HashMapIterator[A] extends AbstractIterator[A] {
+ private[this] var i = 0
+ private[this] var node: Node[K, V] = null
+ private[this] val len = table.length
+
+ protected[this] def extract(nd: Node[K, V]): A
+
+ def hasNext: Boolean = {
+ if(node ne null) true
+ else {
+ while(i < len) {
+ val n = table(i)
+ i += 1
+ if(n ne null) { node = n; return true }
+ }
+ false
+ }
+ }
+
+ def next(): A =
+ if(!hasNext) Iterator.empty.next()
+ else {
+ val r = extract(node)
+ node = node.next
+ r
+ }
+ }
+
+ override def iterator: Iterator[(K, V)] =
+ if(size == 0) Iterator.empty
+ else new HashMapIterator[(K, V)] {
+ protected[this] def extract(nd: Node[K, V]) = (nd.key, nd.value)
+ }
+
+ override def keysIterator: Iterator[K] =
+ if(size == 0) Iterator.empty
+ else new HashMapIterator[K] {
+ protected[this] def extract(nd: Node[K, V]) = nd.key
+ }
+
+ override def valuesIterator: Iterator[V] =
+ if(size == 0) Iterator.empty
+ else new HashMapIterator[V] {
+ protected[this] def extract(nd: Node[K, V]) = nd.value
+ }
+
+
+ /** Returns an iterator over the nodes stored in this HashMap */
+ private[collection] def nodeIterator: Iterator[Node[K, V]] =
+ if(size == 0) Iterator.empty
+ else new HashMapIterator[Node[K, V]] {
+ protected[this] def extract(nd: Node[K, V]) = nd
+ }
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit =
+ shape.
+ parUnbox(new convert.impl.AnyTableStepper[(K, V), Node[K, V]](size, table, _.next, node => (node.key, node.value), 0, table.length)).
+ asInstanceOf[S with EfficientSplit]
+
+ override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
+ import convert.impl._
+ val s = shape.shape match {
+ case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length)
+ case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length)
+ case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length)
+ case _ => shape.parUnbox(new AnyTableStepper[K, Node[K, V]](size, table, _.next, _.key, 0, table.length))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
+ import convert.impl._
+ val s = shape.shape match {
+ case StepperShape.IntShape => new IntTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Int], 0, table.length)
+ case StepperShape.LongShape => new LongTableStepper[Node[K, V]] (size, table, _.next, _.value.asInstanceOf[Long], 0, table.length)
+ case StepperShape.DoubleShape => new DoubleTableStepper[Node[K, V]](size, table, _.next, _.value.asInstanceOf[Double], 0, table.length)
+ case _ => shape.parUnbox(new AnyTableStepper[V, Node[K, V]](size, table, _.next, _.value, 0, table.length))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ private[this] def growTable(newlen: Int) = {
+ if (newlen < 0)
+ throw new RuntimeException(s"new HashMap table size $newlen exceeds maximum")
+ var oldlen = table.length
+ threshold = newThreshold(newlen)
+ if(size == 0) table = new Array(newlen)
+ else {
+ table = java.util.Arrays.copyOf(table, newlen)
+ val preLow: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null)
+ val preHigh: Node[K, V] = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null)
+ // Split buckets until the new length has been reached. This could be done more
+ // efficiently when growing an already filled table to more than double the size.
+ while(oldlen < newlen) {
+ var i = 0
+ while (i < oldlen) {
+ val old = table(i)
+ if(old ne null) {
+ preLow.next = null
+ preHigh.next = null
+ var lastLow: Node[K, V] = preLow
+ var lastHigh: Node[K, V] = preHigh
+ var n = old
+ while(n ne null) {
+ val next = n.next
+ if((n.hash & oldlen) == 0) { // keep low
+ lastLow.next = n
+ lastLow = n
+ } else { // move to high
+ lastHigh.next = n
+ lastHigh = n
+ }
+ n = next
+ }
+ lastLow.next = null
+ if(old ne preLow.next) table(i) = preLow.next
+ if(preHigh.next ne null) {
+ table(i + oldlen) = preHigh.next
+ lastHigh.next = null
+ }
+ }
+ i += 1
+ }
+ oldlen *= 2
+ }
+ }
+ }
+
+ private[this] def tableSizeFor(capacity: Int) =
+ (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30)
+
+ private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt
+
+ override def clear(): Unit = {
+ java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null)
+ contentSize = 0
+ }
+
+ def get(key: K): Option[V] = findNode(key) match {
+ case null => None
+ case nd => Some(nd.value)
+ }
+
+ @throws[NoSuchElementException]
+ override def apply(key: K): V = findNode(key) match {
+ case null => default(key)
+ case nd => nd.value
+ }
+
+ override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+ if (getClass != classOf[HashMap[_, _]]) {
+ // subclasses of HashMap might customise `get` ...
+ super.getOrElse(key, default)
+ } else {
+ // .. but in the common case, we can avoid the Option boxing.
+ val nd = findNode(key)
+ if (nd eq null) default else nd.value
+ }
+ }
+
+ override def getOrElseUpdate(key: K, defaultValue: => V): V = {
+ if (getClass != classOf[HashMap[_, _]]) {
+ // subclasses of HashMap might customise `get` ...
+ super.getOrElseUpdate(key, defaultValue)
+ } else {
+ val hash = computeHash(key)
+ val idx = index(hash)
+ val nd = table(idx) match {
+ case null => null
+ case nd => nd.findNode(key, hash)
+ }
+ if(nd != null) nd.value
+ else {
+ val table0 = table
+ val default = defaultValue
+ if(contentSize + 1 >= threshold) growTable(table.length * 2)
+ // Avoid recomputing the index if neither evaluating `defaultValue` nor the growth check above resized the table.
+ val newIdx = if (table0 eq table) idx else index(hash)
+ put0(key, default, false, hash, newIdx)
+ default
+ }
+ }
+ }
+
+ override def put(key: K, value: V): Option[V] = put0(key, value, true) match {
+ case null => None
+ case sm => sm
+ }
+
+ override def remove(key: K): Option[V] = remove0(key) match {
+ case null => None
+ case nd => Some(nd.value)
+ }
+
+ override def update(key: K, value: V): Unit = put0(key, value, false)
+
+ def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this }
+
+ def subtractOne(elem: K): this.type = { remove0(elem); this }
+
+ override def knownSize: Int = size
+
+ override def isEmpty: Boolean = size == 0
+
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ val len = table.length
+ var i = 0
+ while(i < len) {
+ val n = table(i)
+ if(n ne null) n.foreach(f)
+ i += 1
+ }
+ }
+
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ val len = table.length
+ var i = 0
+ while(i < len) {
+ val n = table(i)
+ if(n ne null) n.foreachEntry(f)
+ i += 1
+ }
+ }
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new mutable.HashMap.DeserializationFactory[K, V](table.length, loadFactor), this)
+
+ override def filterInPlace(p: (K, V) => Boolean): this.type = {
+ if (nonEmpty) {
+ var bucket = 0
+
+ while (bucket < table.length) {
+ var head = table(bucket)
+
+ while ((head ne null) && !p(head.key, head.value)) {
+ head = head.next
+ contentSize -= 1
+ }
+
+ if (head ne null) {
+ var prev = head
+ var next = head.next
+
+ while (next ne null) {
+ if (p(next.key, next.value)) {
+ prev = next
+ } else {
+ prev.next = next.next
+ contentSize -= 1
+ }
+ next = next.next
+ }
+ }
+
+ table(bucket) = head
+ bucket += 1
+ }
+ }
+ this
+ }
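+ // Usage sketch (illustrative): keep only entries with positive values, mutating
+ // the map in place without allocating a new one:
+ //   m.filterInPlace((k, v) => v > 0)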
+
+ // TODO: rename to `mapValuesInPlace` and override the base version (not binary compatible)
+ private[mutable] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = {
+ val len = table.length
+ var i = 0
+ while (i < len) {
+ var n = table(i)
+ while (n ne null) {
+ n.value = f(n.key, n.value)
+ n = n.next
+ }
+ i += 1
+ }
+ this
+ }
+
+ override def mapFactory: MapFactory[HashMap] = HashMap
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "HashMap"
+
+ override def hashCode: Int = {
+ if (isEmpty) MurmurHash3.emptyMapHash
+ else {
+ val tupleHashIterator = new HashMapIterator[Any] {
+ var hash: Int = 0
+ override def hashCode: Int = hash
+ override protected[this] def extract(nd: Node[K, V]): Any = {
+ hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##)
+ this
+ }
+ }
+ MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed)
+ }
+ }
+}
+
+/**
+ * $factoryInfo
+ * @define Coll `mutable.HashMap`
+ * @define coll mutable hash map
+ */
+@SerialVersionUID(3L)
+object HashMap extends MapFactory[HashMap] {
+
+ def empty[sealed K, sealed V]: HashMap[K, V] = new HashMap[K, V]
+
+ def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): HashMap[K, V] = {
+ val k = it.knownSize
+ val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity
+ new HashMap[K, V](cap, defaultLoadFactor).addAll(it)
+ }
+
+ def newBuilder[sealed K, sealed V]: Builder[(K, V), HashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor)
+
+ def newBuilder[sealed K, sealed V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), HashMap[K, V]] =
+ new GrowableBuilder[(K, V), HashMap[K, V]](new HashMap[K, V](initialCapacity, loadFactor)) {
+ override def sizeHint(size: Int) = elems.sizeHint(size)
+ }
+
+ /** The default load factor for the hash table */
+ final def defaultLoadFactor: Double = 0.75
+
+ /** The default initial capacity for the hash table */
+ final def defaultInitialCapacity: Int = 16
+
+ @SerialVersionUID(3L)
+ private final class DeserializationFactory[sealed K, sealed V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), HashMap[K, V]] with Serializable {
+ def fromSpecific(it: IterableOnce[(K, V)]^): HashMap[K, V] = new HashMap[K, V](tableLength, loadFactor).addAll(it)
+ def newBuilder: Builder[(K, V), HashMap[K, V]] = HashMap.newBuilder(tableLength, loadFactor)
+ }
+
+ private[collection] final class Node[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: Node[K, V]) {
+ def key: K = _key
+ def hash: Int = _hash
+ def value: V = _value
+ def value_= (v: V): Unit = _value = v
+ def next: Node[K, V] = _next
+ def next_= (n: Node[K, V]): Unit = _next = n
+
+ @tailrec
+ def findNode(k: K, h: Int): Node[K, V] =
+ if(h == _hash && k == _key) this
+ else if((_next eq null) || (_hash > h)) null
+ else _next.findNode(k, h)
+
+ @tailrec
+ def foreach[U](f: ((K, V)) => U): Unit = {
+ f((_key, _value))
+ if(_next ne null) _next.foreach(f)
+ }
+
+ @tailrec
+ def foreachEntry[U](f: (K, V) => U): Unit = {
+ f(_key, _value)
+ if(_next ne null) _next.foreachEntry(f)
+ }
+
+ override def toString = s"Node($key, $value, $hash) -> $next"
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/HashSet.scala b/tests/pos-special/stdlib/collection/mutable/HashSet.scala
new file mode 100644
index 000000000000..e8c055ff15ef
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/HashSet.scala
@@ -0,0 +1,457 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.tailrec
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializationProxy
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+
+/** This class implements mutable sets using a hashtable.
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]]
+ * section on `Hash Tables` for more information.
+ *
+ * @define Coll `mutable.HashSet`
+ * @define coll mutable hash set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
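+ *
+ * A minimal usage sketch (illustrative, not part of the upstream scaladoc):
+ * {{{
+ * import scala.collection.mutable.HashSet
+ *
+ * val s = HashSet(1, 2, 3)
+ * s.add(4)          // true, newly added
+ * s.add(2)          // false, already present
+ * s.remove(1)       // true
+ * }}}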
+ */
+final class HashSet[sealed A](initialCapacity: Int, loadFactor: Double)
+ extends AbstractSet[A]
+ with SetOps[A, HashSet, HashSet[A]]
+ with StrictOptimizedIterableOps[A, HashSet, HashSet[A]]
+ with IterableFactoryDefaults[A, HashSet]
+ with Serializable {
+
+ def this() = this(HashSet.defaultInitialCapacity, HashSet.defaultLoadFactor)
+
+ import HashSet.Node
+
+ /* The HashSet class holds the following invariants:
+ * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i.
+ * - Every bucket is sorted in ascending hash order
+ * - The sum of the lengths of all buckets is equal to contentSize.
+ */
+ /** The actual hash table. */
+ private[this] var table = new Array[Node[A]](tableSizeFor(initialCapacity))
+
+ /** The next size value at which to resize (capacity * load factor). */
+ private[this] var threshold: Int = newThreshold(table.length)
+
+ private[this] var contentSize = 0
+
+ override def size: Int = contentSize
+
+ /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash. */
+ @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash)
+
+ /** Computes the improved hash of an original (`any.##`) hash. */
+ private[this] def improveHash(originalHash: Int): Int = {
+ // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the
+ // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement
+ // algorithm as in java.util.HashMap.
+ originalHash ^ (originalHash >>> 16)
+ }
+
+ /** Computes the improved hash of this element */
+ @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##)
+
+ @`inline` private[this] def index(hash: Int) = hash & (table.length - 1)
+
+ override def contains(elem: A): Boolean = findNode(elem) ne null
+
+ @`inline` private[this] def findNode(elem: A): Node[A] = {
+ val hash = computeHash(elem)
+ table(index(hash)) match {
+ case null => null
+ case nd => nd.findNode(elem, hash)
+ }
+ }
+
+ override def sizeHint(size: Int): Unit = {
+ val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt)
+ if(target > table.length) growTable(target)
+ }
+
+ override def add(elem: A) : Boolean = {
+ if(contentSize + 1 >= threshold) growTable(table.length * 2)
+ addElem(elem, computeHash(elem))
+ }
+
+ override def addAll(xs: IterableOnce[A]^): this.type = {
+ sizeHint(xs.knownSize)
+ xs match {
+ case hs: immutable.HashSet[A] =>
+ hs.foreachWithHash((k, h) => addElem(k, improveHash(h)))
+ this
+ case hs: mutable.HashSet[A] =>
+ val iter = hs.nodeIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ addElem(next.key, next.hash)
+ }
+ this
+ case lhs: mutable.LinkedHashSet[A] =>
+ val iter = lhs.entryIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ addElem(next.key, next.hash)
+ }
+ this
+ case _ => super.addAll(xs)
+ }
+ }
+
+ override def subtractAll(xs: IterableOnce[A]^): this.type = {
+ if (size == 0) {
+ return this
+ }
+
+ xs match {
+ case hs: immutable.HashSet[A] =>
+ hs.foreachWithHashWhile { (k, h) =>
+ remove(k, improveHash(h))
+ size > 0
+ }
+ this
+ case hs: mutable.HashSet[A] =>
+ val iter = hs.nodeIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ remove(next.key, next.hash)
+ if (size == 0) return this
+ }
+ this
+ case lhs: mutable.LinkedHashSet[A] =>
+ val iter = lhs.entryIterator
+ while (iter.hasNext) {
+ val next = iter.next()
+ remove(next.key, next.hash)
+ if (size == 0) return this
+ }
+ this
+ case _ => super.subtractAll(xs)
+ }
+ }
+
+ /** Adds an element to this set
+ * @param elem element to add
+ * @param hash the **improved** hash of `elem` (see computeHash)
+ */
+ private[this] def addElem(elem: A, hash: Int) : Boolean = {
+ val idx = index(hash)
+ table(idx) match {
+ case null =>
+ table(idx) = new Node(elem, hash, null)
+ case old =>
+ var prev: Node[A] = null
+ var n = old
+ while((n ne null) && n.hash <= hash) {
+ if(n.hash == hash && elem == n.key) return false
+ prev = n
+ n = n.next
+ }
+ if(prev eq null)
+ table(idx) = new Node(elem, hash, old)
+ else
+ prev.next = new Node(elem, hash, prev.next)
+ }
+ contentSize += 1
+ true
+ }
+
+ private[this] def remove(elem: A, hash: Int): Boolean = {
+ val idx = index(hash)
+ table(idx) match {
+ case null => false
+ case nd if nd.hash == hash && nd.key == elem =>
+ // first element matches
+ table(idx) = nd.next
+ contentSize -= 1
+ true
+ case nd =>
+ // find an element that matches
+ var prev = nd
+ var next = nd.next
+ while((next ne null) && next.hash <= hash) {
+ if(next.hash == hash && next.key == elem) {
+ prev.next = next.next
+ contentSize -= 1
+ return true
+ }
+ prev = next
+ next = next.next
+ }
+ false
+ }
+ }
+
+ override def remove(elem: A) : Boolean = remove(elem, computeHash(elem))
+
+ private[this] abstract class HashSetIterator[B] extends AbstractIterator[B] {
+ private[this] var i = 0
+ private[this] var node: Node[A] = null
+ private[this] val len = table.length
+
+ protected[this] def extract(nd: Node[A]): B
+
+ def hasNext: Boolean = {
+ if(node ne null) true
+ else {
+ while(i < len) {
+ val n = table(i)
+ i += 1
+ if(n ne null) { node = n; return true }
+ }
+ false
+ }
+ }
+
+ def next(): B =
+ if(!hasNext) Iterator.empty.next()
+ else {
+ val r = extract(node)
+ node = node.next
+ r
+ }
+ }
+
+ override def iterator: Iterator[A] = new HashSetIterator[A] {
+ override protected[this] def extract(nd: Node[A]): A = nd.key
+ }
+
+ /** Returns an iterator over the nodes stored in this HashSet */
+ private[collection] def nodeIterator: Iterator[Node[A]] = new HashSetIterator[Node[A]] {
+ override protected[this] def extract(nd: Node[A]): Node[A] = nd
+ }
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+ import convert.impl._
+ val s = shape.shape match {
+ case StepperShape.IntShape => new IntTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Int], 0, table.length)
+ case StepperShape.LongShape => new LongTableStepper[Node[A]] (size, table, _.next, _.key.asInstanceOf[Long], 0, table.length)
+ case StepperShape.DoubleShape => new DoubleTableStepper[Node[A]](size, table, _.next, _.key.asInstanceOf[Double], 0, table.length)
+ case _ => shape.parUnbox(new AnyTableStepper[A, Node[A]](size, table, _.next, _.key, 0, table.length))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ private[this] def growTable(newlen: Int) = {
+ var oldlen = table.length
+ threshold = newThreshold(newlen)
+ if(size == 0) table = new Array(newlen)
+ else {
+ table = java.util.Arrays.copyOf(table, newlen)
+ val preLow: Node[A] = new Node(null.asInstanceOf[A], 0, null)
+ val preHigh: Node[A] = new Node(null.asInstanceOf[A], 0, null)
+ // Split buckets until the new length has been reached. This could be done more
+ // efficiently when growing an already filled table to more than double the size.
+ while(oldlen < newlen) {
+ var i = 0
+ while (i < oldlen) {
+ val old = table(i)
+ if(old ne null) {
+ preLow.next = null
+ preHigh.next = null
+ var lastLow: Node[A] = preLow
+ var lastHigh: Node[A] = preHigh
+ var n = old
+ while(n ne null) {
+ val next = n.next
+ if((n.hash & oldlen) == 0) { // keep low
+ lastLow.next = n
+ lastLow = n
+ } else { // move to high
+ lastHigh.next = n
+ lastHigh = n
+ }
+ n = next
+ }
+ lastLow.next = null
+ if(old ne preLow.next) table(i) = preLow.next
+ if(preHigh.next ne null) {
+ table(i + oldlen) = preHigh.next
+ lastHigh.next = null
+ }
+ }
+ i += 1
+ }
+ oldlen *= 2
+ }
+ }
+ }
+
+ override def filterInPlace(p: A => Boolean): this.type = {
+ if (nonEmpty) {
+ var bucket = 0
+
+ while (bucket < table.length) {
+ var head = table(bucket)
+
+ while ((head ne null) && !p(head.key)) {
+ head = head.next
+ contentSize -= 1
+ }
+
+ if (head ne null) {
+ var prev = head
+ var next = head.next
+
+ while (next ne null) {
+ if (p(next.key)) {
+ prev = next
+ } else {
+ prev.next = next.next
+ contentSize -= 1
+ }
+ next = next.next
+ }
+ }
+
+ table(bucket) = head
+ bucket += 1
+ }
+ }
+ this
+ }
+
+ /*
+ private[mutable] def checkTable(): Unit = {
+ var i = 0
+ var count = 0
+ var prev: Node[A] = null
+ while(i < table.length) {
+ var n = table(i)
+ prev = null
+ while(n != null) {
+ count += 1
+ assert(index(n.hash) == i)
+ if(prev ne null) assert(prev.hash <= n.hash)
+ prev = n
+ n = n.next
+ }
+ i += 1
+ }
+ assert(contentSize == count)
+ }
+ */
+
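+  // The smallest power of two >= capacity, at least 8 and at most 2^30.
+  // Illustrative values: tableSizeFor(3) == 8, tableSizeFor(16) == 16, tableSizeFor(17) == 32.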
+ private[this] def tableSizeFor(capacity: Int) =
+ (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30)
+
+ private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt
+
+ def clear(): Unit = {
+ java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null)
+ contentSize = 0
+ }
+
+ override def iterableFactory: IterableFactory[HashSet] = HashSet
+
+ @`inline` def addOne(elem: A): this.type = { add(elem); this }
+
+ @`inline` def subtractOne(elem: A): this.type = { remove(elem); this }
+
+ override def knownSize: Int = size
+
+ override def isEmpty: Boolean = size == 0
+
+ override def foreach[U](f: A => U): Unit = {
+ val len = table.length
+ var i = 0
+ while(i < len) {
+ val n = table(i)
+ if(n ne null) n.foreach(f)
+ i += 1
+ }
+ }
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new HashSet.DeserializationFactory[A](table.length, loadFactor), this)
+
+ override protected[this] def className = "HashSet"
+
+ override def hashCode: Int = {
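+    // Hashes the elements' (unimproved) hashes without allocating per element:
+    // the iterator object itself is fed to MurmurHash3 and reports the current
+    // node's hash through its own hashCode (see the extract override below).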
+ val setIterator = this.iterator
+ val hashIterator: Iterator[Any] =
+ if (setIterator.isEmpty) setIterator
+ else new HashSetIterator[Any] {
+ var hash: Int = 0
+ override def hashCode: Int = hash
+ override protected[this] def extract(nd: Node[A]): Any = {
+ hash = unimproveHash(nd.hash)
+ this
+ }
+ }
+ MurmurHash3.unorderedHash(hashIterator, MurmurHash3.setSeed)
+ }
+}
+
+/**
+ * $factoryInfo
+ * @define Coll `mutable.HashSet`
+ * @define coll mutable hash set
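+ *
+ * An illustrative sketch (pre-sizing the builder to avoid intermediate resizes):
+ * {{{
+ *   val b = HashSet.newBuilder[String](initialCapacity = 64, loadFactor = 0.75)
+ *   b += "a"
+ *   b += "b"
+ *   val s: HashSet[String] = b.result()
+ * }}}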
+ */
+@SerialVersionUID(3L)
+object HashSet extends IterableFactory[HashSet] {
+
+ def from[sealed B](it: scala.collection.IterableOnce[B]^): HashSet[B] = {
+ val k = it.knownSize
+ val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity
+ new HashSet[B](cap, defaultLoadFactor) ++= it
+ }
+
+ def empty[sealed A]: HashSet[A] = new HashSet[A]
+
+ def newBuilder[sealed A]: Builder[A, HashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor)
+
+ def newBuilder[sealed A](initialCapacity: Int, loadFactor: Double): Builder[A, HashSet[A]] =
+ new GrowableBuilder[A, HashSet[A]](new HashSet[A](initialCapacity, loadFactor)) {
+ override def sizeHint(size: Int) = elems.sizeHint(size)
+ }
+
+ /** The default load factor for the hash table */
+ final def defaultLoadFactor: Double = 0.75
+
+ /** The default initial capacity for the hash table */
+ final def defaultInitialCapacity: Int = 16
+
+ @SerialVersionUID(3L)
+ private final class DeserializationFactory[sealed A](val tableLength: Int, val loadFactor: Double) extends Factory[A, HashSet[A]] with Serializable {
+ def fromSpecific(it: IterableOnce[A]^): HashSet[A] = new HashSet[A](tableLength, loadFactor) ++= it
+ def newBuilder: Builder[A, HashSet[A]] = HashSet.newBuilder(tableLength, loadFactor)
+ }
+
+ private[collection] final class Node[K](_key: K, _hash: Int, private[this] var _next: Node[K]) {
+ def key: K = _key
+ def hash: Int = _hash
+ def next: Node[K] = _next
+ def next_= (n: Node[K]): Unit = _next = n
+
+ @tailrec
+ def findNode(k: K, h: Int): Node[K] =
+ if(h == _hash && k == _key) this
+ else if((_next eq null) || (_hash > h)) null
+ else _next.findNode(k, h)
+
+ @tailrec
+ def foreach[U](f: K => U): Unit = {
+ f(_key)
+ if(_next ne null) _next.foreach(f)
+ }
+
+ override def toString = s"Node($key, $hash) -> $next"
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/HashTable.scala b/tests/pos-special/stdlib/collection/mutable/HashTable.scala
new file mode 100644
index 000000000000..a3534e322cf3
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/HashTable.scala
@@ -0,0 +1,418 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.mutable
+
+import collection.{AbstractIterator, Iterator}
+
+import java.lang.Integer.{numberOfLeadingZeros, rotateRight}
+import scala.util.hashing.byteswap32
+
+import java.lang.Integer
+import language.experimental.captureChecking
+
+/** This class can be used to construct data structures that are based
+ * on hashtables. Class `HashTable[A]` implements a hashtable
+ * that maps keys of type `A` to values of the fully abstract
+ * member type `Entry`. Classes that make use of `HashTable`
+ * have to provide an implementation for `Entry`.
+ *
+ * There are mainly two parameters that affect the performance of a hashtable:
+ * the initial size and the load factor. The size
+ * refers to the number of buckets in the hashtable, and the load
+ * factor is a measure of how full the hashtable is allowed to get before
+ * its size is automatically doubled. Both parameters may be changed by
+ * overriding the corresponding values in class `HashTable`.
+ *
+ * @tparam A type of the elements contained in this hash table.
+ */
+// Not used in the standard library, but used in scala-parallel-collections
+private[collection] trait HashTable[sealed A, B, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashUtils[A] {
+  // Replacing the Entry type parameter with an abstract type member here would allow us
+  // not to expose implementation-specific entry classes such as `DefaultEntry` or
+  // `LinkedEntry` to the public. However, I'm afraid it's too late now for such a breaking change.
+ import HashTable._
+
+ protected var _loadFactor = defaultLoadFactor
+
+ /** The actual hash table.
+ */
+ protected[collection] var table: Array[HashEntry[A, Entry]] = new Array(initialCapacity)
+
+ /** The number of mappings contained in this hash table.
+ */
+ protected[collection] var tableSize: Int = 0
+
+ final def size: Int = tableSize
+
+ /** The next size value at which to resize (capacity * load factor).
+ */
+ protected[collection] var threshold: Int = initialThreshold(_loadFactor)
+
+ /** The array keeping track of the number of elements in 32 element blocks.
+ */
+ protected var sizemap: Array[Int] = null
+
+ protected var seedvalue: Int = tableSizeSeed
+
+ protected def tableSizeSeed = Integer.bitCount(table.length - 1)
+
+ /** The initial size of the hash table.
+ */
+ protected def initialSize: Int = 16
+
+ /** The initial threshold.
+ */
+ private def initialThreshold(_loadFactor: Int): Int = newThreshold(_loadFactor, initialCapacity)
+
+ private def initialCapacity = capacity(initialSize)
+
+ private def lastPopulatedIndex = {
+ var idx = table.length - 1
+ while (table(idx) == null && idx > 0)
+ idx -= 1
+
+ idx
+ }
+
+ /**
+ * Initializes the collection from the input stream. `readEntry` will be called for each
+ * entry to be read from the input stream.
+ */
+ private[collection] def init(in: java.io.ObjectInputStream, readEntry: => Entry): Unit = {
+ _loadFactor = in.readInt()
+ assert(_loadFactor > 0)
+
+ val size = in.readInt()
+ tableSize = 0
+ assert(size >= 0)
+
+ seedvalue = in.readInt()
+
+ val smDefined = in.readBoolean()
+
+ table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
+ threshold = newThreshold(_loadFactor, table.length)
+
+ if (smDefined) sizeMapInit(table.length) else sizemap = null
+
+ var index = 0
+ while (index < size) {
+ addEntry(readEntry)
+ index += 1
+ }
+ }
+
+ /**
+ * Serializes the collection to the output stream by saving the load factor, collection
+ * size and collection entries. `writeEntry` is responsible for writing an entry to the stream.
+ *
+ * `foreachEntry` determines the order in which the key/value pairs are saved to the stream. To
+ * deserialize, `init` should be used.
+ */
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream, writeEntry: Entry => Unit): Unit = {
+ out.writeInt(_loadFactor)
+ out.writeInt(tableSize)
+ out.writeInt(seedvalue)
+ out.writeBoolean(isSizeMapDefined)
+
+ foreachEntry(writeEntry)
+ }
+
+ /** Find entry with given key in table, null if not found.
+ */
+ final def findEntry(key: A): Entry =
+ findEntry0(key, index(elemHashCode(key)))
+
+ protected[collection] final def findEntry0(key: A, h: Int): Entry = {
+ var e = table(h).asInstanceOf[Entry]
+ while (e != null && !elemEquals(e.key, key)) e = e.next
+ e
+ }
+
+ /** Add entry to table
+ * pre: no entry with same key exists
+ */
+ protected[collection] final def addEntry(e: Entry): Unit = {
+ addEntry0(e, index(elemHashCode(e.key)))
+ }
+
+ protected[collection] final def addEntry0(e: Entry, h: Int): Unit = {
+ e.next = table(h).asInstanceOf[Entry]
+ table(h) = e
+ tableSize = tableSize + 1
+ nnSizeMapAdd(h)
+ if (tableSize > threshold)
+ resize(2 * table.length)
+ }
+
+ /** Find entry with given key in table, or add new one if not found.
+   *  May be somewhat faster than a `findEntry`/`addEntry` pair, as it
+   *  computes the entry's hash index only once.
+ * Returns entry found in table or null.
+ * New entries are created by calling `createNewEntry` method.
+ */
+ def findOrAddEntry(key: A, value: B): Entry = {
+ val h = index(elemHashCode(key))
+ val e = findEntry0(key, h)
+ if (e ne null) e else { addEntry0(createNewEntry(key, value), h); null }
+ }
+
+  /** Creates a new entry to be immediately inserted into the hashtable.
+   *  This method is guaranteed to be called only once, and only when the entry
+   *  will actually be added; an implementation may therefore be side-effecting.
+ */
+ def createNewEntry(key: A, value: B): Entry
+
+ /** Remove entry from table if present.
+ */
+ final def removeEntry(key: A) : Entry = {
+ removeEntry0(key, index(elemHashCode(key)))
+ }
+ /** Remove entry from table if present.
+ */
+ private[collection] final def removeEntry0(key: A, h: Int) : Entry = {
+ var e = table(h).asInstanceOf[Entry]
+ if (e != null) {
+ if (elemEquals(e.key, key)) {
+ table(h) = e.next
+ tableSize = tableSize - 1
+ nnSizeMapRemove(h)
+ e.next = null
+ return e
+ } else {
+ var e1 = e.next
+ while (e1 != null && !elemEquals(e1.key, key)) {
+ e = e1
+ e1 = e1.next
+ }
+ if (e1 != null) {
+ e.next = e1.next
+ tableSize = tableSize - 1
+ nnSizeMapRemove(h)
+ e1.next = null
+ return e1
+ }
+ }
+ }
+ null
+ }
+
+ /** An iterator returning all entries.
+ */
+ def entriesIterator: Iterator[Entry] = new AbstractIterator[Entry] {
+ val iterTable = table
+ var idx = lastPopulatedIndex
+ var es = iterTable(idx)
+
+ def hasNext = es != null
+ def next() = {
+ val res = es
+ es = es.next
+ while (es == null && idx > 0) {
+ idx = idx - 1
+ es = iterTable(idx)
+ }
+ res.asInstanceOf[Entry]
+ }
+ }
+
+ /** Avoid iterator for a 2x faster traversal. */
+ def foreachEntry[U](f: Entry => U): Unit = {
+ val iterTable = table
+ var idx = lastPopulatedIndex
+ var es = iterTable(idx)
+
+ while (es != null) {
+ val next = es.next // Cache next in case f removes es.
+ f(es.asInstanceOf[Entry])
+ es = next
+
+ while (es == null && idx > 0) {
+ idx -= 1
+ es = iterTable(idx)
+ }
+ }
+ }
+
+ /** Remove all entries from table
+ */
+ def clearTable(): Unit = {
+ var i = table.length - 1
+ while (i >= 0) { table(i) = null; i = i - 1 }
+ tableSize = 0
+ nnSizeMapReset(0)
+ }
+
+ private def resize(newSize: Int): Unit = {
+ val oldTable = table
+ table = new Array(newSize)
+ nnSizeMapReset(table.length)
+ var i = oldTable.length - 1
+ while (i >= 0) {
+ var e = oldTable(i)
+ while (e != null) {
+ val h = index(elemHashCode(e.key))
+ val e1 = e.next
+ e.next = table(h).asInstanceOf[Entry]
+ table(h) = e
+ e = e1
+ nnSizeMapAdd(h)
+ }
+ i = i - 1
+ }
+ threshold = newThreshold(_loadFactor, newSize)
+ }
+
+ /* Size map handling code */
+
+ /*
+   * The following three nnSizeMap* functions (Add, Remove, Reset)
+ * are used to update the size map of the hash table.
+ *
+ * The size map logically divides the hash table into `sizeMapBucketSize` element buckets
+ * by keeping an integer entry for each such bucket. Each integer entry simply denotes
+ * the number of elements in the corresponding bucket.
+ * Best understood through an example, see:
+ * table = [/, 1, /, 6, 90, /, -3, 5] (8 entries)
+ * sizemap = [ 2 | 3 ] (2 entries)
+ * where sizeMapBucketSize == 4.
+ *
+ * By default the size map is not initialized, so these methods don't do anything, thus,
+ * their impact on hash table performance is negligible. However, if the hash table
+ * is converted into a parallel hash table, the size map is initialized, as it will be needed
+ * there.
+ */
+ protected final def nnSizeMapAdd(h: Int) = if (sizemap ne null) {
+ sizemap(h >> sizeMapBucketBitSize) += 1
+ }
+
+ protected final def nnSizeMapRemove(h: Int) = if (sizemap ne null) {
+ sizemap(h >> sizeMapBucketBitSize) -= 1
+ }
+
+ protected final def nnSizeMapReset(tableLength: Int) = if (sizemap ne null) {
+ val nsize = calcSizeMapSize(tableLength)
+ if (sizemap.length != nsize) sizemap = new Array[Int](nsize)
+ else java.util.Arrays.fill(sizemap, 0)
+ }
+
+  private[collection] final def totalSizeMapBuckets = if (sizeMapBucketSize < table.length) table.length / sizeMapBucketSize else 1
+
+ protected final def calcSizeMapSize(tableLength: Int) = (tableLength >> sizeMapBucketBitSize) + 1
+
+ // discards the previous sizemap and only allocates a new one
+ protected def sizeMapInit(tableLength: Int): Unit = {
+ sizemap = new Array[Int](calcSizeMapSize(tableLength))
+ }
+
+ // discards the previous sizemap and populates the new one
+ protected final def sizeMapInitAndRebuild() = {
+ sizeMapInit(table.length)
+
+ // go through the buckets, count elements
+ var tableidx = 0
+ var bucketidx = 0
+ val tbl = table
+ var tableuntil = 0
+ if (tbl.length < sizeMapBucketSize) tableuntil = tbl.length else tableuntil = sizeMapBucketSize
+ val totalbuckets = totalSizeMapBuckets
+ while (bucketidx < totalbuckets) {
+ var currbucketsize = 0
+ while (tableidx < tableuntil) {
+ var e = tbl(tableidx)
+ while (e ne null) {
+ currbucketsize += 1
+ e = e.next
+ }
+ tableidx += 1
+ }
+ sizemap(bucketidx) = currbucketsize
+ tableuntil += sizeMapBucketSize
+ bucketidx += 1
+ }
+ }
+
+ private[collection] def printSizeMap() = {
+ println(sizemap.to(collection.immutable.List))
+ }
+
+ protected final def sizeMapDisable() = sizemap = null
+
+ protected final def isSizeMapDefined = sizemap ne null
+
+ // override to automatically initialize the size map
+ protected def alwaysInitSizeMap = false
+
+ /* End of size map handling code */
+
+ protected def elemEquals(key1: A, key2: A): Boolean = (key1 == key2)
+
+ /**
+   * Note: we take the most significant bits of the hashcode, not the lower ones;
+   * this is of crucial importance when populating the table in parallel.
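+   *
+   * For example (illustrative): with `table.length == 256`, `ones == 255` and
+   * `exponent == 24`, so `index` keeps the top 8 bits of the improved hash.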
+ */
+ protected[collection] final def index(hcode: Int): Int = {
+ val ones = table.length - 1
+ val exponent = Integer.numberOfLeadingZeros(ones)
+ (improve(hcode, seedvalue) >>> exponent) & ones
+ }
+}
+
+private[collection] object HashTable {
+ /** The load factor for the hash table (in 0.001 step).
+ */
+ private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
+ private[collection] final def loadFactorDenum = 1000 // should be loadFactorDenom, but changing that isn't binary compatible
+
+ private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
+
+ private[collection] final def sizeForThreshold(_loadFactor: Int, thr: Int) = ((thr.toLong * loadFactorDenum) / _loadFactor).toInt
+
+ private[collection] final def capacity(expectedSize: Int) = nextPositivePowerOfTwo(expectedSize)
+
+ trait HashUtils[KeyType] {
+ protected final def sizeMapBucketBitSize = 5
+    // so that sizeMapBucketSize == 1 << 5 == 32:
+ protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize
+
+ protected[collection] def elemHashCode(key: KeyType) = key.##
+
+ /**
+ * Defer to a high-quality hash in [[scala.util.hashing]].
+ * The goal is to distribute across bins as well as possible even if a hash code has low entropy at some bits.
+ *
+ * OLD VERSION - quick, but bad for sequence 0-10000 - little entropy in higher bits - since 2003
+ * {{{
+ * var h: Int = hcode + ~(hcode << 9)
+ * h = h ^ (h >>> 14)
+ * h = h + (h << 4)
+ * h ^ (h >>> 10)
+ * }}}
+ * the rest of the computation is due to SI-5293
+ */
+ protected final def improve(hcode: Int, seed: Int): Int = rotateRight(byteswap32(hcode), seed)
+ }
+
+ /**
+ * Returns a power of two >= `target`.
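+   * For example (illustrative), `nextPositivePowerOfTwo(9) == 16`: shift counts
+   * are taken modulo 32, so `1 << -numberOfLeadingZeros(8)` is `1 << 4`.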
+ */
+ private[collection] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1)
+}
+
+/** Class used internally.
+ */
+private[collection] trait HashEntry[A, sealed E <: HashEntry[A, E]] {
+ val key: A
+ var next: E = _
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala
new file mode 100644
index 000000000000..1af98162e9f3
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/ImmutableBuilder.scala
@@ -0,0 +1,32 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+import language.experimental.captureChecking
+
+
+/**
+ * Reusable builder for immutable collections
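+ *
+ * An illustrative sketch (hypothetical subclass building an immutable `List`):
+ * {{{
+ *   val b = new ImmutableBuilder[Int, List[Int]](empty = Nil) {
+ *     def addOne(elem: Int): this.type = { elems = elems :+ elem; this }
+ *   }
+ *   b.addOne(1).addOne(2).result() // List(1, 2)
+ * }}}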
+ */
+abstract class ImmutableBuilder[-A, C <: IterableOnce[_]](empty: C)
+ extends ReusableBuilder[A, C] {
+
+ protected var elems: C = empty
+
+ def clear(): Unit = { elems = empty }
+
+ def result(): C = elems
+
+ override def knownSize: Int = elems.knownSize
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala
new file mode 100644
index 000000000000..022970b4c56f
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/IndexedSeq.scala
@@ -0,0 +1,84 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+import language.experimental.captureChecking
+
+trait IndexedSeq[T] extends Seq[T]
+ with scala.collection.IndexedSeq[T]
+ with IndexedSeqOps[T, IndexedSeq, IndexedSeq[T]]
+ with IterableFactoryDefaults[T, IndexedSeq] {
+
+ override def iterableFactory: SeqFactory[IndexedSeq] = IndexedSeq
+}
+
+@SerialVersionUID(3L)
+object IndexedSeq extends SeqFactory.Delegate[IndexedSeq](ArrayBuffer)
+
+trait IndexedSeqOps[A, +CC[_], +C <: AnyRef]
+ extends scala.collection.IndexedSeqOps[A, CC, C]
+ with SeqOps[A, CC, C] {
+
+ /** Modifies this $coll by applying a function to all elements of this $coll.
+ *
+ * @param f the function to apply to each element.
+ * @return this $coll modified by replacing all elements with the
+ * result of applying the given function `f` to each element
+ * of this $coll.
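+   *
+   * An illustrative sketch:
+   * {{{
+   *   val buf = ArrayBuffer(1, 2, 3)
+   *   buf.mapInPlace(_ * 2) // buf is now ArrayBuffer(2, 4, 6)
+   * }}}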
+ */
+ def mapInPlace(f: A => A): this.type = {
+ var i = 0
+ val siz = size
+ while (i < siz) { this(i) = f(this(i)); i += 1 }
+ this
+ }
+
+ /** Sorts this $coll in place according to an Ordering.
+ *
+ * @see [[scala.collection.SeqOps.sorted]]
+ * @param ord the ordering to be used to compare elements.
+ * @return modified input $coll sorted according to the ordering `ord`.
+ */
+ def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = {
+ val len = this.length
+ if (len > 1) {
+ val arr = new Array[AnyRef](len)
+ var i = 0
+ for (x <- this) {
+ arr(i) = x.asInstanceOf[AnyRef]
+ i += 1
+ }
+ java.util.Arrays.sort(arr, ord.asInstanceOf[Ordering[Object]])
+ i = 0
+ while (i < arr.length) {
+ update(i, arr(i).asInstanceOf[A])
+ i += 1
+ }
+ }
+ this
+ }
+
+ /** Sorts this $coll in place according to a comparison function.
+ *
+ * @see [[scala.collection.SeqOps.sortWith]]
+ */
+ def sortInPlaceWith(lt: (A, A) => Boolean): this.type = sortInPlace()(Ordering.fromLessThan(lt))
+
+ /** Sorts this $coll in place according to the Ordering which results from transforming
+ * an implicitly given Ordering with a transformation function.
+ *
+ * @see [[scala.collection.SeqOps.sortBy]]
+ */
+ def sortInPlaceBy[B](f: A => B)(implicit ord: Ordering[B]): this.type = sortInPlace()(ord on f)
+
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala
new file mode 100644
index 000000000000..a253e8738b26
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashMap.scala
@@ -0,0 +1,510 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.annotation.{nowarn, tailrec}
+import scala.collection.generic.DefaultSerializable
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+
+
+/** This class implements mutable maps using a hashtable.
+ * The iterator and all traversal methods of this class visit elements in the order they were inserted.
+ *
+ * @tparam K the type of the keys contained in this hash map.
+ * @tparam V the type of the values assigned to keys in this hash map.
+ *
+ * @define Coll `LinkedHashMap`
+ * @define coll linked hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define orderDependent
+ * @define orderDependentFold
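+ *
+ * An illustrative sketch (iteration follows insertion order):
+ * {{{
+ *   val m = LinkedHashMap("a" -> 1, "b" -> 2)
+ *   m += ("c" -> 3)
+ *   m.keysIterator.toList // List("a", "b", "c")
+ * }}}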
+ */
+@deprecatedInheritance("LinkedHashMap will be made final; use .withDefault for the common use case of computing a default value", "2.13.11")
+class LinkedHashMap[sealed K, sealed V]
+ extends AbstractMap[K, V]
+ with SeqMap[K, V]
+ with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]]
+ with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]]
+ with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]]
+ with MapFactoryDefaults[K, V, LinkedHashMap, Iterable]
+ with DefaultSerializable {
+
+ override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap
+
+ // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper
+ // would not return the elements in insertion order
+
+ private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V]
+
+ private[collection] def _firstEntry: Entry = firstEntry
+
+ protected var firstEntry: Entry = null
+
+ protected var lastEntry: Entry = null
+
+ /* Uses the same implementation as mutable.HashMap. The hashtable holds the following invariant:
+ * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i.
+   * - Every bucket is sorted in ascending hash order
+ * - The sum of the lengths of all buckets is equal to contentSize.
+ */
+ private[this] var table = new Array[Entry](tableSizeFor(LinkedHashMap.defaultinitialSize))
+
+ private[this] var threshold: Int = newThreshold(table.length)
+
+ private[this] var contentSize = 0
+
+ override def last: (K, V) =
+ if (size > 0) (lastEntry.key, lastEntry.value)
+ else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap")
+
+ override def lastOption: Option[(K, V)] =
+ if (size > 0) Some((lastEntry.key, lastEntry.value))
+ else None
+
+ override def head: (K, V) =
+ if (size > 0) (firstEntry.key, firstEntry.value)
+ else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap")
+
+ override def headOption: Option[(K, V)] =
+ if (size > 0) Some((firstEntry.key, firstEntry.value))
+ else None
+
+ override def size = contentSize
+ override def knownSize: Int = size
+ override def isEmpty: Boolean = size == 0
+
+ def get(key: K): Option[V] = {
+ val e = findEntry(key)
+ if (e == null) None
+ else Some(e.value)
+ }
+ override def sizeHint(size: Int): Unit = {
+ val target = tableSizeFor(((size + 1).toDouble / LinkedHashMap.defaultLoadFactor).toInt)
+ if (target > table.length) growTable(target)
+ }
+
+ override def contains(key: K): Boolean = {
+ if (getClass eq classOf[LinkedHashMap[_, _]])
+ findEntry(key) != null
+ else
+ super.contains(key) // A subclass might override `get`, use the default implementation `contains`.
+ }
+
+ override def put(key: K, value: V): Option[V] = put0(key, value, true) match {
+ case null => None
+ case sm => sm
+ }
+
+ override def update(key: K, value: V): Unit = put0(key, value, false)
+
+ override def remove(key: K): Option[V] = removeEntry0(key) match {
+ case null => None
+ case nd => Some(nd.value)
+ }
+
+ override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
+ if (getClass != classOf[LinkedHashMap[_, _]]) {
+ // subclasses of LinkedHashMap might customise `get` ...
+ super.getOrElse(key, default)
+ } else {
+      // ... but in the common case, we can avoid the Option boxing.
+ val nd = findEntry(key)
+ if (nd eq null) default else nd.value
+ }
+ }
+
+ override def getOrElseUpdate(key: K, defaultValue: => V): V = {
+ if (getClass != classOf[LinkedHashMap[_, _]]) {
+ // subclasses of LinkedHashMap might customise `get` ...
+ super.getOrElseUpdate(key, defaultValue)
+ } else {
+ val hash = computeHash(key)
+ val idx = index(hash)
+ val nd = table(idx) match {
+ case null => null
+ case nd => nd.findEntry(key, hash)
+ }
+ if (nd != null) nd.value
+ else {
+ val table0 = table
+ val default = defaultValue
+ if (contentSize + 1 >= threshold) growTable(table.length * 2)
+ // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize.
+ val newIdx = if (table0 eq table) idx else index(hash)
+ put0(key, default, false, hash, newIdx)
+ default
+ }
+ }
+ }
+
+ private[this] def removeEntry0(elem: K): Entry = removeEntry0(elem, computeHash(elem))
+
+ /** Removes a key from this map if it exists
+ *
+ * @param elem the element to remove
+ * @param hash the **improved** hashcode of `element` (see computeHash)
+ * @return the node that contained element if it was present, otherwise null
+ */
+ private[this] def removeEntry0(elem: K, hash: Int): Entry = {
+ val idx = index(hash)
+ table(idx) match {
+ case null => null
+ case nd if nd.hash == hash && nd.key == elem =>
+ // first element matches
+ table(idx) = nd.next
+ deleteEntry(nd)
+ contentSize -= 1
+ nd
+ case nd =>
+ // find an element that matches
+ var prev = nd
+ var next = nd.next
+ while ((next ne null) && next.hash <= hash) {
+ if (next.hash == hash && next.key == elem) {
+ prev.next = next.next
+ deleteEntry(next)
+ contentSize -= 1
+ return next
+ }
+ prev = next
+ next = next.next
+ }
+ null
+ }
+ }
+
+ /** Computes the improved hash of an original (`any.##`) hash. */
+ @`inline` private[this] def improveHash(originalHash: Int): Int = {
+ originalHash ^ (originalHash >>> 16)
+ }
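+  /** The inverse of `improveHash`: `improveHash` is an involution (applying it
+   *  twice returns the original value), so un-improving is the same operation. */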
+ @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash)
+
+ /** Computes the improved hash of this key */
+ @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##)
+
+ @`inline` private[this] def index(hash: Int) = hash & (table.length - 1)
+
+ @`inline` private[this] def findEntry(key: K): Entry = {
+ val hash = computeHash(key)
+ table(index(hash)) match {
+ case null => null
+ case nd => nd.findEntry(key, hash)
+ }
+ }
+
+ def addOne(kv: (K, V)): this.type = {
+ put(kv._1, kv._2)
+ this
+ }
+
+ def subtractOne(key: K): this.type = {
+ remove(key)
+ this
+ }
+
+ private[this] abstract class LinkedHashMapIterator[T] extends AbstractIterator[T] {
+ private[this] var cur = firstEntry
+ def extract(nd: Entry): T
+ def hasNext: Boolean = cur ne null
+ def next(): T =
+ if (hasNext) { val r = extract(cur); cur = cur.later; r }
+ else Iterator.empty.next()
+ }
+
+ def iterator: Iterator[(K, V)] =
+ if (size == 0) Iterator.empty
+ else new LinkedHashMapIterator[(K, V)] {
+ def extract(nd: Entry): (K, V) = (nd.key, nd.value)
+ }
+
+ protected class LinkedKeySet extends KeySet {
+ override def iterableFactory: IterableFactory[collection.Set] = LinkedHashSet
+ }
+
+ override def keySet: collection.Set[K] = new LinkedKeySet
+
+ override def keysIterator: Iterator[K] =
+ if (size == 0) Iterator.empty
+ else new LinkedHashMapIterator[K] {
+ def extract(nd: Entry): K = nd.key
+ }
+
+ private[collection] def entryIterator: Iterator[Entry] =
+ if (size == 0) Iterator.empty
+ else new LinkedHashMapIterator[Entry] {
+ def extract(nd: Entry): Entry = nd
+ }
+
+
+ // Override updateWith for performance, so we can do the update while hashing
+ // the input key only once and performing one lookup into the hash table
+ override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
+ if (getClass != classOf[LinkedHashMap[_, _]]) {
+ // subclasses of LinkedHashMap might customise `get` ...
+ super.updateWith(key)(remappingFunction)
+ } else {
+ val hash = computeHash(key)
+ val indexedHash = index(hash)
+
+ var foundEntry: Entry = null
+ var previousEntry: Entry = null
+ table(indexedHash) match {
+ case null =>
+ case nd =>
+ @tailrec
+ def findEntry(prev: Entry, nd: Entry, k: K, h: Int): Unit = {
+ if (h == nd.hash && k == nd.key) {
+ previousEntry = prev
+ foundEntry = nd
+ }
+ else if ((nd.next eq null) || (nd.hash > h)) ()
+ else findEntry(nd, nd.next, k, h)
+ }
+
+ findEntry(null, nd, key, hash)
+ }
+
+ val previousValue = foundEntry match {
+ case null => None
+ case nd => Some(nd.value)
+ }
+
+ val nextValue = remappingFunction(previousValue)
+
+ (previousValue, nextValue) match {
+ case (None, None) => // do nothing
+
+ case (Some(_), None) =>
+ if (previousEntry != null) previousEntry.next = foundEntry.next
+ else table(indexedHash) = foundEntry.next
+ deleteEntry(foundEntry)
+ contentSize -= 1
+
+ case (None, Some(value)) =>
+ val newIndexedHash =
+ if (contentSize + 1 >= threshold) {
+ growTable(table.length * 2)
+ index(hash)
+ } else indexedHash
+ put0(key, value, false, hash, newIndexedHash)
+
+ case (Some(_), Some(newValue)) => foundEntry.value = newValue
+ }
+ nextValue
+ }
+ }
+
+ override def valuesIterator: Iterator[V] =
+ if (size == 0) Iterator.empty
+ else new LinkedHashMapIterator[V] {
+ def extract(nd: Entry): V = nd.value
+ }
+
+
+ override def foreach[U](f: ((K, V)) => U): Unit = {
+ var cur = firstEntry
+ while (cur ne null) {
+ f((cur.key, cur.value))
+ cur = cur.later
+ }
+ }
+
+ override def foreachEntry[U](f: (K, V) => U): Unit = {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur.key, cur.value)
+ cur = cur.later
+ }
+ }
+
+ override def clear(): Unit = {
+ java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null)
+ contentSize = 0
+ firstEntry = null
+ lastEntry = null
+ }
+
+ private[this] def tableSizeFor(capacity: Int) =
+ (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30)
+
+ private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashMap.defaultLoadFactor).toInt
+
+  /** Creates a new entry. If the table is empty (`firstEntry` is null), the new
+   *  entry becomes `firstEntry`; otherwise it is appended as the new `lastEntry`.
+   */
+ private[this] def createNewEntry(key: K, hash: Int, value: V): Entry = {
+ val e = new Entry(key, hash, value)
+ if (firstEntry eq null) firstEntry = e
+ else {
+ lastEntry.later = e
+ e.earlier = lastEntry
+ }
+ lastEntry = e
+ e
+ }
+
+ /** Delete the entry from the LinkedHashMap, set the `earlier` and `later` pointers correctly */
+ private[this] def deleteEntry(e: Entry): Unit = {
+ if (e.earlier eq null) firstEntry = e.later
+ else e.earlier.later = e.later
+ if (e.later eq null) lastEntry = e.earlier
+ else e.later.earlier = e.earlier
+ e.earlier = null
+ e.later = null
+ e.next = null
+ }
+
+ private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = {
+ if (contentSize + 1 >= threshold) growTable(table.length * 2)
+ val hash = computeHash(key)
+ val idx = index(hash)
+ put0(key, value, getOld, hash, idx)
+ }
+
+ private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = {
+ table(idx) match {
+ case null =>
+ table(idx) = createNewEntry(key, hash, value)
+ case old =>
+ var prev: Entry = null
+ var n = old
+ while ((n ne null) && n.hash <= hash) {
+ if (n.hash == hash && key == n.key) {
+ val old = n.value
+ n.value = value
+ return if (getOld) Some(old) else null
+ }
+ prev = n
+ n = n.next
+ }
+ val nnode = createNewEntry(key, hash, value)
+ if (prev eq null) {
+ nnode.next = old
+ table(idx) = nnode
+ } else {
+ nnode.next = prev.next
+ prev.next = nnode
+ }
+ }
+ contentSize += 1
+ null
+ }
+
+ private[this] def growTable(newlen: Int): Unit = {
+ if (newlen < 0)
+ throw new RuntimeException(s"new hash table size $newlen exceeds maximum")
+ var oldlen = table.length
+ threshold = newThreshold(newlen)
+ if (size == 0) table = new Array(newlen)
+ else {
+ table = java.util.Arrays.copyOf(table, newlen)
+ val preLow = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V])
+ val preHigh = new Entry(null.asInstanceOf[K], 0, null.asInstanceOf[V])
+ // Split buckets until the new length has been reached. This could be done more
+ // efficiently when growing an already filled table to more than double the size.
+ while (oldlen < newlen) {
+ var i = 0
+ while (i < oldlen) {
+ val old = table(i)
+ if (old ne null) {
+ preLow.next = null
+ preHigh.next = null
+ var lastLow = preLow
+ var lastHigh = preHigh
+ var n = old
+ while (n ne null) {
+ val next = n.next
+ if ((n.hash & oldlen) == 0) { // keep low
+ lastLow.next = n
+ lastLow = n
+ } else { // move to high
+ lastHigh.next = n
+ lastHigh = n
+ }
+ n = next
+ }
+ lastLow.next = null
+ if (old ne preLow.next) table(i) = preLow.next
+ if (preHigh.next ne null) {
+ table(i + oldlen) = preHigh.next
+ lastHigh.next = null
+ }
+ }
+ i += 1
+ }
+ oldlen *= 2
+ }
+ }
+ }
+
+ override def hashCode: Int = {
+ if (isEmpty) MurmurHash3.emptyMapHash
+ else {
+ val tupleHashIterator = new LinkedHashMapIterator[Any] {
+ var hash: Int = 0
+ override def hashCode: Int = hash
+ override def extract(nd: Entry): Any = {
+ hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##)
+ this
+ }
+ }
+ MurmurHash3.unorderedHash(tupleHashIterator, MurmurHash3.mapSeed)
+ }
+ }
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "LinkedHashMap"
+}
+
+/** $factoryInfo
+ * @define Coll `LinkedHashMap`
+ * @define coll linked hash map
+ */
+@SerialVersionUID(3L)
+object LinkedHashMap extends MapFactory[LinkedHashMap] {
+
+ def empty[sealed K, sealed V] = new LinkedHashMap[K, V]
+
+ def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^) = {
+ val newlhm = empty[K, V]
+ newlhm.sizeHint(it.knownSize)
+ newlhm.addAll(it)
+ newlhm
+ }
+
+ def newBuilder[sealed K, sealed V] = new GrowableBuilder(empty[K, V])
+
+ /** Class for the linked hash map entry, used internally.
+ */
+ private[mutable] final class LinkedEntry[sealed K, sealed V](val key: K, val hash: Int, var value: V) {
+ var earlier: LinkedEntry[K, V] = null
+ var later: LinkedEntry[K, V] = null
+ var next: LinkedEntry[K, V] = null
+
+ @tailrec
+ final def findEntry(k: K, h: Int): LinkedEntry[K, V] =
+ if (h == hash && k == key) this
+ else if ((next eq null) || (hash > h)) null
+ else next.findEntry(k, h)
+ }
+
+ /** The default load factor for the hash table */
+ private[collection] final def defaultLoadFactor: Double = 0.75
+
+ /** The default initial capacity for the hash table */
+ private[collection] final def defaultinitialSize: Int = 16
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala
new file mode 100644
index 000000000000..a895034a852c
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/LinkedHashSet.scala
@@ -0,0 +1,349 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.annotation.{nowarn, tailrec}
+import scala.collection.generic.DefaultSerializable
+import scala.util.hashing.MurmurHash3
+import language.experimental.captureChecking
+
+/** This class implements mutable sets using a hashtable.
+ * The iterator and all traversal methods of this class visit elements in the order they were inserted.
+ *
+ * @tparam A the type of the elements contained in this set.
+ *
+ * @define Coll `LinkedHashSet`
+ * @define coll linked hash set
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define orderDependent
+ * @define orderDependentFold
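+ *
+ * An illustrative sketch (iteration follows insertion order):
+ * {{{
+ *   val s = LinkedHashSet(3, 1, 2)
+ *   s += 4
+ *   s.toList // List(3, 1, 2, 4)
+ * }}}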
+ */
+@deprecatedInheritance("LinkedHashSet will be made final", "2.13.11")
+class LinkedHashSet[sealed A]
+ extends AbstractSet[A]
+ with SetOps[A, LinkedHashSet, LinkedHashSet[A]]
+ with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]]
+ with IterableFactoryDefaults[A, LinkedHashSet]
+ with DefaultSerializable {
+
+ override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet
+
+ // stepper is not overridden to use XTableStepper because that stepper would not return the
+ // elements in insertion order
+
+ /*private*/ type Entry = LinkedHashSet.Entry[A]
+
+ protected var firstEntry: Entry = null
+
+ protected var lastEntry: Entry = null
+
+ /* Uses the same implementation as mutable.HashSet. The hashtable holds the following invariant:
+ * - For each i between 0 and table.length, the bucket at table(i) only contains keys whose hash-index is i.
+   * - Every bucket is sorted in ascending hash order
+ * - The sum of the lengths of all buckets is equal to contentSize.
+ */
+ private[this] var table = new Array[Entry](tableSizeFor(LinkedHashSet.defaultinitialSize))
+
+ private[this] var threshold: Int = newThreshold(table.length)
+
+ private[this] var contentSize = 0
+
+ override def last: A =
+ if (size > 0) lastEntry.key
+ else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet")
+
+ override def lastOption: Option[A] =
+ if (size > 0) Some(lastEntry.key)
+ else None
+
+ override def head: A =
+ if (size > 0) firstEntry.key
+ else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet")
+
+ override def headOption: Option[A] =
+ if (size > 0) Some(firstEntry.key)
+ else None
+
+ override def size: Int = contentSize
+ override def knownSize: Int = size
+ override def isEmpty: Boolean = size == 0
+
+ def contains(elem: A): Boolean = findEntry(elem) ne null
+
+ override def sizeHint(size: Int): Unit = {
+ val target = tableSizeFor(((size + 1).toDouble / LinkedHashSet.defaultLoadFactor).toInt)
+ if (target > table.length) growTable(target)
+ }
+
+ override def add(elem: A): Boolean = {
+ if (contentSize + 1 >= threshold) growTable(table.length * 2)
+ val hash = computeHash(elem)
+ put0(elem, hash, index(hash))
+ }
+
+ def addOne(elem: A): this.type = {
+ add(elem)
+ this
+ }
+
+ def subtractOne(elem: A): this.type = {
+ remove(elem)
+ this
+ }
+
+ override def remove(elem: A): Boolean = remove0(elem, computeHash(elem))
+
+ private[this] abstract class LinkedHashSetIterator[T] extends AbstractIterator[T] {
+ private[this] var cur = firstEntry
+ def extract(nd: Entry): T
+ def hasNext: Boolean = cur ne null
+ def next(): T =
+ if (hasNext) { val r = extract(cur); cur = cur.later; r }
+ else Iterator.empty.next()
+ }
+
+ def iterator: Iterator[A] = new LinkedHashSetIterator[A] {
+ override def extract(nd: Entry): A = nd.key
+ }
+
+ private[collection] def entryIterator: Iterator[Entry] = new LinkedHashSetIterator[Entry] {
+ override def extract(nd: Entry): Entry = nd
+ }
+
+ override def foreach[U](f: A => U): Unit = {
+ var cur = firstEntry
+ while (cur ne null) {
+ f(cur.key)
+ cur = cur.later
+ }
+ }
+
+ override def clear(): Unit = {
+ java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null)
+ contentSize = 0
+ firstEntry = null
+ lastEntry = null
+ }
+
+ private[this] def tableSizeFor(capacity: Int) =
+ (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30)
+
+ private[this] def newThreshold(size: Int) = (size.toDouble * LinkedHashSet.defaultLoadFactor).toInt
+
+ @`inline` private[this] def improveHash(originalHash: Int): Int = {
+ originalHash ^ (originalHash >>> 16)
+ }
+
+ @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash)
+
+ /** Computes the improved hash of this key */
+ @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##)
+
+ @`inline` private[this] def index(hash: Int) = hash & (table.length - 1)
+
+ @`inline` private[this] def findEntry(key: A): Entry = {
+ val hash = computeHash(key)
+ table(index(hash)) match {
+ case null => null
+ case nd => nd.findEntry(key, hash)
+ }
+ }
+
+  /** Creates a new entry. If the table is empty (`firstEntry` is null), the new
+   *  entry becomes `firstEntry`; otherwise it is appended as the new `lastEntry`.
+   */
+ private[this] def createNewEntry(key: A, hash: Int): Entry = {
+ val e = new Entry(key, hash)
+ if (firstEntry eq null) firstEntry = e
+ else {
+ lastEntry.later = e
+ e.earlier = lastEntry
+ }
+ lastEntry = e
+ e
+ }
+
+ /** Delete the entry from the LinkedHashSet, set the `earlier` and `later` pointers correctly */
+ private[this] def deleteEntry(e: Entry): Unit = {
+ if (e.earlier eq null) firstEntry = e.later
+ else e.earlier.later = e.later
+ if (e.later eq null) lastEntry = e.earlier
+ else e.later.earlier = e.earlier
+ e.earlier = null
+ e.later = null
+ e.next = null
+ }
+
+ private[this] def put0(elem: A, hash: Int, idx: Int): Boolean = {
+ table(idx) match {
+ case null =>
+ table(idx) = createNewEntry(elem, hash)
+ case old =>
+ var prev: Entry = null
+ var n = old
+ while ((n ne null) && n.hash <= hash) {
+ if (n.hash == hash && elem == n.key) return false
+ prev = n
+ n = n.next
+ }
+ val nnode = createNewEntry(elem, hash)
+ if (prev eq null) {
+ nnode.next = old
+ table(idx) = nnode
+ } else {
+ nnode.next = prev.next
+ prev.next = nnode
+ }
+ }
+ contentSize += 1
+ true
+ }
+
+ private[this] def remove0(elem: A, hash: Int): Boolean = {
+ val idx = index(hash)
+ table(idx) match {
+ case null => false
+ case nd if nd.hash == hash && nd.key == elem =>
+ // first element matches
+ table(idx) = nd.next
+ deleteEntry(nd)
+ contentSize -= 1
+ true
+ case nd =>
+ // find an element that matches
+ var prev = nd
+ var next = nd.next
+ while ((next ne null) && next.hash <= hash) {
+ if (next.hash == hash && next.key == elem) {
+ prev.next = next.next
+ deleteEntry(next)
+ contentSize -= 1
+ return true
+ }
+ prev = next
+ next = next.next
+ }
+ false
+ }
+ }
+
+ private[this] def growTable(newlen: Int): Unit = {
+ if (newlen < 0)
+ throw new RuntimeException(s"new hash table size $newlen exceeds maximum")
+ var oldlen = table.length
+ threshold = newThreshold(newlen)
+ if (size == 0) table = new Array(newlen)
+ else {
+ table = java.util.Arrays.copyOf(table, newlen)
+ val preLow = new Entry(null.asInstanceOf[A], 0)
+ val preHigh = new Entry(null.asInstanceOf[A], 0)
+ // Split buckets until the new length has been reached. This could be done more
+ // efficiently when growing an already filled table to more than double the size.
+ while (oldlen < newlen) {
+ var i = 0
+ while (i < oldlen) {
+ val old = table(i)
+ if (old ne null) {
+ preLow.next = null
+ preHigh.next = null
+ var lastLow = preLow
+ var lastHigh = preHigh
+ var n = old
+ while (n ne null) {
+ val next = n.next
+ if ((n.hash & oldlen) == 0) { // keep low
+ lastLow.next = n
+ lastLow = n
+ } else { // move to high
+ lastHigh.next = n
+ lastHigh = n
+ }
+ n = next
+ }
+ lastLow.next = null
+ if (old ne preLow.next) table(i) = preLow.next
+ if (preHigh.next ne null) {
+ table(i + oldlen) = preHigh.next
+ lastHigh.next = null
+ }
+ }
+ i += 1
+ }
+ oldlen *= 2
+ }
+ }
+ }
+
+ override def hashCode: Int = {
+ val setHashIterator =
+ if (isEmpty) this.iterator
+ else {
+ new LinkedHashSetIterator[Any] {
+ var hash: Int = 0
+ override def hashCode: Int = hash
+ override def extract(nd: Entry): Any = {
+ hash = unimproveHash(nd.hash)
+ this
+ }
+ }
+ }
+ MurmurHash3.unorderedHash(setHashIterator, MurmurHash3.setSeed)
+ }
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "LinkedHashSet"
+}
+
+/** $factoryInfo
+ * @define Coll `LinkedHashSet`
+ * @define coll linked hash set
+ */
+@SerialVersionUID(3L)
+object LinkedHashSet extends IterableFactory[LinkedHashSet] {
+
+ override def empty[sealed A]: LinkedHashSet[A] = new LinkedHashSet[A]
+
+ def from[sealed E](it: collection.IterableOnce[E]^) = {
+ val newlhs = empty[E]
+ newlhs.sizeHint(it.knownSize)
+ newlhs.addAll(it)
+ newlhs
+ }
+
+ def newBuilder[sealed A] = new GrowableBuilder(empty[A])
+
+ /** Class for the linked hash set entry, used internally.
+ */
+ private[mutable] final class Entry[sealed A](val key: A, val hash: Int) {
+ var earlier: Entry[A] = null
+ var later: Entry[A] = null
+ var next: Entry[A] = null
+
+ @tailrec
+ final def findEntry(k: A, h: Int): Entry[A] =
+ if (h == hash && k == key) this
+ else if ((next eq null) || (hash > h)) null
+ else next.findEntry(k, h)
+ }
+
+ /** The default load factor for the hash table */
+ private[collection] final def defaultLoadFactor: Double = 0.75
+
+ /** The default initial capacity for the hash table */
+ private[collection] final def defaultinitialSize: Int = 16
+}
+
diff --git a/tests/pos-special/stdlib/collection/mutable/ListMap.scala b/tests/pos-special/stdlib/collection/mutable/ListMap.scala
new file mode 100644
index 000000000000..8ddbc264e47b
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/ListMap.scala
@@ -0,0 +1,83 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.tailrec
+import scala.collection.generic.DefaultSerializable
+import scala.collection.immutable.List
+import language.experimental.captureChecking
+
+/** A simple mutable map backed by a list, so it preserves insertion order.
+ *
+ * @tparam K the type of the keys contained in this list map.
+ * @tparam V the type of the values assigned to keys in this list map.
+ *
+ * @define Coll `mutable.ListMap`
+ * @define coll mutable list map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ * @define orderDependent
+ * @define orderDependentFold
+ */
+@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0")
+class ListMap[sealed K, sealed V]
+ extends AbstractMap[K, V]
+ with MapOps[K, V, ListMap, ListMap[K, V]]
+ with StrictOptimizedIterableOps[(K, V), Iterable, ListMap[K, V]]
+ with StrictOptimizedMapOps[K, V, ListMap, ListMap[K, V]]
+ with MapFactoryDefaults[K, V, ListMap, Iterable]
+ with DefaultSerializable {
+
+ override def mapFactory: MapFactory[ListMap] = ListMap
+
+ private[this] var elems: List[(K, V)] = List()
+ private[this] var siz: Int = 0
+
+ def get(key: K): Option[V] = elems find (_._1 == key) map (_._2)
+ def iterator: Iterator[(K, V)] = elems.iterator
+
+ final override def addOne(kv: (K, V)) = {
+ val (e, key0) = remove(kv._1, elems, List())
+ elems = (key0, kv._2) :: e
+ siz += 1; this
+ }
+
+ final override def subtractOne(key: K) = { elems = remove(key, elems, List())._1; this }
+
+ @tailrec
+ private def remove(key: K, elems: List[(K, V)], acc: List[(K, V)]): (List[(K, V)], K) = {
+ if (elems.isEmpty) (acc, key)
+ else if (elems.head._1 == key) { siz -= 1; (acc ::: elems.tail, elems.head._1) }
+ else remove(key, elems.tail, elems.head :: acc)
+ }
+
+ final override def clear(): Unit = { elems = List(); siz = 0 }
+
+ final override def size: Int = siz
+ override def knownSize: Int = size
+ override def isEmpty: Boolean = size == 0
+ override protected[this] def stringPrefix = "ListMap"
+}
+
+/** $factoryInfo
+ * @define Coll `mutable.ListMap`
+ * @define coll mutable list map
+ */
+@SerialVersionUID(3L)
+@deprecated("Use an immutable.ListMap assigned to a var instead of mutable.ListMap", "2.13.0")
+object ListMap extends MapFactory[ListMap] {
+ def empty[sealed K, sealed V]: ListMap[K, V] = new ListMap[K, V]
+ def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): ListMap[K,V] = Growable.from(empty[K, V], it)
+ def newBuilder[sealed K, sealed V]: Builder[(K, V), ListMap[K,V]] = new GrowableBuilder(empty[K, V])
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/LongMap.scala b/tests/pos-special/stdlib/collection/mutable/LongMap.scala
new file mode 100644
index 000000000000..2c757160ec77
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/LongMap.scala
@@ -0,0 +1,674 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.collection.generic.DefaultSerializationProxy
+import scala.language.implicitConversions
+import language.experimental.captureChecking
+
+/** This class implements mutable maps with `Long` keys based on a hash table with open addressing.
+ *
+ * Basic map operations on single entries, including `contains` and `get`,
+ * are typically substantially faster with `LongMap` than [[HashMap]]. Methods
+ * that act on the whole map, including `foreach` and `map`, are not in
+ * general expected to be faster than with a generic map, save for those
+ * that take particular advantage of the internal structure of the map:
+ * `foreachKey`, `foreachValue`, `mapValuesNow`, and `transformValues`.
+ *
+ * Maps with open addressing may become less efficient at lookup after
+ * repeated addition/removal of elements. Although `LongMap` makes a
+ * decent attempt to remain efficient regardless, calling `repack`
+ * on a map that will no longer have elements removed but will be
+ * used heavily may save both time and storage space.
+ *
+ * This map is not intended to contain more than 2^29 entries (approximately
+ * 500 million). The maximum capacity is 2^30, but performance will degrade
+ * rapidly as 2^30 is approached.
+ *
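+ * An illustrative sketch (assumes the usual `apply` factory on the companion):
+ * {{{
+ *   val m = LongMap(1L -> "one", 2L -> "two")
+ *   m(1L)           // "one"
+ *   m.getOrNull(3L) // null, avoiding Option allocation
+ * }}}
+ *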
+ */
+final class LongMap[sealed V] private[collection] (defaultEntry: Long -> V, initialBufferSize: Int, initBlank: Boolean)
+ extends AbstractMap[Long, V]
+ with MapOps[Long, V, Map, LongMap[V]]
+ with StrictOptimizedIterableOps[(Long, V), Iterable, LongMap[V]]
+ with Serializable {
+ import LongMap._
+
+ def this() = this(LongMap.exceptionDefault, 16, true)
+
+ // TODO: override clear() with an optimization more tailored for efficiency.
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[(Long, V)]^): LongMap[V] = {
+ //TODO should this be the default implementation of this method in StrictOptimizedIterableOps?
+ val b = newSpecificBuilder
+ b.sizeHint(coll)
+ b.addAll(coll)
+ b.result()
+ }
+ override protected def newSpecificBuilder: Builder[(Long, V),LongMap[V]] = new GrowableBuilder(LongMap.empty[V])
+
+ /** Creates a new `LongMap` that returns default values according to a supplied key-value mapping. */
+ def this(defaultEntry: Long -> V) = this(defaultEntry, 16, true)
+
+ /** Creates a new `LongMap` with an initial buffer of specified size.
+ *
+ * A LongMap can typically contain half as many elements as its buffer size
+ * before it requires resizing.
+ */
+ def this(initialBufferSize: Int) = this(LongMap.exceptionDefault, initialBufferSize, true)
+
+ /** Creates a new `LongMap` with specified default values and initial buffer size. */
+ def this(defaultEntry: Long -> V, initialBufferSize: Int) = this(defaultEntry, initialBufferSize, true)
+
+ private[this] var mask = 0
+ private[this] var extraKeys: Int = 0
+ private[this] var zeroValue: AnyRef = null
+ private[this] var minValue: AnyRef = null
+ private[this] var _size = 0
+ private[this] var _vacant = 0
+ private[this] var _keys: Array[Long] = null
+ private[this] var _values: Array[AnyRef] = null
+
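+  // 0 and Long.MinValue are the only keys with key == -key. Since 0 marks an
+  // empty slot in _keys, these two keys are stored out-of-band in zeroValue and
+  // minValue, with presence tracked by extraKeys (bit 0 for 0, bit 1 for MinValue).
+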
+ if (initBlank) defaultInitialize(initialBufferSize)
+
+ private[this] def defaultInitialize(n: Int) = {
+ mask =
+ if (n<0) 0x7
+ else (((1 << (32 - java.lang.Integer.numberOfLeadingZeros(n-1))) - 1) & 0x3FFFFFFF) | 0x7
+ _keys = new Array[Long](mask+1)
+ _values = new Array[AnyRef](mask+1)
+ }
+
+ private[collection] def initializeTo(
+ m: Int, ek: Int, zv: AnyRef, mv: AnyRef, sz: Int, vc: Int, kz: Array[Long], vz: Array[AnyRef]
+ ): Unit = {
+ mask = m; extraKeys = ek; zeroValue = zv; minValue = mv; _size = sz; _vacant = vc; _keys = kz; _values = vz
+ }
+
+ override def size: Int = _size + (extraKeys+1)/2
+ override def knownSize: Int = size
+ override def isEmpty: Boolean = size == 0
+ override def empty: LongMap[V] = new LongMap()
+
+ private def imbalanced: Boolean =
+ (_size + _vacant) > 0.5*mask || _vacant > _size
+
+ private def toIndex(k: Long): Int = {
+ // Part of the MurmurHash3 32 bit finalizer
+ val h = ((k ^ (k >>> 32)) & 0xFFFFFFFFL).toInt
+ val x = (h ^ (h >>> 16)) * 0x85EBCA6B
+ (x ^ (x >>> 13)) & mask
+ }
+
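+  // The seek* methods below probe with open addressing: starting from toIndex(k),
+  // each miss advances by 2*(x+1)*x - 3 for probe count x (offsets 1, 9, 21, ...),
+  // masked to stay within the table; a key of 0 marks an empty slot.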
+ private def seekEmpty(k: Long): Int = {
+ var e = toIndex(k)
+ var x = 0
+ while (_keys(e) != 0) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
+ e
+ }
+
+ private def seekEntry(k: Long): Int = {
+ var e = toIndex(k)
+ var x = 0
+ var q = 0L
+ while ({ q = _keys(e); if (q==k) return e; q != 0}) { x += 1; e = (e + 2*(x+1)*x - 3) & mask }
+ e | MissingBit
+ }
+
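+  // `q + q != 0` below keeps probing while the slot is neither empty (0) nor
+  // vacated (Long.MinValue, whose doubling overflows to 0); a vacated slot can
+  // be reused for insertion, which the MissVacant result records.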
+ private def seekEntryOrOpen(k: Long): Int = {
+ var e = toIndex(k)
+ var x = 0
+ var q = 0L
+ while ({ q = _keys(e); if (q==k) return e; q+q != 0}) {
+ x += 1
+ e = (e + 2*(x+1)*x - 3) & mask
+ }
+ if (q == 0) return e | MissingBit
+ val o = e | MissVacant
+ while ({ q = _keys(e); if (q==k) return e; q != 0}) {
+ x += 1
+ e = (e + 2*(x+1)*x - 3) & mask
+ }
+ o
+ }
+
+ override def contains(key: Long): Boolean = {
+ if (key == -key) (((key>>>63).toInt+1) & extraKeys) != 0
+ else seekEntry(key) >= 0
+ }
+
+ override def get(key: Long): Option[V] = {
+ if (key == -key) {
+ if ((((key>>>63).toInt+1) & extraKeys) == 0) None
+ else if (key == 0) Some(zeroValue.asInstanceOf[V])
+ else Some(minValue.asInstanceOf[V])
+ }
+ else {
+ val i = seekEntry(key)
+ if (i < 0) None else Some(_values(i).asInstanceOf[V])
+ }
+ }
+
+ override def getOrElse[V1 >: V](key: Long, default: => V1): V1 = {
+ if (key == -key) {
+ if ((((key>>>63).toInt+1) & extraKeys) == 0) default
+ else if (key == 0) zeroValue.asInstanceOf[V1]
+ else minValue.asInstanceOf[V1]
+ }
+ else {
+ val i = seekEntry(key)
+ if (i < 0) default else _values(i).asInstanceOf[V1]
+ }
+ }
+
+ override def getOrElseUpdate(key: Long, defaultValue: => V): V = {
+ if (key == -key) {
+ val kbits = (key>>>63).toInt + 1
+ if ((kbits & extraKeys) == 0) {
+ val value = defaultValue
+ extraKeys |= kbits
+ if (key == 0) zeroValue = value.asInstanceOf[AnyRef]
+ else minValue = value.asInstanceOf[AnyRef]
+ value
+ }
+ else if (key == 0) zeroValue.asInstanceOf[V]
+ else minValue.asInstanceOf[V]
+ }
+ else {
+ var i = seekEntryOrOpen(key)
+ if (i < 0) {
+ // It is possible that the default value computation was side-effecting
+ // Our hash table may have resized or even contain what we want now
+ // (but if it does, we'll replace it)
+ val value = {
+ val ok = _keys
+ val ans = defaultValue
+ if (ok ne _keys) {
+ i = seekEntryOrOpen(key)
+ if (i >= 0) _size -= 1
+ }
+ ans
+ }
+ _size += 1
+ val j = i & IndexMask
+ _keys(j) = key
+ _values(j) = value.asInstanceOf[AnyRef]
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ value
+ }
+ else _values(i).asInstanceOf[V]
+ }
+ }
+
+ /** Retrieves the value associated with a key, or the default for that type if none exists
+ * (null for AnyRef, 0 for floats and integers).
+ *
+ * Note: this is the fastest way to retrieve a value that may or
+ * may not exist, if the default null/zero is acceptable. For key/value
+ * pairs that do exist, `apply` (i.e. `map(key)`) is equally fast.
+ */
+ def getOrNull(key: Long): V = {
+ if (key == -key) {
+ if ((((key>>>63).toInt+1) & extraKeys) == 0) null.asInstanceOf[V]
+ else if (key == 0) zeroValue.asInstanceOf[V]
+ else minValue.asInstanceOf[V]
+ }
+ else {
+ val i = seekEntry(key)
+ if (i < 0) null.asInstanceOf[V] else _values(i).asInstanceOf[V]
+ }
+ }
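+
+  // Usage sketch (illustrative): suited to hot lookup paths where the
+  // null/zero miss value is acceptable.
+  //   val lm = LongMap.empty[String]
+  //   lm(42L) = "answer"
+  //   lm.getOrNull(42L) // "answer"
+  //   lm.getOrNull(7L)  // null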
+
+ /** Retrieves the value associated with a key.
+ * If the key does not exist in the map, the `defaultEntry` for that key
+ * will be returned instead.
+ */
+ override def apply(key: Long): V = {
+ if (key == -key) {
+ if ((((key>>>63).toInt+1) & extraKeys) == 0) defaultEntry(key)
+ else if (key == 0) zeroValue.asInstanceOf[V]
+ else minValue.asInstanceOf[V]
+ }
+ else {
+ val i = seekEntry(key)
+ if (i < 0) defaultEntry(key) else _values(i).asInstanceOf[V]
+ }
+ }
+
+ /** The user-supplied default value for the key. Throws an exception
+ * if no other default behavior was specified.
+ */
+ override def default(key: Long) = defaultEntry(key)
+
+ private def repack(newMask: Int): Unit = {
+ val ok = _keys
+ val ov = _values
+ mask = newMask
+ _keys = new Array[Long](mask+1)
+ _values = new Array[AnyRef](mask+1)
+ _vacant = 0
+ var i = 0
+ while (i < ok.length) {
+ val k = ok(i)
+      if (k != -k) { // skip empty (0) and vacated (Long.MinValue) slots
+ val j = seekEmpty(k)
+ _keys(j) = k
+ _values(j) = ov(i)
+ }
+ i += 1
+ }
+ }
+
+ /** Repacks the contents of this `LongMap` for maximum efficiency of lookup.
+ *
+ * For maps that undergo a complex creation process with both addition and
+ * removal of keys, and then are used heavily with no further removal of
+ * elements, calling `repack` after the end of the creation can result in
+ * improved performance. Repacking takes time proportional to the number
+ * of entries in the map.
+ */
+ def repack(): Unit = {
+ var m = mask
+ if (_size + _vacant >= 0.5*mask && !(_vacant > 0.2*mask)) m = ((m << 1) + 1) & IndexMask
+ while (m > 8 && 8*_size < m) m = m >>> 1
+ repack(m)
+ }
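+
+  // Usage sketch (illustrative): one repack after a build phase that mixed
+  // additions and removals tightens the table before read-heavy use.
+  //   val lm = LongMap.empty[Int]
+  //   (1L to 1000L).foreach(k => lm(k) = k.toInt)
+  //   (1L to 900L).foreach(lm.subtractOne)
+  //   lm.repack() // reclaims the 900 vacated slots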
+
+ override def put(key: Long, value: V): Option[V] = {
+ if (key == -key) {
+ if (key == 0) {
+ val ans = if ((extraKeys&1) == 1) Some(zeroValue.asInstanceOf[V]) else None
+ zeroValue = value.asInstanceOf[AnyRef]
+ extraKeys |= 1
+ ans
+ }
+ else {
+        val ans = if ((extraKeys&2) == 2) Some(minValue.asInstanceOf[V]) else None
+ minValue = value.asInstanceOf[AnyRef]
+ extraKeys |= 2
+ ans
+ }
+ }
+ else {
+ val i = seekEntryOrOpen(key)
+ if (i < 0) {
+ val j = i & IndexMask
+ _keys(j) = key
+ _values(j) = value.asInstanceOf[AnyRef]
+ _size += 1
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ None
+ }
+ else {
+ val ans = Some(_values(i).asInstanceOf[V])
+ _keys(i) = key
+ _values(i) = value.asInstanceOf[AnyRef]
+ ans
+ }
+ }
+ }
+
+ /** Updates the map to include a new key-value pair.
+ *
+ * This is the fastest way to add an entry to a `LongMap`.
+ */
+ override def update(key: Long, value: V): Unit = {
+ if (key == -key) {
+ if (key == 0) {
+ zeroValue = value.asInstanceOf[AnyRef]
+ extraKeys |= 1
+ }
+ else {
+ minValue = value.asInstanceOf[AnyRef]
+ extraKeys |= 2
+ }
+ }
+ else {
+ val i = seekEntryOrOpen(key)
+ if (i < 0) {
+ val j = i & IndexMask
+ _keys(j) = key
+ _values(j) = value.asInstanceOf[AnyRef]
+ _size += 1
+ if ((i & VacantBit) != 0) _vacant -= 1
+ else if (imbalanced) repack()
+ }
+ else {
+ _keys(i) = key
+ _values(i) = value.asInstanceOf[AnyRef]
+ }
+ }
+ }
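+
+  // Usage sketch (illustrative): `update` is the cheapest write because it
+  // allocates no Option; use `put` only when the previous binding is needed.
+  //   val lm = LongMap.empty[String]
+  //   lm(1L) = "a"               // update
+  //   val prev = lm.put(1L, "b") // Some("a")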
+
+ /** Adds a new key/value pair to this map and returns the map. */
+ @deprecated("Use `addOne` or `update` instead; infix operations with an operand of multiple args will be deprecated", "2.13.3")
+ def +=(key: Long, value: V): this.type = { update(key, value); this }
+
+ /** Adds a new key/value pair to this map and returns the map. */
+ @inline final def addOne(key: Long, value: V): this.type = { update(key, value); this }
+
+ @inline override final def addOne(kv: (Long, V)): this.type = { update(kv._1, kv._2); this }
+
+ def subtractOne(key: Long): this.type = {
+ if (key == -key) {
+ if (key == 0L) {
+ extraKeys &= 0x2
+ zeroValue = null
+ }
+ else {
+ extraKeys &= 0x1
+ minValue = null
+ }
+ }
+ else {
+ val i = seekEntry(key)
+ if (i >= 0) {
+ _size -= 1
+ _vacant += 1
+ _keys(i) = Long.MinValue
+ _values(i) = null
+ }
+ }
+ this
+ }
+
+ def iterator: Iterator[(Long, V)] = new AbstractIterator[(Long, V)] {
+ private[this] val kz = _keys
+ private[this] val vz = _values
+
+ private[this] var nextPair: (Long, V) =
+ if (extraKeys==0) null
+ else if ((extraKeys&1)==1) (0L, zeroValue.asInstanceOf[V])
+ else (Long.MinValue, minValue.asInstanceOf[V])
+
+ private[this] var anotherPair: (Long, V) =
+ if (extraKeys==3) (Long.MinValue, minValue.asInstanceOf[V])
+ else null
+
+ private[this] var index = 0
+
+ def hasNext: Boolean = nextPair != null || (index < kz.length && {
+ var q = kz(index)
+ while (q == -q) {
+ index += 1
+ if (index >= kz.length) return false
+ q = kz(index)
+ }
+ nextPair = (kz(index), vz(index).asInstanceOf[V])
+ index += 1
+ true
+ })
+ def next() = {
+ if (nextPair == null && !hasNext) throw new NoSuchElementException("next")
+ val ans = nextPair
+ if (anotherPair != null) {
+ nextPair = anotherPair
+ anotherPair = null
+ }
+ else nextPair = null
+ ans
+ }
+ }
+
+ // TODO PERF override these for efficiency. See immutable.LongMap for how to organize the code.
+ override def keysIterator: Iterator[Long] = super.keysIterator
+ override def valuesIterator: Iterator[V] = super.valuesIterator
+
+ override def foreach[U](f: ((Long,V)) => U): Unit = {
+ if ((extraKeys & 1) == 1) f((0L, zeroValue.asInstanceOf[V]))
+ if ((extraKeys & 2) == 2) f((Long.MinValue, minValue.asInstanceOf[V]))
+ var i,j = 0
+ while (i < _keys.length & j < _size) {
+ val k = _keys(i)
+ if (k != -k) {
+ j += 1
+ f((k, _values(i).asInstanceOf[V]))
+ }
+ i += 1
+ }
+ }
+
+ override def foreachEntry[U](f: (Long,V) => U): Unit = {
+ if ((extraKeys & 1) == 1) f(0L, zeroValue.asInstanceOf[V])
+ if ((extraKeys & 2) == 2) f(Long.MinValue, minValue.asInstanceOf[V])
+ var i,j = 0
+ while (i < _keys.length & j < _size) {
+ val k = _keys(i)
+ if (k != -k) {
+ j += 1
+ f(k, _values(i).asInstanceOf[V])
+ }
+ i += 1
+ }
+ }
+
+ override def clone(): LongMap[V] = {
+ val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+ val vz = java.util.Arrays.copyOf(_values, _values.length)
+ val lm = new LongMap[V](defaultEntry, 1, false)
+ lm.initializeTo(mask, extraKeys, zeroValue, minValue, _size, _vacant, kz, vz)
+ lm
+ }
+
+ @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0")
+ override def +[V1 >: V](kv: (Long, V1)): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ lm += kv
+ lm
+ }
+
+ @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0")
+ override def + [sealed V1 >: V](elem1: (Long, V1), elem2: (Long, V1), elems: (Long, V1)*): LongMap[V1] = {
+ val m = this + elem1 + elem2
+ if(elems.isEmpty) m else m.concat(elems)
+ }
+
+ override def concat[sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = {
+ val lm = clone().asInstanceOf[LongMap[V1]]
+ xs.iterator.foreach(kv => lm += kv)
+ lm
+ }
+
+ override def ++ [sealed V1 >: V](xs: scala.collection.IterableOnce[(Long, V1)]^): LongMap[V1] = concat(xs)
+
+ @deprecated("Use m.clone().addOne(k,v) instead of m.updated(k, v)", "2.13.0")
+ override def updated[V1 >: V](key: Long, value: V1): LongMap[V1] =
+ clone().asInstanceOf[LongMap[V1]].addOne(key, value)
+
+ /** Applies a function to all keys of this map. */
+ def foreachKey[A](f: Long => A): Unit = {
+ if ((extraKeys & 1) == 1) f(0L)
+ if ((extraKeys & 2) == 2) f(Long.MinValue)
+ var i,j = 0
+ while (i < _keys.length & j < _size) {
+ val k = _keys(i)
+ if (k != -k) {
+ j += 1
+ f(k)
+ }
+ i += 1
+ }
+ }
+
+ /** Applies a function to all values of this map. */
+ def foreachValue[A](f: V => A): Unit = {
+ if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V])
+ if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V])
+ var i,j = 0
+ while (i < _keys.length & j < _size) {
+ val k = _keys(i)
+ if (k != -k) {
+ j += 1
+ f(_values(i).asInstanceOf[V])
+ }
+ i += 1
+ }
+ }
+
+ /** Creates a new `LongMap` with different values.
+ * Unlike `mapValues`, this method generates a new
+ * collection immediately.
+ */
+ def mapValuesNow[sealed V1](f: V => V1): LongMap[V1] = {
+ val zv = if ((extraKeys & 1) == 1) f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
+ val mv = if ((extraKeys & 2) == 2) f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef] else null
+ val lm = new LongMap[V1](LongMap.exceptionDefault, 1, false)
+ val kz = java.util.Arrays.copyOf(_keys, _keys.length)
+ val vz = new Array[AnyRef](_values.length)
+ var i,j = 0
+ while (i < _keys.length & j < _size) {
+ val k = _keys(i)
+ if (k != -k) {
+ j += 1
+ vz(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+ }
+ i += 1
+ }
+ lm.initializeTo(mask, extraKeys, zv, mv, _size, _vacant, kz, vz)
+ lm
+ }
+
+ /** Applies a transformation function to all values stored in this map.
+ * Note: the default, if any, is not transformed.
+ */
+ @deprecated("Use transformValuesInPlace instead of transformValues", "2.13.0")
+ @`inline` final def transformValues(f: V => V): this.type = transformValuesInPlace(f)
+
+ /** Applies a transformation function to all values stored in this map.
+ * Note: the default, if any, is not transformed.
+ */
+ def transformValuesInPlace(f: V => V): this.type = {
+ if ((extraKeys & 1) == 1) zeroValue = f(zeroValue.asInstanceOf[V]).asInstanceOf[AnyRef]
+ if ((extraKeys & 2) == 2) minValue = f(minValue.asInstanceOf[V]).asInstanceOf[AnyRef]
+ var i,j = 0
+ while (i < _keys.length & j < _size) {
+ val k = _keys(i)
+ if (k != -k) {
+ j += 1
+ _values(i) = f(_values(i).asInstanceOf[V]).asInstanceOf[AnyRef]
+ }
+ i += 1
+ }
+ this
+ }
+
+ def map[sealed V2](f: ((Long, V)) => (Long, V2)): LongMap[V2] = LongMap.from(new View.Map(coll, f))
+
+ def flatMap[sealed V2](f: ((Long, V)) => IterableOnce[(Long, V2)]): LongMap[V2] = LongMap.from(new View.FlatMap(coll, f))
+
+ def collect[sealed V2](pf: PartialFunction[(Long, V), (Long, V2)]): LongMap[V2] =
+ strictOptimizedCollect(LongMap.newBuilder[V2], pf)
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(LongMap.toFactory[V](LongMap), this)
+
+ override protected[this] def className = "LongMap"
+}
+
+object LongMap {
+ private final val IndexMask = 0x3FFFFFFF
+ private final val MissingBit = 0x80000000
+ private final val VacantBit = 0x40000000
+ private final val MissVacant = 0xC0000000
+
+ private val exceptionDefault: Long -> Nothing = (k: Long) => throw new NoSuchElementException(k.toString)
+
+ /** A builder for instances of `LongMap`.
+ *
+ * This builder can be reused to create multiple instances.
+ */
+ final class LongMapBuilder[sealed V] extends ReusableBuilder[(Long, V), LongMap[V]] {
+ private[collection] var elems: LongMap[V] = new LongMap[V]
+ override def addOne(entry: (Long, V)): this.type = {
+ elems += entry
+ this
+ }
+ def clear(): Unit = elems = new LongMap[V]
+ def result(): LongMap[V] = elems
+ override def knownSize: Int = elems.knownSize
+ }
+
+ /** Creates a new `LongMap` with zero or more key/value pairs. */
+ def apply[sealed V](elems: (Long, V)*): LongMap[V] = buildFromIterableOnce(elems)
+
+ private def buildFromIterableOnce[sealed V](elems: IterableOnce[(Long, V)]^): LongMap[V] = {
+ var sz = elems.knownSize
+ if(sz < 0) sz = 4
+ val lm = new LongMap[V](sz * 2)
+ elems.iterator.foreach{ case (k,v) => lm(k) = v }
+ if (lm.size < (sz>>3)) lm.repack()
+ lm
+ }
+
+ /** Creates a new empty `LongMap`. */
+ def empty[sealed V]: LongMap[V] = new LongMap[V]
+
+ /** Creates a new empty `LongMap` with the supplied default */
+ def withDefault[sealed V](default: Long -> V): LongMap[V] = new LongMap[V](default)
+
+ /** Creates a new `LongMap` from an existing source collection. A source collection
+ * which is already a `LongMap` gets cloned.
+ *
+ * @param source Source collection
+   * @tparam V the type of the values in the collection
+ * @return a new `LongMap` with the elements of `source`
+ */
+ def from[sealed V](source: IterableOnce[(Long, V)]^): LongMap[V] = source match {
+ case source: LongMap[_] => source.clone().asInstanceOf[LongMap[V]]
+ case _ => buildFromIterableOnce(source)
+ }
+
+ def newBuilder[sealed V]: ReusableBuilder[(Long, V), LongMap[V]] = new LongMapBuilder[V]
+
+ /** Creates a new `LongMap` from arrays of keys and values.
+ * Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
+ */
+ def fromZip[sealed V](keys: Array[Long], values: Array[V]): LongMap[V] = {
+ val sz = math.min(keys.length, values.length)
+ val lm = new LongMap[V](sz * 2)
+ var i = 0
+ while (i < sz) { lm(keys(i)) = values(i); i += 1 }
+ if (lm.size < (sz>>3)) lm.repack()
+ lm
+ }
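+
+  // Usage sketch (illustrative):
+  //   val lm = LongMap.fromZip(Array(1L, 2L, 3L), Array("a", "b", "c"))
+  //   lm(2L) // "b"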
+
+ /** Creates a new `LongMap` from keys and values.
+ * Equivalent to but more efficient than `LongMap((keys zip values): _*)`.
+ */
+ def fromZip[sealed V](keys: scala.collection.Iterable[Long], values: scala.collection.Iterable[V]): LongMap[V] = {
+ val sz = math.min(keys.size, values.size)
+ val lm = new LongMap[V](sz * 2)
+ val ki = keys.iterator
+ val vi = values.iterator
+ while (ki.hasNext && vi.hasNext) lm(ki.next()) = vi.next()
+ if (lm.size < (sz >> 3)) lm.repack()
+ lm
+ }
+
+ implicit def toFactory[sealed V](dummy: LongMap.type): Factory[(Long, V), LongMap[V]] = ToFactory.asInstanceOf[Factory[(Long, V), LongMap[V]]]
+
+ @SerialVersionUID(3L)
+ private[this] object ToFactory extends Factory[(Long, AnyRef), LongMap[AnyRef]] with Serializable {
+ def fromSpecific(it: IterableOnce[(Long, AnyRef)]^): LongMap[AnyRef] = LongMap.from[AnyRef](it)
+ def newBuilder: Builder[(Long, AnyRef), LongMap[AnyRef]] = LongMap.newBuilder[AnyRef]
+ }
+
+ implicit def toBuildFrom[V](factory: LongMap.type): BuildFrom[Any, (Long, V), LongMap[V]] = ToBuildFrom.asInstanceOf[BuildFrom[Any, (Long, V), LongMap[V]]]
+ private object ToBuildFrom extends BuildFrom[Any, (Long, AnyRef), LongMap[AnyRef]] {
+ def fromSpecific(from: Any)(it: IterableOnce[(Long, AnyRef)]^) = LongMap.from(it)
+ def newBuilder(from: Any) = LongMap.newBuilder[AnyRef]
+ }
+
+ implicit def iterableFactory[sealed V]: Factory[(Long, V), LongMap[V]] = toFactory(this)
+ implicit def buildFromLongMap[V]: BuildFrom[LongMap[_], (Long, V), LongMap[V]] = toBuildFrom(this)
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Map.scala b/tests/pos-special/stdlib/collection/mutable/Map.scala
new file mode 100644
index 000000000000..dab64ddb1f58
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Map.scala
@@ -0,0 +1,271 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import language.experimental.captureChecking
+
+/** Base type of mutable Maps */
+trait Map[K, V]
+ extends Iterable[(K, V)]
+ with collection.Map[K, V]
+ with MapOps[K, V, Map, Map[K, V]]
+ with Growable[(K, V)]
+ with Shrinkable[K]
+ with MapFactoryDefaults[K, V, Map, Iterable] {
+
+ override def mapFactory: scala.collection.MapFactory[Map] = Map
+
+ /*
+ //TODO consider keeping `remove` because it returns the removed entry
+ @deprecated("Use subtract or -= instead of remove", "2.13.0")
+ def remove(key: K): Option[V] = {
+ val old = get(key)
+ if(old.isDefined) subtract(key)
+ old
+ }
+ */
+
+ /** The same map with a given default function.
+   * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+   * are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefault(d: K -> V): Map[K, V] = new Map.WithDefault[K, V](this, d)
+
+ /** The same map with a given default value.
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+ * are not affected by `withDefaultValue`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d default value used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ def withDefaultValue(d: V): Map[K, V] = new Map.WithDefault[K, V](this, x => d)
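+
+  // Usage sketch (illustrative): the default affects only `apply`.
+  //   val m = collection.mutable.Map(1 -> "one").withDefaultValue("?")
+  //   m(2)     // "?"  (apply falls back to the default)
+  //   m.get(2) // None (get is unaffected)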
+}
+
+/**
+ * @define coll mutable map
+ * @define Coll `mutable.Map`
+ */
+trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]]
+ extends IterableOps[(K, V), Iterable, C]
+ with collection.MapOps[K, V, CC, C]
+ with Cloneable[C]
+ with Builder[(K, V), C]
+ with Growable[(K, V)]
+ with Shrinkable[K]
+ with Pure {
+
+ def result(): C = coll
+
+ @deprecated("Use - or remove on an immutable Map", "2.13.0")
+ final def - (key: K): C = clone() -= key
+
+ @deprecated("Use -- or removeAll on an immutable Map", "2.13.0")
+ final def - (key1: K, key2: K, keys: K*): C = clone() -= key1 -= key2 --= keys
+
+ /** Adds a new key/value pair to this map and optionally returns previously bound value.
+ * If the map already contains a
+ * mapping for the key, it will be overridden by the new value.
+ *
+ * @param key the key to update
+ * @param value the new value
+ * @return an option value containing the value associated with the key
+ * before the `put` operation was executed, or `None` if `key`
+ * was not defined in the map before.
+ */
+ def put(key: K, value: V): Option[V] = {
+ val r = get(key)
+ update(key, value)
+ r
+ }
+
+ /** Adds a new key/value pair to this map.
+ * If the map already contains a
+ * mapping for the key, it will be overridden by the new value.
+ *
+ * @param key The key to update
+ * @param value The new value
+ */
+ def update(key: K, value: V): Unit = { coll += ((key, value)) }
+
+ /**
+ * Update a mapping for the specified key and its current optionally-mapped value
+ * (`Some` if there is current mapping, `None` if not).
+ *
+ * If the remapping function returns `Some(v)`, the mapping is updated with the new value `v`.
+ * If the remapping function returns `None`, the mapping is removed (or remains absent if initially absent).
+ * If the function itself throws an exception, the exception is rethrown, and the current mapping is left unchanged.
+ *
+ * @param key the key value
+   * @param remappingFunction a function that receives the current optionally-mapped value and returns a new mapping
+ * @return the new value associated with the specified key
+ */
+ def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = {
+ val previousValue = this.get(key)
+ val nextValue = remappingFunction(previousValue)
+ (previousValue, nextValue) match {
+ case (None, None) => // do nothing
+ case (Some(_), None) => this.remove(key)
+ case (_, Some(v)) => this.update(key,v)
+ }
+ nextValue
+ }
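+
+  // Usage sketch (illustrative): counting occurrences with `updateWith`.
+  //   val counts = collection.mutable.Map.empty[String, Int]
+  //   counts.updateWith("a")(prev => Some(prev.getOrElse(0) + 1)) // Some(1)
+  //   counts.updateWith("a")(prev => Some(prev.getOrElse(0) + 1)) // Some(2)
+  //   counts.updateWith("a")(_ => None) // removes the binding, returns None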
+
+ /** If given key is already in this map, returns associated value.
+ *
+ * Otherwise, computes value from given expression `op`, stores with key
+ * in map and returns that value.
+ *
+ * Concurrent map implementations may evaluate the expression `op`
+ * multiple times, or may evaluate `op` without inserting the result.
+ *
+ * @param key the key to test
+ * @param op the computation yielding the value to associate with `key`, if
+ * `key` is previously unbound.
+ * @return the value associated with key (either previously or as a result
+ * of executing the method).
+ */
+ def getOrElseUpdate(key: K, op: => V): V =
+ get(key) match {
+ case Some(v) => v
+ case None => val d = op; this(key) = d; d
+ }
+
+ /** Removes a key from this map, returning the value associated previously
+ * with that key as an option.
+ * @param key the key to be removed
+ * @return an option value containing the value associated previously with `key`,
+ * or `None` if `key` was not defined in the map before.
+ */
+ def remove(key: K): Option[V] = {
+ val r = get(key)
+ if (r.isDefined) this -= key
+ r
+ }
+
+ def clear(): Unit = { keysIterator foreach -= }
+
+ override def clone(): C = empty ++= this
+
+ @deprecated("Use filterInPlace instead", "2.13.0")
+ @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p)
+
+ /** Retains only those mappings for which the predicate
+ * `p` returns `true`.
+ *
+ * @param p The test predicate
+ */
+ def filterInPlace(p: (K, V) => Boolean): this.type = {
+ if (!isEmpty) this match {
+ case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p)
+ case _ =>
+ val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException
+ val arrayLength = array.length
+ var i = 0
+ while (i < arrayLength) {
+ val (k, v) = array(i).asInstanceOf[(K, V)]
+ if (!p(k, v)) {
+ this -= k
+ }
+ i += 1
+ }
+ }
+ this
+ }
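+
+  // Usage sketch (illustrative):
+  //   val m = collection.mutable.Map(1 -> "a", 2 -> "b", 3 -> "c")
+  //   m.filterInPlace((k, _) => k % 2 == 1) // keeps 1 -> "a" and 3 -> "c"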
+
+ @deprecated("Use mapValuesInPlace instead", "2.13.0")
+ @inline final def transform(f: (K, V) => V): this.type = mapValuesInPlace(f)
+
+ /** Applies a transformation function to all values contained in this map.
+   * The transformation function produces new values from the existing keys
+   * and their associated values.
+ *
+ * @param f the transformation to apply
+ * @return the map itself.
+ */
+ def mapValuesInPlace(f: (K, V) => V): this.type = {
+ if (!isEmpty) this match {
+ case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f)
+ case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f)
+ case _ =>
+ val array = this.toArray[Any]
+ val arrayLength = array.length
+ var i = 0
+ while (i < arrayLength) {
+ val (k, v) = array(i).asInstanceOf[(K, V)]
+ update(k, f(k, v))
+ i += 1
+ }
+ }
+ this
+ }
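+
+  // Usage sketch (illustrative):
+  //   val m = collection.mutable.Map("a" -> 1, "b" -> 2)
+  //   m.mapValuesInPlace((_, v) => v * 10) // a -> 10, b -> 20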
+
+ @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0")
+ def updated[V1 >: V](key: K, value: V1): CC[K, V1] =
+ clone().asInstanceOf[CC[K, V1]].addOne((key, value))
+
+ override def knownSize: Int = super[IterableOps].knownSize
+}
+
+/**
+ * $factoryInfo
+ * @define coll mutable map
+ * @define Coll `mutable.Map`
+ */
+@SerialVersionUID(3L)
+object Map extends MapFactory.Delegate[Map](HashMap) {
+
+ @SerialVersionUID(3L)
+ class WithDefault[K, V](val underlying: Map[K, V], val defaultValue: K -> V)
+ extends AbstractMap[K, V]
+ with MapOps[K, V, Map, WithDefault[K, V]] with Serializable {
+
+ override def default(key: K): V = defaultValue(key)
+
+ def iterator: scala.collection.Iterator[(K, V)] = underlying.iterator
+ override def isEmpty: Boolean = underlying.isEmpty
+ override def knownSize: Int = underlying.knownSize
+ override def mapFactory: MapFactory[Map] = underlying.mapFactory
+
+ override def clear(): Unit = underlying.clear()
+
+ def get(key: K): Option[V] = underlying.get(key)
+
+ def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this }
+
+ def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this }
+
+ override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): Map[K, V2] =
+ underlying.concat(suffix).withDefault(defaultValue)
+
+ override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue)
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] =
+ new WithDefault[K, V](mapFactory.from(coll), defaultValue)
+
+ override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] =
+ Map.newBuilder.mapResult((p: Map[K, V]) => new WithDefault[K, V](p, defaultValue))
+ }
+
+}
+
+/** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */
+abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V]
diff --git a/tests/pos-special/stdlib/collection/mutable/MultiMap.scala b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala
new file mode 100644
index 000000000000..281631c92298
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/MultiMap.scala
@@ -0,0 +1,116 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import language.experimental.captureChecking
+
+/** A trait for mutable maps with multiple values assigned to a key.
+ *
+ * This class is typically used as a mixin. It turns maps which map `K`
+ * to `Set[V]` objects into multimaps that map `K` to `V` objects.
+ *
+ * @example {{{
+ * // first import all necessary types from package `collection.mutable`
+ * import collection.mutable.{ HashMap, MultiMap, Set }
+ *
+ * // to create a `MultiMap` the easiest way is to mixin it into a normal
+ * // `Map` instance
+ * val mm = new HashMap[Int, Set[String]] with MultiMap[Int, String]
+ *
+ * // to add key-value pairs to a multimap it is important to use
+ * // the method `addBinding` because standard methods like `+` will
+ * // overwrite the complete key-value pair instead of adding the
+ * // value to the existing key
+ * mm.addBinding(1, "a")
+ * mm.addBinding(2, "b")
+ * mm.addBinding(1, "c")
+ *
+ * // mm now contains `Map(2 -> Set(b), 1 -> Set(c, a))`
+ *
+ * // to check whether the multimap contains a matching value, there is the
+ * // method `entryExists`, which tests the values bound to a key against a predicate
+ * mm.entryExists(1, _ == "a") == true
+ * mm.entryExists(1, _ == "b") == false
+ * mm.entryExists(2, _ == "b") == true
+ *
+ * // to remove a previously added value there is the method `removeBinding`
+ * mm.removeBinding(1, "a")
+ * mm.entryExists(1, _ == "a") == false
+ * }}}
+ *
+ * @define coll multimap
+ * @define Coll `MultiMap`
+ */
+@deprecated("Use a scala.collection.mutable.MultiDict in the scala-collection-contrib module", "2.13.0")
+trait MultiMap[K, sealed V] extends Map[K, Set[V]] {
+ /** Creates a new set.
+ *
+ * Classes that use this trait as a mixin can override this method
+ * to have the desired implementation of sets assigned to new keys.
+ * By default this is `HashSet`.
+ *
+ * @return An empty set of values of type `V`.
+ */
+ protected def makeSet: Set[V] = new HashSet[V]
+
+ /** Assigns the specified `value` to a specified `key`. If the key
+   * already has a binding equal to `value`, nothing is changed;
+ * otherwise a new binding is added for that `key`.
+ *
+ * @param key The key to which to bind the new value.
+ * @param value The value to bind to the key.
+ * @return A reference to this multimap.
+ */
+ def addBinding(key: K, value: V): this.type = {
+ get(key) match {
+ case None =>
+ val set = makeSet
+ set += value
+ this(key) = set
+ case Some(set) =>
+ set += value
+ }
+ this
+ }
+
+ /** Removes the binding of `value` to `key` if it exists, otherwise this
+ * operation doesn't have any effect.
+ *
+ * If this was the last value assigned to the specified key, the
+ * set assigned to that key will be removed as well.
+ *
+ * @param key The key of the binding.
+ * @param value The value to remove.
+ * @return A reference to this multimap.
+ */
+ def removeBinding(key: K, value: V): this.type = {
+ get(key) match {
+ case None =>
+ case Some(set) =>
+ set -= value
+ if (set.isEmpty) this -= key
+ }
+ this
+ }
+
+  /** Checks if there exists a value assigned to `key` that satisfies the predicate `p`.
+   *
+   * @param key The key for which the predicate is checked.
+   * @param p The predicate which a value assigned to the key must satisfy.
+   * @return `true` if such a binding exists, `false` otherwise
+ */
+ def entryExists(key: K, p: V => Boolean): Boolean = get(key) match {
+ case None => false
+ case Some(set) => set exists p
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala
new file mode 100644
index 000000000000..f1deb25b6a8a
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/OpenHashMap.scala
@@ -0,0 +1,307 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import java.lang.Integer.numberOfLeadingZeros
+import java.util.ConcurrentModificationException
+import scala.collection.generic.DefaultSerializable
+import language.experimental.captureChecking
+
+/**
+ * @define Coll `OpenHashMap`
+ * @define coll open hash map
+ */
+@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0")
+@SerialVersionUID(3L)
+object OpenHashMap extends MapFactory[OpenHashMap] {
+
+ def empty[sealed K, sealed V] = new OpenHashMap[K, V]
+ def from[sealed K, sealed V](it: IterableOnce[(K, V)]^): OpenHashMap[K,V] = empty ++= it
+
+ def newBuilder[sealed K, sealed V]: Builder[(K, V), OpenHashMap[K,V]] =
+ new GrowableBuilder[(K, V), OpenHashMap[K, V]](empty)
+
+ /** A hash table entry.
+ *
+ * The entry is occupied if and only if its `value` is a `Some`;
+ * deleted if and only if its `value` is `None`.
+ * If its `key` is not the default value of type `Key`, the entry is occupied.
+ * If the entry is occupied, `hash` contains the hash value of `key`.
+ */
+ final private class OpenEntry[sealed Key, sealed Value](var key: Key,
+ var hash: Int,
+ var value: Option[Value])
+
+ private[mutable] def nextPositivePowerOfTwo(target: Int): Int = 1 << -numberOfLeadingZeros(target - 1)
+}
+
+/** A mutable hash map based on an open addressing method. The precise scheme is
+ * undefined, but it should make a reasonable effort to ensure that an insert
+ * with consecutive hash codes is not unnecessarily penalised. In particular,
+ * mappings of consecutive integer keys should work without significant
+ * performance loss.
+ *
+ * @tparam Key type of the keys in this map.
+ * @tparam Value type of the values in this map.
+ * @param initialSize the initial size of the internal hash table.
+ *
+ * @define Coll `OpenHashMap`
+ * @define coll open hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@deprecated("Use HashMap or one of the specialized versions (LongMap, AnyRefMap) instead of OpenHashMap", "2.13.0")
+class OpenHashMap[sealed Key, sealed Value](initialSize : Int)
+ extends AbstractMap[Key, Value]
+ with MapOps[Key, Value, OpenHashMap, OpenHashMap[Key, Value]]
+ with StrictOptimizedIterableOps[(Key, Value), Iterable, OpenHashMap[Key, Value]]
+ with MapFactoryDefaults[Key, Value, OpenHashMap, Iterable]
+ with DefaultSerializable {
+
+ import OpenHashMap.OpenEntry
+ private type Entry = OpenEntry[Key, Value]
+
+  /** The default constructor creates a hash map with initial size `8`.
+ */
+ def this() = this(8)
+
+ override def mapFactory: MapFactory[OpenHashMap] = OpenHashMap
+
+ private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize)
+
+ private[this] var mask = actualInitialSize - 1
+
+ /** The hash table.
+ *
+   * The table's entries are initialized to `null`, indicating an empty slot.
+ * A slot is either deleted or occupied if and only if the entry is non-`null`.
+ */
+ private[this] var table = new Array[Entry](actualInitialSize)
+
+ private[this] var _size = 0
+ private[this] var deleted = 0
+
+ // Used for tracking inserts so that iterators can determine if concurrent modification has occurred.
+ private[this] var modCount = 0
+
+ override def size = _size
+ override def knownSize: Int = size
+ private[this] def size_=(s : Int): Unit = _size = s
+ override def isEmpty: Boolean = _size == 0
+ /** Returns a mangled hash code of the provided key. */
+ protected def hashOf(key: Key) = {
+ var h = key.##
+ h ^= ((h >>> 20) ^ (h >>> 12))
+ h ^ (h >>> 7) ^ (h >>> 4)
+ }
+
+ /** Increase the size of the table.
+ * Copy only the occupied slots, effectively eliminating the deleted slots.
+ */
+ private[this] def growTable() = {
+ val oldSize = mask + 1
+ val newSize = 4 * oldSize
+ val oldTable = table
+ table = new Array[Entry](newSize)
+ mask = newSize - 1
+ oldTable.foreach( entry =>
+ if (entry != null && entry.value != None)
+ table(findIndex(entry.key, entry.hash)) = entry )
+ deleted = 0
+ }
+
+ /** Return the index of the first slot in the hash table (in probe order)
+ * that is, in order of preference, either occupied by the given key, deleted, or empty.
+ *
+ * @param hash hash value for `key`
+ */
+ private[this] def findIndex(key: Key, hash: Int): Int = {
+ var index = hash & mask
+ var j = 0
+
+ // Index of the first slot containing a deleted entry, or -1 if none found yet
+ var firstDeletedIndex = -1
+
+ var entry = table(index)
+ while (entry != null) {
+ if (entry.hash == hash && entry.key == key && entry.value != None)
+ return index
+
+ if (firstDeletedIndex == -1 && entry.value == None)
+ firstDeletedIndex = index
+
+ j += 1
+ index = (index + j) & mask
+ entry = table(index)
+ }
+
+ if (firstDeletedIndex == -1) index else firstDeletedIndex
+ }
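+
+  // Probe-order sketch (comment added for clarity): the cumulative increments
+  // 1, 2, 3, ... give triangular-number offsets, so for hash h the visited
+  // indices are h, h+1, h+3, h+6, ... (mod table size). On a power-of-two
+  // table this sequence visits every slot once; e.g. with mask = 7 and h = 2
+  // the order is 2, 3, 5, 0, 4, 1, 7, 6.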
+
+ // TODO refactor `put` to extract `findOrAddEntry` and implement this in terms of that to avoid Some boxing.
+ override def update(key: Key, value: Value): Unit = put(key, value)
+
+ @deprecatedOverriding("addOne should not be overridden in order to maintain consistency with put.", "2.11.0")
+ def addOne (kv: (Key, Value)): this.type = { put(kv._1, kv._2); this }
+
+ @deprecatedOverriding("subtractOne should not be overridden in order to maintain consistency with remove.", "2.11.0")
+ def subtractOne (key: Key): this.type = { remove(key); this }
+
+ override def put(key: Key, value: Value): Option[Value] =
+ put(key, hashOf(key), value)
+
+ private def put(key: Key, hash: Int, value: Value): Option[Value] = {
+ if (2 * (size + deleted) > mask) growTable()
+ val index = findIndex(key, hash)
+ val entry = table(index)
+ if (entry == null) {
+ table(index) = new OpenEntry(key, hash, Some(value))
+ modCount += 1
+ size += 1
+ None
+ } else {
+ val res = entry.value
+ if (entry.value == None) {
+ entry.key = key
+ entry.hash = hash
+ size += 1
+ deleted -= 1
+ modCount += 1
+ }
+ entry.value = Some(value)
+ res
+ }
+ }
+
+ /** Delete the hash table slot contained in the given entry. */
+ @`inline`
+ private[this] def deleteSlot(entry: Entry) = {
+ entry.key = null.asInstanceOf[Key]
+ entry.hash = 0
+ entry.value = None
+
+ size -= 1
+ deleted += 1
+ }
+
+ override def remove(key : Key): Option[Value] = {
+ val entry = table(findIndex(key, hashOf(key)))
+ if (entry != null && entry.value != None) {
+ val res = entry.value
+ deleteSlot(entry)
+ res
+ } else None
+ }
+
+ def get(key : Key) : Option[Value] = {
+ val hash = hashOf(key)
+ var index = hash & mask
+ var entry = table(index)
+ var j = 0
+ while(entry != null){
+ if (entry.hash == hash &&
+ entry.key == key){
+ return entry.value
+ }
+
+ j += 1
+ index = (index + j) & mask
+ entry = table(index)
+ }
+ None
+ }
+
+ /** An iterator over the elements of this map. Use of this iterator follows
+ * the same contract for concurrent modification as the foreach method.
+ *
+ * @return the iterator
+ */
+ def iterator: Iterator[(Key, Value)] = new OpenHashMapIterator[(Key, Value)] {
+ override protected def nextResult(node: Entry): (Key, Value) = (node.key, node.value.get)
+ }
+
+ override def keysIterator: Iterator[Key] = new OpenHashMapIterator[Key] {
+ override protected def nextResult(node: Entry): Key = node.key
+ }
+ override def valuesIterator: Iterator[Value] = new OpenHashMapIterator[Value] {
+ override protected def nextResult(node: Entry): Value = node.value.get
+ }
+
+ private abstract class OpenHashMapIterator[A] extends AbstractIterator[A] {
+ private[this] var index = 0
+ private[this] val initialModCount = modCount
+
+ private[this] def advance(): Unit = {
+ if (initialModCount != modCount) throw new ConcurrentModificationException
+ while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1
+ }
+
+ def hasNext = {advance(); index <= mask }
+
+ def next() = {
+ advance()
+ val result = table(index)
+ index += 1
+ nextResult(result)
+ }
+ protected def nextResult(node: Entry): A
+ }
+
+ override def clone() = {
+ val it = new OpenHashMap[Key, Value]
+ foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get))
+ it
+ }
+
+ /** Loop over the key, value mappings of this map.
+ *
+ * The behaviour of modifying the map during an iteration is as follows:
+ * - Deleting a mapping is always permitted.
+ * - Changing the value of mapping which is already present is permitted.
+ * - Anything else is not permitted. It will usually, but not always, throw an exception.
+ *
+ * @tparam U The return type of the specified function `f`, return result of which is ignored.
+ * @param f The function to apply to each key, value mapping.
+ */
+ override def foreach[U](f : ((Key, Value)) => U): Unit = {
+ val startModCount = modCount
+ foreachUndeletedEntry(entry => {
+ if (modCount != startModCount) throw new ConcurrentModificationException
+ f((entry.key, entry.value.get))}
+ )
+ }
+ override def foreachEntry[U](f : (Key, Value) => U): Unit = {
+ val startModCount = modCount
+ foreachUndeletedEntry(entry => {
+ if (modCount != startModCount) throw new ConcurrentModificationException
+ f(entry.key, entry.value.get)}
+ )
+ }
+
+ private[this] def foreachUndeletedEntry(f : Entry => Unit): Unit = {
+ table.foreach(entry => if (entry != null && entry.value != None) f(entry))
+ }
+
+ override def mapValuesInPlace(f : (Key, Value) => Value): this.type = {
+ foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)))
+ this
+ }
+
+ override def filterInPlace(f : (Key, Value) => Boolean): this.type = {
+ foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry))
+ this
+ }
+
+ override protected[this] def stringPrefix = "OpenHashMap"
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala
new file mode 100644
index 000000000000..a395fac4a44a
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/PriorityQueue.scala
@@ -0,0 +1,403 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.collection.generic.DefaultSerializationProxy
+import scala.math.Ordering
+import language.experimental.captureChecking
+
+/** A heap-based priority queue.
+ *
+ * To prioritize elements of type `A` there must be an implicit
+ * `Ordering[A]` available at creation. Elements are retrieved
+ * in priority order by using [[dequeue]] or [[dequeueAll]].
+ *
+ * If multiple elements have the same priority as determined by the ordering for this
+ * `PriorityQueue`, no guarantees are made regarding the order in which those elements
+ * are returned by `dequeue` or `dequeueAll`. In particular, that means this
+ * class does not guarantee first-in-first-out behavior, as may be
+ * incorrectly inferred from the fact that this data structure is
+ * called a "queue".
+ *
+ * Only the `dequeue` and `dequeueAll` methods will return elements in priority
+ * order (while removing elements from the heap). Standard collection methods
+ * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary
+ * iteration order: they will traverse the heap or remove elements
+ * in whichever order seems most convenient.
+ *
+ * Therefore, printing a `PriorityQueue` will not show elements in priority order,
+ * though the highest-priority element will be printed first.
+ * To print the elements in order, it's necessary to `dequeue` them.
+ * To do this non-destructively, duplicate the `PriorityQueue` first;
+ * the `clone` method is a suitable way to obtain a disposable copy.
+ *
+ * Client keys are assumed to be immutable. Mutating keys may violate
+ * the invariant of the underlying heap-ordered tree. Note that [[clone]]
+ * does not rebuild the underlying tree.
+ *
+ * {{{
+ * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7)
+ * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2)
+ *
+ * scala> pq.toList // also not in order
+ * val res0: List[Int] = List(7, 3, 5, 1, 2)
+ *
+ * scala> pq.clone.dequeueAll
+ * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1)
+ * }}}
+ *
+ * @tparam A type of the elements in this priority queue.
+ * @param ord implicit ordering used to compare the elements of type `A`.
+ *
+ * @define Coll PriorityQueue
+ * @define coll priority queue
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+sealed class PriorityQueue[sealed A](implicit val ord: Ordering[A])
+ extends AbstractIterable[A]
+ with Iterable[A]
+ with IterableOps[A, Iterable, PriorityQueue[A]]
+ with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]]
+ with Builder[A, PriorityQueue[A]]
+ with Cloneable[PriorityQueue[A]]
+ with Growable[A]
+ with Serializable
+{
+
+ private class ResizableArrayAccess[sealed A0] extends ArrayBuffer[A0] {
+ override def mapInPlace(f: A0 => A0): this.type = {
+ var i = 1 // see "we do not use array(0)" comment below (???)
+ val siz = this.size
+ while (i < siz) { this(i) = f(this(i)); i += 1 }
+ this
+ }
+
+ def p_size0 = size0
+ def p_size0_=(s: Int) = size0 = s
+ def p_array = array
+ def p_ensureSize(n: Int) = super.ensureSize(n)
+ def p_ensureAdditionalSize(n: Int) = super.ensureAdditionalSize(n)
+ def p_swap(a: Int, b: Int): Unit = {
+ val h = array(a)
+ array(a) = array(b)
+ array(b) = h
+ }
+ }
+
+ private val resarr = new ResizableArrayAccess[A]
+
+ resarr.p_size0 += 1 // we do not use array(0) TODO: explain -- what is the first element even for?
+ def length: Int = resarr.length - 1 // adjust length accordingly
+ override def size: Int = length
+ override def knownSize: Int = length
+ override def isEmpty: Boolean = resarr.p_size0 < 2
+
+ // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A])
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[A]^): PriorityQueue[A] = PriorityQueue.from(coll)
+ override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder
+ override def empty: PriorityQueue[A] = PriorityQueue.empty
+
+ def mapInPlace(f: A => A): this.type = {
+ resarr.mapInPlace(f)
+ heapify(1)
+ this
+ }
+
+ def result() = this
+
+ private def toA(x: AnyRef): A = x.asInstanceOf[A]
+ protected def fixUp(as: Array[AnyRef], m: Int): Unit = {
+ var k: Int = m
+ // use `ord` directly to avoid allocating `OrderingOps`
+ while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) {
+ resarr.p_swap(k, k / 2)
+ k = k / 2
+ }
+ }
+
+ protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = {
+ // returns true if any swaps were done (used in heapify)
+ var k: Int = m
+ while (n >= 2 * k) {
+ var j = 2 * k
+ // use `ord` directly to avoid allocating `OrderingOps`
+ if (j < n && ord.lt(toA(as(j)), toA(as(j + 1))))
+ j += 1
+ if (ord.gteq(toA(as(k)), toA(as(j))))
+ return k != m
+ else {
+ val h = as(k)
+ as(k) = as(j)
+ as(j) = h
+ k = j
+ }
+ }
+ k != m
+ }
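+
+  // Invariant sketch (comment added for clarity): `resarr` holds a max-heap
+  // rooted at index 1, with the children of slot k at 2k and 2k+1. `fixUp`
+  // restores the invariant after appending at the end (see `addOne`);
+  // `fixDown` restores it after the root is replaced (see `dequeue`).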
+
+ /** Inserts a single element into the priority queue.
+ *
+ * @param elem the element to insert.
+ * @return this $coll.
+ */
+ def addOne(elem: A): this.type = {
+ resarr.p_ensureAdditionalSize(1)
+ resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+ fixUp(resarr.p_array, resarr.p_size0)
+ resarr.p_size0 += 1
+ this
+ }
+
+ override def addAll(xs: IterableOnce[A]^): this.type = {
+ val from = resarr.p_size0
+ for (x <- xs.iterator) unsafeAdd(x)
+ heapify(from)
+ this
+ }
+
+ private def unsafeAdd(elem: A): Unit = {
+ // like += but skips fixUp, which breaks the ordering invariant
+ // a series of unsafeAdds MUST be followed by heapify
+ resarr.p_ensureAdditionalSize(1)
+ resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+ resarr.p_size0 += 1
+ }
+
+ private def heapify(from: Int): Unit = {
+ // elements at indices 1..from-1 were already in heap order before any adds
+ // elements at indices from..n are newly added, their order must be fixed
+ val n = length
+
+ if (from <= 2) {
+ // no pre-existing order to maintain, do the textbook heapify algorithm
+ for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n)
+ }
+ else if (n - from < 4) {
+ // for very small adds, doing the simplest fix is faster
+ for (i <- from to n) fixUp(resarr.p_array, i)
+ }
+ else {
+ var min = from/2 // tracks the minimum element in the queue
+ val queue = scala.collection.mutable.Queue[Int](min)
+
+ // do fixDown on the parents of all the new elements
+ // except the parent of the first new element, which is in the queue
+ // (that parent is treated specially because it might be the root)
+ for (i <- n/2 until min by -1) {
+ if (fixDown(resarr.p_array, i, n)) {
+ // there was a swap, so also need to fixDown i's parent
+ val parent = i/2
+ if (parent < min) { // make sure same parent isn't added twice
+ min = parent
+ queue += parent
+ }
+ }
+ }
+
+ while (queue.nonEmpty) {
+ val i = queue.dequeue()
+ if (fixDown(resarr.p_array, i, n)) {
+ val parent = i/2
+ if (parent < min && parent > 0) {
+ // the "parent > 0" is to avoid adding the parent of the root
+ min = parent
+ queue += parent
+ }
+ }
+ }
+ }
+ }
+
+  /** Adds all elements provided by an `IterableOnce` object
+   * into the priority queue.
+   *
+   * @param xs an iterable object.
+ * @return a new priority queue containing elements of both `xs` and `this`.
+ */
+ def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs }
+
+ /** Adds all elements to the queue.
+ *
+ * @param elems the elements to add.
+ */
+ def enqueue(elems: A*): Unit = { this ++= elems }
+
+ /** Returns the element with the highest priority in the queue,
+ * and removes this element from the queue.
+ *
+ * @throws NoSuchElementException
+ * @return the element with the highest priority.
+ */
+ def dequeue(): A =
+ if (resarr.p_size0 > 1) {
+ resarr.p_size0 = resarr.p_size0 - 1
+ val result = resarr.p_array(1)
+ resarr.p_array(1) = resarr.p_array(resarr.p_size0)
+ resarr.p_array(resarr.p_size0) = null // erase reference from array
+ fixDown(resarr.p_array, 1, resarr.p_size0 - 1)
+ toA(result)
+ } else
+ throw new NoSuchElementException("no element to remove from heap")
+
+ def dequeueAll[A1 >: A]: immutable.Seq[A1] = {
+ val b = ArrayBuilder.make[Any]
+ b.sizeHint(size)
+ while (nonEmpty) {
+ b += dequeue()
+ }
+ immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]]
+ }
+
+ /** Returns the element with the highest priority in the queue,
+ * or throws an error if there is no element contained in the queue.
+ *
+ * @return the element with the highest priority.
+ */
+ override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
+
+ /** Removes all elements from the queue. After this operation is completed,
+ * the queue will be empty.
+ */
+ def clear(): Unit = {
+ resarr.clear()
+ resarr.p_size0 = 1
+ }
+
+ /** Returns an iterator which yields all the elements.
+ *
+ * Note: The order of elements returned is undefined.
+ * If you want to traverse the elements in priority queue
+ * order, use `clone().dequeueAll.iterator`.
+ *
+ * @return an iterator over all the elements.
+ */
+ override def iterator: Iterator[A] = resarr.iterator.drop(1)
+
+ /** Returns the reverse of this priority queue. The new priority queue has
+ * the same elements as the original, but the opposite ordering.
+ *
+ * For example, the element with the highest priority in `pq` has the lowest
+ * priority in `pq.reverse`, and vice versa.
+ *
+ * Ties are handled arbitrarily. Elements with equal priority may or
+ * may not be reversed with respect to each other.
+ *
+ * @return the reversed priority queue.
+ */
+ def reverse: PriorityQueue[A] = {
+ val revq = new PriorityQueue[A]()(ord.reverse)
+ // copy the existing data into the new array backwards
+ // this won't put it exactly into the correct order,
+ // but will require less fixing than copying it in
+ // the original order
+ val n = resarr.p_size0
+ revq.resarr.p_ensureSize(n)
+ revq.resarr.p_size0 = n
+ val from = resarr.p_array
+ val to = revq.resarr.p_array
+ for (i <- 1 until n) to(i) = from(n-i)
+ revq.heapify(1)
+ revq
+ }
+
+
+  /** Returns an iterator which yields all the elements in the reverse of
+   * the order returned by the method `iterator`.
+ *
+ * Note: The order of elements returned is undefined.
+ *
+ * @return an iterator over all elements sorted in descending order.
+ */
+ def reverseIterator: Iterator[A] = new AbstractIterator[A] {
+ private[this] var i = resarr.p_size0 - 1
+ def hasNext: Boolean = i >= 1
+ def next(): A = {
+ val n = resarr.p_array(i)
+ i -= 1
+ toA(n)
+ }
+ }
+
+ /** Returns a regular queue containing the same elements.
+ *
+ * Note: the order of elements is undefined.
+ */
+ def toQueue: Queue[A] = new Queue[A] ++= this.iterator
+
+ /** Returns a textual representation of a queue as a string.
+ *
+ * @return the string representation of this queue.
+ */
+ override def toString() = toList.mkString("PriorityQueue(", ", ", ")")
+
+ /** Converts this $coll to a list.
+ *
+ * Note: the order of elements is undefined.
+ *
+ * @return a list containing all elements of this $coll.
+ */
+ override def toList: immutable.List[A] = immutable.List.from(this.iterator)
+
+ /** This method clones the priority queue.
+ *
+ * @return a priority queue with the same elements.
+ */
+ override def clone(): PriorityQueue[A] = {
+ val pq = new PriorityQueue[A]
+ val n = resarr.p_size0
+ pq.resarr.p_ensureSize(n)
+ java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1)
+ pq.resarr.p_size0 = n
+ pq
+ }
+
+ override def copyToArray[sealed B >: A](xs: Array[B], start: Int, len: Int): Int = {
+ val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len)
+ if (copied > 0) {
+ Array.copy(resarr.p_array, 1, xs, start, copied)
+ }
+ copied
+ }
+
+ @deprecated("Use `PriorityQueue` instead", "2.13.0")
+ def orderedCompanion: PriorityQueue.type = PriorityQueue
+
+ protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this)
+
+ override protected[this] def className = "PriorityQueue"
+}
+
+
+@SerialVersionUID(3L)
+object PriorityQueue extends SortedIterableFactory[PriorityQueue] {
+ def newBuilder[sealed A : Ordering]: Builder[A, PriorityQueue[A]] = {
+ new Builder[A, PriorityQueue[A]] {
+ val pq = new PriorityQueue[A]
+ def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this }
+ def result(): PriorityQueue[A] = { pq.heapify(1); pq }
+ def clear(): Unit = pq.clear()
+ }
+ }
+
+ def empty[sealed A : Ordering]: PriorityQueue[A] = new PriorityQueue[A]
+
+ def from[sealed E : Ordering](it: IterableOnce[E]^): PriorityQueue[E] = {
+ val b = newBuilder[E]
+ b ++= it
+ b.result()
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Queue.scala b/tests/pos-special/stdlib/collection/mutable/Queue.scala
new file mode 100644
index 000000000000..a578b0742009
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Queue.scala
@@ -0,0 +1,139 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.nowarn
+import scala.collection.generic.DefaultSerializable
+import language.experimental.captureChecking
+
+
+/** `Queue` objects implement data structures that allow inserting and
+ * retrieving elements in a first-in-first-out (FIFO) manner.
+ *
+ * @define Coll `mutable.Queue`
+ * @define coll mutable queue
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+class Queue[sealed A] protected (array: Array[AnyRef], start: Int, end: Int)
+ extends ArrayDeque[A](array, start, end)
+ with IndexedSeqOps[A, Queue, Queue[A]]
+ with StrictOptimizedSeqOps[A, Queue, Queue[A]]
+ with IterableFactoryDefaults[A, Queue]
+ with ArrayDequeOps[A, Queue, Queue[A]]
+ with Cloneable[Queue[A]]
+ with DefaultSerializable {
+
+ def this(initialSize: Int = ArrayDeque.DefaultInitialSize) =
+ this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
+
+ override def iterableFactory: SeqFactory[Queue] = Queue
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "Queue"
+
+ /**
+   * Adds a single element to the end of this queue.
+   *
+   * @param elem the element to insert
+ * @return this
+ */
+ def enqueue(elem: A): this.type = this += elem
+
+  /** Enqueues two or more elements at the end of the queue. The last element
+   * of the sequence will be at the end of the queue.
+ *
+ * @param elems the element sequence.
+ * @return this
+ */
+ def enqueue(elem1: A, elem2: A, elems: A*): this.type = enqueue(elem1).enqueue(elem2).enqueueAll(elems)
+
+  /** Enqueues all elements in the given iterable object into the queue. The
+   * last element in the iterable object will be at the end of the queue.
+ *
+ * @param elems the iterable object.
+ * @return this
+ */
+ def enqueueAll(elems: scala.collection.IterableOnce[A]): this.type = this ++= elems
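+
+  // Usage sketch (illustrative):
+  //   val q = collection.mutable.Queue.empty[Int]
+  //   q.enqueue(1)
+  //   q.enqueueAll(List(2, 3))
+  //   q.dequeue() // 1 (FIFO order)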
+
+ /**
+   * Removes the first element from this queue and returns it.
+   *
+   * @return the removed first element
+ * @throws NoSuchElementException when queue is empty
+ */
+ def dequeue(): A = removeHead()
+
+ /** Returns the first element in the queue which satisfies the
+ * given predicate, and removes this element from the queue.
+ *
+ * @param p the predicate used for choosing the first element
+ * @return the first element of the queue for which p yields true
+ */
+ def dequeueFirst(p: A => Boolean): Option[A] =
+ removeFirst(p)
+
+ /** Returns all elements in the queue which satisfy the
+ * given predicate, and removes those elements from the queue.
+ *
+ * @param p the predicate used for choosing elements
+ * @return a sequence of all elements in the queue for which
+ * p yields true.
+ */
+ def dequeueAll(p: A => Boolean): scala.collection.immutable.Seq[A] =
+ removeAll(p)
+
+ /**
+   * Dequeues and returns the leading elements of the queue, for as long as they satisfy the given predicate
+ *
+ * @param f the predicate used for choosing elements
+ * @return The removed elements
+ */
+ def dequeueWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f)
+
+ /** Returns the first element in the queue, or throws an error if there
+ * is no element contained in the queue.
+ *
+ * @return the first element.
+ */
+ @`inline` final def front: A = head
+
+ override protected def klone(): Queue[A] = {
+ val bf = newSpecificBuilder
+ bf ++= this
+ bf.result()
+ }
+
+ override protected def ofArray(array: Array[AnyRef], end: Int): Queue[A] =
+ new Queue(array, start = 0, end)
+
+}
+
+/**
+ * $factoryInfo
+ * @define coll queue
+ * @define Coll `Queue`
+ */
+@SerialVersionUID(3L)
+object Queue extends StrictOptimizedSeqFactory[Queue] {
+
+ def from[sealed A](source: IterableOnce[A]^): Queue[A] = empty ++= source
+
+ def empty[sealed A]: Queue[A] = new Queue
+
+ def newBuilder[sealed A]: Builder[A, Queue[A]] = new GrowableBuilder[A, Queue[A]](empty)
+
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala
new file mode 100644
index 000000000000..1f320f832cdf
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/RedBlackTree.scala
@@ -0,0 +1,653 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.mutable
+
+import scala.annotation.tailrec
+import collection.{AbstractIterator, Iterator}
+import java.lang.String
+import language.experimental.captureChecking
+
+/**
+ * An object containing the red-black tree implementation used by mutable `TreeMaps`.
+ *
+ * The trees implemented in this object are *not* thread safe.
+ */
+private[collection] object RedBlackTree {
+
+ // ---- class structure ----
+
+ // For performance reasons, this implementation uses `null` references to represent leaves instead of a sentinel node.
+ // Currently, the internal nodes do not store their subtree size - only the tree object keeps track of their size.
+  // Therefore, while obtaining the size of the whole tree is O(1), counting the number of entries inside a range
+  // is linear in the size of that range.
+
+ final class Tree[sealed A, sealed B](var root: Node[A, B], var size: Int) {
+ def treeCopy(): Tree[A, B] = new Tree(copyTree(root), size)
+ }
+
+ final class Node[sealed A, sealed B](var key: A, var value: B, var red: Boolean, var left: Node[A, B], var right: Node[A, B], var parent: Node[A, B]) {
+ override def toString: String = "Node(" + key + ", " + value + ", " + red + ", " + left + ", " + right + ")"
+ }
+
+ object Tree {
+ def empty[sealed A, sealed B]: Tree[A, B] = new Tree(null, 0)
+ }
+
+ object Node {
+
+ @`inline` def apply[sealed A, sealed B](key: A, value: B, red: Boolean,
+ left: Node[A, B], right: Node[A, B], parent: Node[A, B]): Node[A, B] =
+ new Node(key, value, red, left, right, parent)
+
+ @`inline` def leaf[sealed A, sealed B](key: A, value: B, red: Boolean, parent: Node[A, B]): Node[A, B] =
+ new Node(key, value, red, null, null, parent)
+
+ def unapply[A, B](t: Node[A, B]) = Some((t.key, t.value, t.left, t.right, t.parent))
+ }
+
+ // ---- getters ----
+
+ def isRed(node: Node[_, _]) = (node ne null) && node.red
+ def isBlack(node: Node[_, _]) = (node eq null) || !node.red
+
+ // ---- size ----
+
+ def size(node: Node[_, _]): Int = if (node eq null) 0 else 1 + size(node.left) + size(node.right)
+ def size(tree: Tree[_, _]): Int = tree.size
+ def isEmpty(tree: Tree[_, _]) = tree.root eq null
+ def clear(tree: Tree[_, _]): Unit = { tree.root = null; tree.size = 0 }
+
+ // ---- search ----
+
+ def get[A: Ordering, B](tree: Tree[A, B], key: A): Option[B] = getNode(tree.root, key) match {
+ case null => None
+ case node => Some(node.value)
+ }
+
+ @tailrec private[this] def getNode[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] =
+ if (node eq null) null
+ else {
+ val cmp = ord.compare(key, node.key)
+ if (cmp < 0) getNode(node.left, key)
+ else if (cmp > 0) getNode(node.right, key)
+ else node
+ }
+
+ def contains[A: Ordering](tree: Tree[A, _], key: A): Boolean = getNode(tree.root, key) ne null
+
+ def min[A, B](tree: Tree[A, B]): Option[(A, B)] = minNode(tree.root) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def minKey[A](tree: Tree[A, _]): Option[A] = minNode(tree.root) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private def minNode[A, B](node: Node[A, B]): Node[A, B] =
+ if (node eq null) null else minNodeNonNull(node)
+
+ @tailrec def minNodeNonNull[A, B](node: Node[A, B]): Node[A, B] =
+ if (node.left eq null) node else minNodeNonNull(node.left)
+
+ def max[A, B](tree: Tree[A, B]): Option[(A, B)] = maxNode(tree.root) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def maxKey[A](tree: Tree[A, _]): Option[A] = maxNode(tree.root) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private def maxNode[A, B](node: Node[A, B]): Node[A, B] =
+ if (node eq null) null else maxNodeNonNull(node)
+
+ @tailrec def maxNodeNonNull[A, B](node: Node[A, B]): Node[A, B] =
+ if (node.right eq null) node else maxNodeNonNull(node.right)
+
+ /**
+   * Returns the first (lowest) map entry with a key equal to or greater than `key`. Returns `None` if there is
+   * no such node.
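+   *
+   * For example, a sketch assuming a tree that contains the keys 1, 3 and 5:
+   * {{{
+   * minAfter(tree, 2) // Some entry with key 3
+   * minAfter(tree, 3) // Some entry with key 3
+   * minAfter(tree, 6) // None
+   * }}}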
+ */
+ def minAfter[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] =
+ minNodeAfter(tree.root, key) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def minKeyAfter[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] =
+ minNodeAfter(tree.root, key) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private[this] def minNodeAfter[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = {
+ if (node eq null) null
+ else {
+ var y: Node[A, B] = null
+ var x = node
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+ if (cmp <= 0) y else successor(y)
+ }
+ }
+
+ /**
+ * Returns the last (highest) map entry with a key smaller than `key`. Returns `None` if there is no such node.
+ */
+ def maxBefore[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Option[(A, B)] =
+ maxNodeBefore(tree.root, key) match {
+ case null => None
+ case node => Some((node.key, node.value))
+ }
+
+ def maxKeyBefore[A](tree: Tree[A, _], key: A)(implicit ord: Ordering[A]): Option[A] =
+ maxNodeBefore(tree.root, key) match {
+ case null => None
+ case node => Some(node.key)
+ }
+
+ private[this] def maxNodeBefore[A, B](node: Node[A, B], key: A)(implicit ord: Ordering[A]): Node[A, B] = {
+ if (node eq null) null
+ else {
+ var y: Node[A, B] = null
+ var x = node
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+ if (cmp > 0) y else predecessor(y)
+ }
+ }
+
+ // ---- insertion ----
+
+ def insert[sealed A, sealed B](tree: Tree[A, B], key: A, value: B)(implicit ord: Ordering[A]): Unit = {
+ var y: Node[A, B] = null
+ var x = tree.root
+ var cmp = 1
+ while ((x ne null) && cmp != 0) {
+ y = x
+ cmp = ord.compare(key, x.key)
+ x = if (cmp < 0) x.left else x.right
+ }
+
+ if (cmp == 0) y.value = value
+ else {
+ val z = Node.leaf(key, value, red = true, y)
+
+ if (y eq null) tree.root = z
+ else if (cmp < 0) y.left = z
+ else y.right = z
+
+ fixAfterInsert(tree, z)
+ tree.size += 1
+ }
+ }
+
+ private[this] def fixAfterInsert[A, B](tree: Tree[A, B], node: Node[A, B]): Unit = {
+ var z = node
+ while (isRed(z.parent)) {
+ if (z.parent eq z.parent.parent.left) {
+ val y = z.parent.parent.right
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.right) {
+ z = z.parent
+ rotateLeft(tree, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ rotateRight(tree, z.parent.parent)
+ }
+ } else { // symmetric cases
+ val y = z.parent.parent.left
+ if (isRed(y)) {
+ z.parent.red = false
+ y.red = false
+ z.parent.parent.red = true
+ z = z.parent.parent
+ } else {
+ if (z eq z.parent.left) {
+ z = z.parent
+ rotateRight(tree, z)
+ }
+ z.parent.red = false
+ z.parent.parent.red = true
+ rotateLeft(tree, z.parent.parent)
+ }
+ }
+ }
+ tree.root.red = false
+ }
+
+ // ---- deletion ----
+
+ def delete[A, B](tree: Tree[A, B], key: A)(implicit ord: Ordering[A]): Unit = {
+ val z = getNode(tree.root, key)
+ if (z ne null) {
+ var y = z
+ var yIsRed = y.red
+ var x: Node[A, B] = null
+ var xParent: Node[A, B] = null
+
+ if (z.left eq null) {
+ x = z.right
+ transplant(tree, z, z.right)
+ xParent = z.parent
+ }
+ else if (z.right eq null) {
+ x = z.left
+ transplant(tree, z, z.left)
+ xParent = z.parent
+ }
+ else {
+ y = minNodeNonNull(z.right)
+ yIsRed = y.red
+ x = y.right
+
+ if (y.parent eq z) xParent = y
+ else {
+ xParent = y.parent
+ transplant(tree, y, y.right)
+ y.right = z.right
+ y.right.parent = y
+ }
+ transplant(tree, z, y)
+ y.left = z.left
+ y.left.parent = y
+ y.red = z.red
+ }
+
+ if (!yIsRed) fixAfterDelete(tree, x, xParent)
+ tree.size -= 1
+ }
+ }
+
+ private[this] def fixAfterDelete[A, B](tree: Tree[A, B], node: Node[A, B], parent: Node[A, B]): Unit = {
+ var x = node
+ var xParent = parent
+ while ((x ne tree.root) && isBlack(x)) {
+ if (x eq xParent.left) {
+ var w = xParent.right
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ rotateLeft(tree, xParent)
+ w = xParent.right
+ }
+ if (isBlack(w.left) && isBlack(w.right)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.right)) {
+ w.left.red = false
+ w.red = true
+ rotateRight(tree, w)
+ w = xParent.right
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.right.red = false
+ rotateLeft(tree, xParent)
+ x = tree.root
+ }
+ } else { // symmetric cases
+ var w = xParent.left
+ // assert(w ne null)
+
+ if (w.red) {
+ w.red = false
+ xParent.red = true
+ rotateRight(tree, xParent)
+ w = xParent.left
+ }
+ if (isBlack(w.right) && isBlack(w.left)) {
+ w.red = true
+ x = xParent
+ } else {
+ if (isBlack(w.left)) {
+ w.right.red = false
+ w.red = true
+ rotateLeft(tree, w)
+ w = xParent.left
+ }
+ w.red = xParent.red
+ xParent.red = false
+ w.left.red = false
+ rotateRight(tree, xParent)
+ x = tree.root
+ }
+ }
+ xParent = x.parent
+ }
+ if (x ne null) x.red = false
+ }
+
+ // ---- helpers ----
+
+ /**
+ * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is,
+ * therefore, the last node), this method returns `null`.
+ */
+ private[this] def successor[A, B](node: Node[A, B]): Node[A, B] = {
+ if (node.right ne null) minNodeNonNull(node.right)
+ else {
+ var x = node
+ var y = x.parent
+ while ((y ne null) && (x eq y.right)) {
+ x = y
+ y = y.parent
+ }
+ y
+ }
+ }
+
+ /**
+ * Returns the node that precedes `node` in an in-order tree traversal. If `node` has the minimum key (and is,
+ * therefore, the first node), this method returns `null`.
+ */
+ private[this] def predecessor[A, B](node: Node[A, B]): Node[A, B] = {
+ if (node.left ne null) maxNodeNonNull(node.left)
+ else {
+ var x = node
+ var y = x.parent
+ while ((y ne null) && (x eq y.left)) {
+ x = y
+ y = y.parent
+ }
+ y
+ }
+ }
+
+ private[this] def rotateLeft[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
+ // assert(x.right ne null)
+ val y = x.right
+ x.right = y.left
+
+ if (y.left ne null) y.left.parent = x
+ y.parent = x.parent
+
+ if (x.parent eq null) tree.root = y
+ else if (x eq x.parent.left) x.parent.left = y
+ else x.parent.right = y
+
+ y.left = x
+ x.parent = y
+ }
+
+ private[this] def rotateRight[A, B](tree: Tree[A, B], x: Node[A, B]): Unit = if (x ne null) {
+ // assert(x.left ne null)
+ val y = x.left
+ x.left = y.right
+
+ if (y.right ne null) y.right.parent = x
+ y.parent = x.parent
+
+ if (x.parent eq null) tree.root = y
+ else if (x eq x.parent.right) x.parent.right = y
+ else x.parent.left = y
+
+ y.right = x
+ x.parent = y
+ }
+
+ /**
+ * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous
+   * parent and setting `from`'s parent to `to`'s previous parent. The children of `from` are left unchanged.
+ */
+ private[this] def transplant[A, B](tree: Tree[A, B], to: Node[A, B], from: Node[A, B]): Unit = {
+ if (to.parent eq null) tree.root = from
+ else if (to eq to.parent.left) to.parent.left = from
+ else to.parent.right = from
+
+ if (from ne null) from.parent = to.parent
+ }
+
+ // ---- tree traversal ----
+
+ def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = foreachNode(tree.root, f)
+
+ private[this] def foreachNode[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit =
+ if (node ne null) foreachNodeNonNull(node, f)
+
+ private[this] def foreachNodeNonNull[A, B, U](node: Node[A, B], f: ((A, B)) => U): Unit = {
+ if (node.left ne null) foreachNodeNonNull(node.left, f)
+ f((node.key, node.value))
+ if (node.right ne null) foreachNodeNonNull(node.right, f)
+ }
+
+ def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = {
+ def g(node: Node[A, _]): Unit = {
+ val l = node.left
+ if(l ne null) g(l)
+ f(node.key)
+ val r = node.right
+ if(r ne null) g(r)
+ }
+ val r = tree.root
+ if(r ne null) g(r)
+ }
+
+ def foreachEntry[A, B, U](tree: Tree[A, B], f: (A, B) => U): Unit = {
+ def g(node: Node[A, B]): Unit = {
+ val l = node.left
+ if(l ne null) g(l)
+ f(node.key, node.value)
+ val r = node.right
+ if(r ne null) g(r)
+ }
+ val r = tree.root
+ if(r ne null) g(r)
+ }
+
+ def transform[A, B](tree: Tree[A, B], f: (A, B) => B): Unit = transformNode(tree.root, f)
+
+ private[this] def transformNode[A, B, U](node: Node[A, B], f: (A, B) => B): Unit =
+ if (node ne null) transformNodeNonNull(node, f)
+
+ private[this] def transformNodeNonNull[A, B, U](node: Node[A, B], f: (A, B) => B): Unit = {
+ if (node.left ne null) transformNodeNonNull(node.left, f)
+ node.value = f(node.key, node.value)
+ if (node.right ne null) transformNodeNonNull(node.right, f)
+ }
+
+ def iterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[(A, B)] =
+ new EntriesIterator(tree, start, end)
+
+ def keysIterator[sealed A: Ordering](tree: Tree[A, _], start: Option[A] = None, end: Option[A] = None): Iterator[A] =
+ new KeysIterator(tree, start, end)
+
+ def valuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A] = None, end: Option[A] = None): Iterator[B] =
+ new ValuesIterator(tree, start, end)
+
+ private[this] abstract class TreeIterator[sealed A, sealed B, R](tree: Tree[A, B], start: Option[A], end: Option[A])
+ (implicit ord: Ordering[A]) extends AbstractIterator[R] {
+
+ protected def nextResult(node: Node[A, B]): R
+
+ def hasNext: Boolean = nextNode ne null
+
+ @throws[NoSuchElementException]
+ def next(): R = nextNode match {
+ case null => throw new NoSuchElementException("next on empty iterator")
+ case node =>
+ nextNode = successor(node)
+ setNullIfAfterEnd()
+ nextResult(node)
+ }
+
+ private[this] var nextNode: Node[A, B] = start match {
+ case None => minNode(tree.root)
+ case Some(from) => minNodeAfter(tree.root, from)
+ }
+
+ private[this] def setNullIfAfterEnd(): Unit =
+ if (end.isDefined && (nextNode ne null) && ord.compare(nextNode.key, end.get) >= 0)
+ nextNode = null
+
+ setNullIfAfterEnd()
+ }
+
+ private[this] final class EntriesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, (A, B)](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = (node.key, node.value)
+ }
+
+ private[this] final class KeysIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, A](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = node.key
+ }
+
+ private[this] final class ValuesIterator[sealed A: Ordering, sealed B](tree: Tree[A, B], start: Option[A], end: Option[A])
+ extends TreeIterator[A, B, B](tree, start, end) {
+
+ def nextResult(node: Node[A, B]) = node.value
+ }
+
+ // ---- debugging ----
+
+ /**
+ * Checks if the tree is in a valid state. That happens if:
+ * - It is a valid binary search tree;
+ * - All red-black properties are satisfied;
+ * - All non-null nodes have their `parent` reference correct;
+ * - The size variable in `tree` corresponds to the actual size of the tree.
+ */
+ def isValid[A: Ordering, B](tree: Tree[A, B]): Boolean =
+ isValidBST(tree.root) && hasProperParentRefs(tree) && isValidRedBlackTree(tree) && size(tree.root) == tree.size
+
+ /**
+ * Returns true if all non-null nodes have their `parent` reference correct.
+ */
+ private[this] def hasProperParentRefs[A, B](tree: Tree[A, B]): Boolean = {
+
+ def hasProperParentRefs(node: Node[A, B]): Boolean = {
+ if (node eq null) true
+ else {
+ if ((node.left ne null) && (node.left.parent ne node) ||
+ (node.right ne null) && (node.right.parent ne node)) false
+ else hasProperParentRefs(node.left) && hasProperParentRefs(node.right)
+ }
+ }
+
+ if(tree.root eq null) true
+ else (tree.root.parent eq null) && hasProperParentRefs(tree.root)
+ }
+
+ /**
+ * Returns true if this node follows the properties of a binary search tree.
+ */
+ private[this] def isValidBST[A, B](node: Node[A, B])(implicit ord: Ordering[A]): Boolean = {
+ if (node eq null) true
+ else {
+ if ((node.left ne null) && (ord.compare(node.key, node.left.key) <= 0) ||
+ (node.right ne null) && (ord.compare(node.key, node.right.key) >= 0)) false
+ else isValidBST(node.left) && isValidBST(node.right)
+ }
+ }
+
+ /**
+   * Returns true if the tree has all the red-black tree properties: the root node is black, all children of red
+   * nodes are black, and every path from a node to any of its null children contains the same number of black nodes.
+ */
+ private[this] def isValidRedBlackTree[A, B](tree: Tree[A, B]): Boolean = {
+
+ def noRedAfterRed(node: Node[A, B]): Boolean = {
+ if (node eq null) true
+ else if (node.red && (isRed(node.left) || isRed(node.right))) false
+ else noRedAfterRed(node.left) && noRedAfterRed(node.right)
+ }
+
+ def blackHeight(node: Node[A, B]): Int = {
+ if (node eq null) 1
+ else {
+ val lh = blackHeight(node.left)
+ val rh = blackHeight(node.right)
+
+ if (lh == -1 || lh != rh) -1
+ else if (isRed(node)) lh
+ else lh + 1
+ }
+ }
+
+ isBlack(tree.root) && noRedAfterRed(tree.root) && blackHeight(tree.root) >= 0
+ }
+
+ // building
+
+ /** Build a Tree suitable for a TreeSet from an ordered sequence of keys */
+ def fromOrderedKeys[sealed A](xs: Iterator[A], size: Int): Tree[A, Null] = {
+ val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+ def f(level: Int, size: Int): Node[A, Null] = size match {
+ case 0 => null
+ case 1 => new Node(xs.next(), null, level == maxUsedDepth && level != 1, null, null, null)
+ case n =>
+ val leftSize = (size-1)/2
+ val left = f(level+1, leftSize)
+ val x = xs.next()
+ val right = f(level+1, size-1-leftSize)
+ val n = new Node(x, null, false, left, right, null)
+ if(left ne null) left.parent = n
+ right.parent = n
+ n
+ }
+ new Tree(f(1, size), size)
+ }
+
+ /** Build a Tree suitable for a TreeMap from an ordered sequence of key/value pairs */
+ def fromOrderedEntries[sealed A, sealed B](xs: Iterator[(A, B)], size: Int): Tree[A, B] = {
+ val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes
+ def f(level: Int, size: Int): Node[A, B] = size match {
+ case 0 => null
+ case 1 =>
+ val (k, v) = xs.next()
+ new Node(k, v, level == maxUsedDepth && level != 1, null, null, null)
+ case n =>
+ val leftSize = (size-1)/2
+ val left = f(level+1, leftSize)
+ val (k, v) = xs.next()
+ val right = f(level+1, size-1-leftSize)
+ val n = new Node(k, v, false, left, right, null)
+ if(left ne null) left.parent = n
+ right.parent = n
+ n
+ }
+ new Tree(f(1, size), size)
+ }
+
+ def copyTree[sealed A, sealed B](n: Node[A, B]): Node[A, B] =
+ if(n eq null) null else {
+ val c = new Node(n.key, n.value, n.red, copyTree(n.left), copyTree(n.right), null)
+ if(c.left != null) c.left.parent = c
+ if(c.right != null) c.right.parent = c
+ c
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala
new file mode 100644
index 000000000000..246e525e37d9
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/ReusableBuilder.scala
@@ -0,0 +1,56 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import language.experimental.captureChecking
+
+/** `ReusableBuilder` is a marker trait that indicates that a `Builder`
+ * can be reused to build more than one instance of a collection. In
+ * particular, calling `result()` followed by `clear()` will produce a
+ * collection and reset the builder to begin building a new collection
+ * of the same type.
+ *
+ * In general no method other than `clear()` may be called after `result()`.
+ * It is up to subclasses to implement and to document other allowed sequences
+ * of operations (e.g. calling other methods after `result()` in order to obtain
+ * different snapshots of a collection under construction).
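+ *
+ * A usage sketch (`ListBuffer` is one `ReusableBuilder` in the standard library):
+ * {{{
+ * val b = new scala.collection.mutable.ListBuffer[Int]
+ * b += 1; b += 2
+ * val xs = b.result() // List(1, 2)
+ * b.clear()           // reset; b can now build a new list
+ * b += 3
+ * val ys = b.result() // List(3)
+ * }}}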
+ *
+ * @tparam Elem the type of elements that get added to the builder.
+ * @tparam To the type of collection that is produced.
+ *
+ * @define multipleResults
+ *
+ * This Builder can be reused after calling `result()` without an
+ * intermediate call to `clear()` in order to build multiple related results.
+ */
+trait ReusableBuilder[-Elem, +To] extends Builder[Elem, To] {
+ /** Clears the contents of this builder.
+ * After execution of this method, the builder will contain no elements.
+ *
+ * If executed immediately after a call to `result()`, this allows a new
+ * instance of the same type of collection to be built.
+ */
+ override def clear(): Unit // Note: overriding for Scaladoc only!
+
+ /** Produces a collection from the added elements.
+ *
+ * After a call to `result`, the behavior of all other methods is undefined
+ * save for `clear()`. If `clear()` is called, then the builder is reset and
+ * may be used to build another instance.
+ *
+ * @return a collection containing the elements added to this builder.
+ */
+ override def result(): To // Note: overriding for Scaladoc only!
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/Set.scala b/tests/pos-special/stdlib/collection/mutable/Set.scala
new file mode 100644
index 000000000000..01384e993e89
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Set.scala
@@ -0,0 +1,123 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import scala.collection.{IterableFactory, IterableFactoryDefaults, IterableOps}
+import language.experimental.captureChecking
+
+/** Base trait for mutable sets */
+trait Set[A]
+ extends Iterable[A]
+ with collection.Set[A]
+ with SetOps[A, Set, Set[A]]
+ with IterableFactoryDefaults[A, Set] {
+
+ override def iterableFactory: IterableFactory[Set] = Set
+}
+
+/**
+ * @define coll mutable set
+ * @define Coll `mutable.Set`
+ */
+trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]]
+ extends collection.SetOps[A, CC, C]
+ with IterableOps[A, CC, C] // only needed so we can use super[IterableOps] below
+ with Cloneable[C]
+ with Builder[A, C]
+ with Growable[A]
+ with Shrinkable[A] {
+
+ def result(): C = coll
+
+ /** Check whether the set contains the given element, and add it if not.
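+   *
+   * For example:
+   * {{{
+   * val s = Set(1, 2)
+   * s.add(3) // true; s now contains 1, 2 and 3
+   * s.add(3) // false; 3 was already present
+   * }}}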
+ *
+ * @param elem the element to be added
+ * @return true if the element was added
+ */
+ def add(elem: A): Boolean =
+ !contains(elem) && {
+ coll += elem; true
+ }
+
+ /** Updates the presence of a single element in this set.
+ *
+ * This method allows one to add or remove an element `elem`
+ * from this set depending on the value of parameter `included`.
+ * Typically, one would use the following syntax:
+ * {{{
+ * set(elem) = true // adds element
+ * set(elem) = false // removes element
+ * }}}
+ *
+ * @param elem the element to be added or removed
+ * @param included a flag indicating whether element should be included or excluded.
+ */
+ def update(elem: A, included: Boolean): Unit = {
+ if (included) add(elem)
+ else remove(elem)
+ }
+
+ /** Removes an element from this set.
+ *
+ * @param elem the element to be removed
+ * @return true if this set contained the element before it was removed
+ */
+ def remove(elem: A): Boolean = {
+ val res = contains(elem)
+ coll -= elem
+ res
+ }
+
+ def diff(that: collection.Set[A]): C =
+ foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem)
+
+ @deprecated("Use filterInPlace instead", "2.13.0")
+ @inline final def retain(p: A => Boolean): Unit = filterInPlace(p)
+
+  /** Removes all elements from the set that do not satisfy the given predicate.
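+   *
+   * For example:
+   * {{{
+   * val s = Set(1, 2, 3, 4)
+   * s.filterInPlace(_ % 2 == 0) // s now contains only 2 and 4
+   * }}}
+   *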
+ * @param p the predicate used to test elements. Only elements for
+ * which `p` returns `true` are retained in the set; all others
+ * are removed.
+ */
+ def filterInPlace(p: A => Boolean): this.type = {
+ if (nonEmpty) {
+ val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException
+ val arrayLength = array.length
+ var i = 0
+ while (i < arrayLength) {
+ val elem = array(i).asInstanceOf[A]
+ if (!p(elem)) {
+ this -= elem
+ }
+ i += 1
+ }
+ }
+ this
+ }
+
+ override def clone(): C = empty ++= this
+
+ override def knownSize: Int = super[IterableOps].knownSize
+}
+
+/**
+ * $factoryInfo
+ * @define coll mutable set
+ * @define Coll `mutable.Set`
+ */
+@SerialVersionUID(3L)
+object Set extends IterableFactory.Delegate[Set](HashSet)
+
+
+/** Explicit instantiation of the `Set` trait to reduce class file size in subclasses. */
+abstract class AbstractSet[A] extends scala.collection.AbstractSet[A] with Set[A]
diff --git a/tests/pos-special/stdlib/collection/mutable/SortedMap.scala b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala
new file mode 100644
index 000000000000..8017177f5720
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/SortedMap.scala
@@ -0,0 +1,104 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.mutable
+
+import scala.collection.{SortedMapFactory, SortedMapFactoryDefaults}
+import language.experimental.captureChecking
+
+/**
+ * Base type for mutable sorted map collections
+ */
+trait SortedMap[K, V]
+ extends collection.SortedMap[K, V]
+ with Map[K, V]
+ with SortedMapOps[K, V, SortedMap, SortedMap[K, V]]
+ with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map] {
+
+ override def unsorted: Map[K, V] = this
+
+ override def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap
+
+ /** The same sorted map with a given default function.
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+ * are not affected by `withDefault`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
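+   *
+   * A sketch of the behavior:
+   * {{{
+   * val m = SortedMap(1 -> "a").withDefault(k => "?" + k)
+   * m(1) // "a"
+   * m(2) // "?2" (the default is computed; the entry is not stored, so get(2) is still None)
+   * }}}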
+ *
+ * @param d the function mapping keys to values, used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ override def withDefault(d: K -> V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, d)
+
+ /** The same map with a given default value.
+ * Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
+ * are not affected by `withDefaultValue`.
+ *
+ * Invoking transformer methods (e.g. `map`) will not preserve the default value.
+ *
+ * @param d default value used for non-present keys
+ * @return a wrapper of the map with a default value
+ */
+ override def withDefaultValue(d: V): SortedMap[K, V] = new SortedMap.WithDefault[K, V](this, _ => d)
+}
+
+trait SortedMapOps[K, V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
+ extends collection.SortedMapOps[K, V, CC, C]
+ with MapOps[K, V, Map, C] {
+
+ def unsorted: Map[K, V]
+
+ @deprecated("Use m.clone().addOne((k,v)) instead of m.updated(k, v)", "2.13.0")
+ override def updated[V1 >: V](key: K, value: V1): CC[K, V1] =
+ clone().asInstanceOf[CC[K, V1]].addOne((key, value))
+}
+
+@SerialVersionUID(3L)
+object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) {
+
+ @SerialVersionUID(3L)
+ final class WithDefault[K, V](underlying: SortedMap[K, V], defaultValue: K -> V)
+ extends Map.WithDefault[K, V](underlying, defaultValue)
+ with SortedMap[K, V]
+ with SortedMapOps[K, V, SortedMap, WithDefault[K, V]]
+ with Serializable {
+
+ override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory
+
+ def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start)
+
+ def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start)
+
+ implicit def ordering: Ordering[K] = underlying.ordering
+
+ def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] =
+ new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue)
+
+ // Need to override following methods to match type signatures of `SortedMap.WithDefault`
+ // for operations preserving default value
+ override def subtractOne(elem: K): WithDefault.this.type = { underlying.subtractOne(elem); this }
+
+ override def addOne(elem: (K, V)): WithDefault.this.type = { underlying.addOne(elem); this }
+
+ override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue)
+
+ override def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]^): SortedMap[K, V2] =
+ underlying.concat(suffix).withDefault(defaultValue)
+
+ override protected def fromSpecific(coll: scala.collection.IterableOnce[(K, V)]^): WithDefault[K, V] =
+ new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue)
+
+ override protected def newSpecificBuilder: Builder[(K, V), WithDefault[K, V]] =
+ SortedMap.newBuilder.mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue))
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/SortedSet.scala b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala
new file mode 100644
index 000000000000..e657fb749d7d
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/SortedSet.scala
@@ -0,0 +1,49 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+import language.experimental.captureChecking
+
+/**
+ * Base type for mutable sorted set collections
+ */
+trait SortedSet[A]
+ extends Set[A]
+ with collection.SortedSet[A]
+ with SortedSetOps[A, SortedSet, SortedSet[A]]
+ with SortedSetFactoryDefaults[A, SortedSet, Set] {
+
+ override def unsorted: Set[A] = this
+
+ override def sortedIterableFactory: SortedIterableFactory[SortedSet] = SortedSet
+}
+
+/**
+ * @define coll mutable sorted set
+ * @define Coll `mutable.SortedSet`
+ */
+trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]]
+ extends SetOps[A, Set, C]
+ with collection.SortedSetOps[A, CC, C] {
+
+ def unsorted: Set[A]
+}
+
+/**
+ * $factoryInfo
+ * @define coll mutable sorted set
+ * @define Coll `mutable.SortedSet`
+ */
+@SerialVersionUID(3L)
+object SortedSet extends SortedIterableFactory.Delegate[SortedSet](TreeSet)
diff --git a/tests/pos-special/stdlib/collection/mutable/Stack.scala b/tests/pos-special/stdlib/collection/mutable/Stack.scala
new file mode 100644
index 000000000000..4efa9621f374
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/Stack.scala
@@ -0,0 +1,144 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection.mutable
+
+import scala.annotation.{migration, nowarn}
+import scala.collection.generic.DefaultSerializable
+import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps}
+
+import language.experimental.captureChecking
+
+/** A stack implements a data structure which allows objects to be stored and
+ *  retrieved in a last-in-first-out (LIFO) fashion.
+ *
+ * Note that operations which consume and produce iterables preserve order,
+ * rather than reversing it (as would be expected from building a new stack
+ * by pushing an element at a time).
+ *
+ * @tparam A type of the elements contained in this stack.
+ *
+ * @define Coll `Stack`
+ * @define coll stack
+ * @define orderDependent
+ * @define orderDependentFold
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0")
+class Stack[sealed A] protected (array: Array[AnyRef], start: Int, end: Int)
+ extends ArrayDeque[A](array, start, end)
+ with IndexedSeqOps[A, Stack, Stack[A]]
+ with StrictOptimizedSeqOps[A, Stack, Stack[A]]
+ with IterableFactoryDefaults[A, Stack]
+ with ArrayDequeOps[A, Stack, Stack[A]]
+ with Cloneable[Stack[A]]
+ with DefaultSerializable {
+
+ def this(initialSize: Int = ArrayDeque.DefaultInitialSize) =
+ this(ArrayDeque.alloc(initialSize), start = 0, end = 0)
+
+ override def iterableFactory: SeqFactory[Stack] = Stack
+
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "Stack"
+
+ /**
+   * Adds an element to the top of this stack.
+   *
+   * @param elem the element to push
+   * @return this stack
+ */
+ def push(elem: A): this.type = prepend(elem)
+
+ /** Push two or more elements onto the stack. The last element
+ * of the sequence will be on top of the new stack.
+ *
+ * @param elems the element sequence.
+ * @return the stack with the new elements on top.
+ */
+ def push(elem1: A, elem2: A, elems: A*): this.type = {
+ val k = elems.knownSize
+ ensureSize(length + (if(k >= 0) k + 2 else 3))
+ prepend(elem1).prepend(elem2).pushAll(elems)
+ }
+
+ /** Push all elements in the given iterable object onto the stack. The
+ * last element in the iterable object will be on top of the new stack.
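+   *
+   * For example:
+   * {{{
+   * val s = Stack(0)
+   * s.pushAll(List(1, 2, 3))
+   * s.pop() // 3: the last element of the list ends up on top
+   * }}}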
+ *
+ * @param elems the iterable object.
+ * @return the stack with the new elements on top.
+ */
+ def pushAll(elems: scala.collection.IterableOnce[A]): this.type =
+ prependAll(elems match {
+ case it: scala.collection.Seq[A] => it.view.reverse
+ case it => IndexedSeq.from(it).view.reverse
+ })
+
+ /**
+   * Removes the top element from this stack and returns it.
+   *
+   * @return the element removed from the top of this stack
+ * @throws NoSuchElementException when stack is empty
+ */
+ def pop(): A = removeHead()
+
+ /**
+   * Pops all elements from this stack and returns them
+ *
+ * @return The removed elements
+ */
+ def popAll(): scala.collection.Seq[A] = removeAll()
+
+ /**
+   * Removes elements from the top of this stack as long as they satisfy the given predicate, and returns them
+ *
+ * @param f the predicate used for choosing elements
+ * @return The removed elements
+ */
+ def popWhile(f: A => Boolean): scala.collection.Seq[A] = removeHeadWhile(f)
+
+ /** Returns the top element of the stack. This method will not remove
+ * the element from the stack. An error is signaled if there is no
+ * element on the stack.
+ *
+ * @throws NoSuchElementException
+ * @return the top element
+ */
+ @`inline` final def top: A = head
+
+ override protected def klone(): Stack[A] = {
+ val bf = newSpecificBuilder
+ bf ++= this
+ bf.result()
+ }
+
+ override protected def ofArray(array: Array[AnyRef], end: Int): Stack[A] =
+ new Stack(array, start = 0, end)
+
+}
+
+/**
+ * $factoryInfo
+ * @define coll stack
+ * @define Coll `Stack`
+ */
+@SerialVersionUID(3L)
+object Stack extends StrictOptimizedSeqFactory[Stack] {
+
+ def from[sealed A](source: IterableOnce[A]^): Stack[A] = empty ++= source
+
+ def empty[sealed A]: Stack[A] = new Stack
+
+ def newBuilder[sealed A]: Builder[A, Stack[A]] = new GrowableBuilder[A, Stack[A]](empty)
+
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
index c7859214821d..5320fa1dabb0 100644
--- a/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
+++ b/tests/pos-special/stdlib/collection/mutable/StringBuilder.scala
@@ -110,7 +110,7 @@ final class StringBuilder(val underlying: java.lang.StringBuilder) extends Abstr
override def toString: String = result()
- override def toArray[B >: Char](implicit ct: scala.reflect.ClassTag[B]) =
+ override def toArray[sealed B >: Char](implicit ct: scala.reflect.ClassTag[B]) =
ct.runtimeClass match {
case java.lang.Character.TYPE => toCharArray.asInstanceOf[Array[B]]
case _ => super.toArray
diff --git a/tests/pos-special/stdlib/collection/mutable/TreeMap.scala b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
new file mode 100644
index 000000000000..f714a9ed46c2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/TreeMap.scala
@@ -0,0 +1,258 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.{RedBlackTree => RB}
+import language.experimental.captureChecking
+
+/**
+ * A mutable sorted map implemented using a mutable red-black tree as the underlying data structure.
+ *
+ * @param ordering the implicit ordering used to compare objects of type `K`.
+ * @tparam K the type of the keys contained in this tree map.
+ * @tparam V the type of the values associated with the keys.
+ *
+ * @define Coll mutable.TreeMap
+ * @define coll mutable tree map
+ */
+sealed class TreeMap[sealed K, sealed V] private (tree: RB.Tree[K, V])(implicit val ordering: Ordering[K])
+ extends AbstractMap[K, V]
+ with SortedMap[K, V]
+ with SortedMapOps[K, V, TreeMap, TreeMap[K, V]]
+ with StrictOptimizedIterableOps[(K, V), Iterable, TreeMap[K, V]]
+ with StrictOptimizedMapOps[K, V, Map, TreeMap[K, V]]
+ with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]]
+ with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map]
+ with DefaultSerializable {
+
+ override def sortedMapFactory = TreeMap
+
+ /**
+ * Creates an empty `TreeMap`.
+ * @param ord the implicit ordering used to compare objects of type `K`.
+ * @return an empty `TreeMap`.
+ */
+ def this()(implicit ord: Ordering[K]) = this(RB.Tree.empty)(ord)
+
+ def iterator: Iterator[(K, V)] = {
+ if (isEmpty) Iterator.empty
+ else RB.iterator(tree)
+ }
+
+ override def keysIterator: Iterator[K] = {
+ if (isEmpty) Iterator.empty
+ else RB.keysIterator(tree, None)
+ }
+
+ override def valuesIterator: Iterator[V] = {
+ if (isEmpty) Iterator.empty
+ else RB.valuesIterator(tree, None)
+ }
+
+ def keysIteratorFrom(start: K): Iterator[K] = {
+ if (isEmpty) Iterator.empty
+ else RB.keysIterator(tree, Some(start))
+ }
+
+ def iteratorFrom(start: K): Iterator[(K, V)] = {
+ if (isEmpty) Iterator.empty
+ else RB.iterator(tree, Some(start))
+ }
+
+ override def valuesIteratorFrom(start: K): Iterator[V] = {
+ if (isEmpty) Iterator.empty
+ else RB.valuesIterator(tree, Some(start))
+ }
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit =
+ shape.parUnbox(
+ scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Node[K, V]](
+ size, tree.root, _.left, _.right, x => (x.key, x.value)
+ )
+ )
+
+ override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
+ import scala.collection.convert.impl._
+ type T = RB.Node[K, V]
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int])
+ case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree.root, _.left, _.right, _.key))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
+ import scala.collection.convert.impl._
+ type T = RB.Node[K, V]
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Int])
+ case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.value.asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree.root, _.left, _.right, _.value))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ def addOne(elem: (K, V)): this.type = { RB.insert(tree, elem._1, elem._2); this }
+
+ def subtractOne(elem: K): this.type = { RB.delete(tree, elem); this }
+
+ override def clear(): Unit = RB.clear(tree)
+
+ def get(key: K): Option[V] = RB.get(tree, key)
+
+ /**
+ * Creates a ranged projection of this map. Any mutations in the ranged projection will update the original map and
+ * vice versa.
+ *
+   * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+   * of whether the entries are added through the original map or through this view. That means that if one inserts a
+   * key-value pair in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+ * newly added entry. Mutations are always reflected in the original map, though.
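+   *
+   * A sketch of these semantics:
+   * {{{
+   * val m = TreeMap(1 -> "a", 5 -> "e")
+   * val v = m.rangeImpl(Some(2), Some(10)) // view of the keys in [2, 10)
+   * v += (3 -> "c")                        // also visible in m
+   * v.contains(1)                          // false: 1 is outside the view
+   * m.contains(3)                          // true
+   * }}}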
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = new TreeMapProjection(from, until)
+
+ override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f)
+ override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f)
+
+ override def size: Int = RB.size(tree)
+ override def knownSize: Int = size
+ override def isEmpty: Boolean = RB.isEmpty(tree)
+
+ override def contains(key: K): Boolean = RB.contains(tree, key)
+
+ override def head: (K, V) = RB.min(tree).get
+
+ override def last: (K, V) = RB.max(tree).get
+
+ override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key)
+
+ override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key)
+
+ override protected[this] def className: String = "TreeMap"
+
+
+ /**
+ * A ranged projection of a [[TreeMap]]. Mutations on this map affect the original map and vice versa.
+ *
+   * Only entries with keys within this projection's key range will ever appear as elements of this map, independently
+   * of whether the entries are added through the original map or through this view. That means that if one inserts a
+   * key-value pair in a view whose key is outside the view's bounds, calls to `get` or `contains` will _not_ consider the
+ * newly added entry. Mutations are always reflected in the original map, though.
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ private[this] final class TreeMapProjection(from: Option[K], until: Option[K]) extends TreeMap[K, V](tree) {
+
+ /**
+ * Given a possible new lower bound, chooses and returns the most constraining one (the maximum).
+ */
+ private[this] def pickLowerBound(newFrom: Option[K]): Option[K] = (from, newFrom) match {
+ case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr))
+ case (None, _) => newFrom
+ case _ => from
+ }
+
+ /**
+ * Given a possible new upper bound, chooses and returns the most constraining one (the minimum).
+ */
+ private[this] def pickUpperBound(newUntil: Option[K]): Option[K] = (until, newUntil) match {
+ case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt))
+ case (None, _) => newUntil
+ case _ => until
+ }
+
+ /**
+ * Returns true if the argument is inside the view bounds (between `from` and `until`).
+ */
+ private[this] def isInsideViewBounds(key: K): Boolean = {
+ val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0
+ val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0
+ afterFrom && beforeUntil
+ }
+
+ override def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] =
+ new TreeMapProjection(pickLowerBound(from), pickUpperBound(until))
+
+ override def get(key: K) = if (isInsideViewBounds(key)) RB.get(tree, key) else None
+
+ override def iterator = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, from, until)
+ override def keysIterator: Iterator[K] = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, from, until)
+ override def valuesIterator: Iterator[V] = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, from, until)
+ override def keysIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.keysIterator(tree, pickLowerBound(Some(start)), until)
+ override def iteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.iterator(tree, pickLowerBound(Some(start)), until)
+ override def valuesIteratorFrom(start: K) = if (RB.size(tree) == 0) Iterator.empty else RB.valuesIterator(tree, pickLowerBound(Some(start)), until)
+ override def size = if (RB.size(tree) == 0) 0 else iterator.length
+ override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1
+ override def isEmpty = RB.size(tree) == 0 || !iterator.hasNext
+ override def contains(key: K) = isInsideViewBounds(key) && RB.contains(tree, key)
+
+ override def head = headOption.get
+ override def headOption = {
+ val entry = if (from.isDefined) RB.minAfter(tree, from.get) else RB.min(tree)
+ (entry, until) match {
+ case (Some(e), Some(unt)) if ordering.compare(e._1, unt) >= 0 => None
+ case _ => entry
+ }
+ }
+
+ override def last = lastOption.get
+ override def lastOption = {
+ val entry = if (until.isDefined) RB.maxBefore(tree, until.get) else RB.max(tree)
+ (entry, from) match {
+ case (Some(e), Some(fr)) if ordering.compare(e._1, fr) < 0 => None
+ case _ => entry
+ }
+ }
+
+ // Using the iterator should be efficient enough; if performance is deemed a problem later, specialized
+ // `foreach(f, from, until)` and `transform(f, from, until)` methods can be created in `RedBlackTree`. See
+ // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this.
+ override def foreach[U](f: ((K, V)) => U): Unit = iterator.foreach(f)
+
+ override def clone() = super.clone().rangeImpl(from, until)
+ }
+
+}
+
+/**
+ * $factoryInfo
+ *
+ * @define Coll mutable.TreeMap
+ * @define coll mutable tree map
+ */
+@SerialVersionUID(3L)
+object TreeMap extends SortedMapFactory[TreeMap] {
+
+ def from[sealed K : Ordering, sealed V](it: IterableOnce[(K, V)]^): TreeMap[K, V] =
+ Growable.from(empty[K, V], it)
+
+ def empty[sealed K : Ordering, sealed V]: TreeMap[K, V] = new TreeMap[K, V]()
+
+ def newBuilder[sealed K: Ordering, sealed V]: Builder[(K, V), TreeMap[K, V]] = new GrowableBuilder(empty[K, V])
+
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/TreeSet.scala b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala
new file mode 100644
index 000000000000..9ba439bea041
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/TreeSet.scala
@@ -0,0 +1,219 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection.mutable
+
+import scala.collection.Stepper.EfficientSplit
+import scala.collection.generic.DefaultSerializable
+import scala.collection.mutable.{RedBlackTree => RB}
+import scala.collection.{SortedIterableFactory, SortedSetFactoryDefaults, Stepper, StepperShape, StrictOptimizedIterableOps, StrictOptimizedSortedSetOps, mutable}
+import language.experimental.captureChecking
+
+/**
+ * A mutable sorted set implemented using a mutable red-black tree as the underlying data structure.
+ *
+ * @param ordering the implicit ordering used to compare objects of type `A`.
+ * @tparam A the type of the keys contained in this tree set.
+ *
+ * @define Coll mutable.TreeSet
+ * @define coll mutable tree set
+ */
+// Original API designed in part by Lucien Pereira
+sealed class TreeSet[sealed A] private (private val tree: RB.Tree[A, Null])(implicit val ordering: Ordering[A])
+ extends AbstractSet[A]
+ with SortedSet[A]
+ with SortedSetOps[A, TreeSet, TreeSet[A]]
+ with StrictOptimizedIterableOps[A, Set, TreeSet[A]]
+ with StrictOptimizedSortedSetOps[A, TreeSet, TreeSet[A]]
+ with SortedSetFactoryDefaults[A, TreeSet, Set]
+ with DefaultSerializable {
+
+ if (ordering eq null)
+ throw new NullPointerException("ordering must not be null")
+
+ /**
+ * Creates an empty `TreeSet`.
+ * @param ord the implicit ordering used to compare objects of type `A`.
+ * @return an empty `TreeSet`.
+ */
+ def this()(implicit ord: Ordering[A]) = this(RB.Tree.empty)(ord)
+
+ override def sortedIterableFactory: SortedIterableFactory[TreeSet] = TreeSet
+
+ def iterator: collection.Iterator[A] = RB.keysIterator(tree)
+
+ def iteratorFrom(start: A): collection.Iterator[A] = RB.keysIterator(tree, Some(start))
+
+ override def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = {
+ import scala.collection.convert.impl._
+ type T = RB.Node[A, Null]
+ val s = shape.shape match {
+ case StepperShape.IntShape => IntBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Int])
+ case StepperShape.LongShape => LongBinaryTreeStepper.from[T] (size, tree.root, _.left, _.right, _.key.asInstanceOf[Long])
+ case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree.root, _.left, _.right, _.key.asInstanceOf[Double])
+ case _ => shape.parUnbox(AnyBinaryTreeStepper.from[A, T](size, tree.root, _.left, _.right, _.key))
+ }
+ s.asInstanceOf[S with EfficientSplit]
+ }
+
+ def addOne(elem: A): this.type = {
+ RB.insert(tree, elem, null)
+ this
+ }
+
+ def subtractOne(elem: A): this.type = {
+ RB.delete(tree, elem)
+ this
+ }
+
+ def clear(): Unit = RB.clear(tree)
+
+ def contains(elem: A): Boolean = RB.contains(tree, elem)
+
+ def unconstrained: collection.Set[A] = this
+
+ def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSetProjection(from, until)
+
+ override protected[this] def className: String = "TreeSet"
+
+ override def size: Int = RB.size(tree)
+ override def knownSize: Int = size
+ override def isEmpty: Boolean = RB.isEmpty(tree)
+
+ override def head: A = RB.minKey(tree).get
+
+ override def last: A = RB.maxKey(tree).get
+
+ override def minAfter(key: A): Option[A] = RB.minKeyAfter(tree, key)
+
+ override def maxBefore(key: A): Option[A] = RB.maxKeyBefore(tree, key)
+
+ override def foreach[U](f: A => U): Unit = RB.foreachKey(tree, f)
+
+
+ /**
+ * A ranged projection of a [[TreeSet]]. Mutations on this set affect the original set and vice versa.
+ *
+   * Only keys within this projection's key range will ever appear as elements of this set, independently of whether
+ * the elements are added through the original set or through this view. That means that if one inserts an element in
+ * a view whose key is outside the view's bounds, calls to `contains` will _not_ consider the newly added element.
+ * Mutations are always reflected in the original set, though.
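+   *
+   * For example:
+   * {{{
+   * val s = TreeSet(1, 5)
+   * val v = s.rangeImpl(Some(2), Some(10)) // view of the elements in [2, 10)
+   * v += 3        // also visible in s
+   * v.contains(1) // false: 1 is outside the view
+   * s.contains(3) // true
+   * }}}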
+ *
+ * @param from the lower bound (inclusive) of this projection wrapped in a `Some`, or `None` if there is no lower
+ * bound.
+ * @param until the upper bound (exclusive) of this projection wrapped in a `Some`, or `None` if there is no upper
+ * bound.
+ */
+ private[this] final class TreeSetProjection(from: Option[A], until: Option[A]) extends TreeSet[A](tree) {
+
+ /**
+ * Given a possible new lower bound, chooses and returns the most constraining one (the maximum).
+ */
+ private[this] def pickLowerBound(newFrom: Option[A]): Option[A] = (from, newFrom) match {
+ case (Some(fr), Some(newFr)) => Some(ordering.max(fr, newFr))
+ case (None, _) => newFrom
+ case _ => from
+ }
+
+ /**
+ * Given a possible new upper bound, chooses and returns the most constraining one (the minimum).
+ */
+ private[this] def pickUpperBound(newUntil: Option[A]): Option[A] = (until, newUntil) match {
+ case (Some(unt), Some(newUnt)) => Some(ordering.min(unt, newUnt))
+ case (None, _) => newUntil
+ case _ => until
+ }
+
+ /**
+ * Returns true if the argument is inside the view bounds (between `from` and `until`).
+ */
+ private[this] def isInsideViewBounds(key: A): Boolean = {
+ val afterFrom = from.isEmpty || ordering.compare(from.get, key) <= 0
+ val beforeUntil = until.isEmpty || ordering.compare(key, until.get) < 0
+ afterFrom && beforeUntil
+ }
+
+ override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] =
+ new TreeSetProjection(pickLowerBound(from), pickUpperBound(until))
+
+ override def contains(key: A) = isInsideViewBounds(key) && RB.contains(tree, key)
+
+ override def iterator = RB.keysIterator(tree, from, until)
+ override def iteratorFrom(start: A) = RB.keysIterator(tree, pickLowerBound(Some(start)), until)
+
+ override def size = if (RB.size(tree) == 0) 0 else iterator.length
+ override def knownSize: Int = if (RB.size(tree) == 0) 0 else -1
+ override def isEmpty: Boolean = RB.size(tree) == 0 || !iterator.hasNext
+
+ override def head: A = headOption.get
+ override def headOption: Option[A] = {
+ val elem = if (from.isDefined) RB.minKeyAfter(tree, from.get) else RB.minKey(tree)
+ (elem, until) match {
+ case (Some(e), Some(unt)) if ordering.compare(e, unt) >= 0 => None
+ case _ => elem
+ }
+ }
+
+ override def last: A = lastOption.get
+ override def lastOption = {
+ val elem = if (until.isDefined) RB.maxKeyBefore(tree, until.get) else RB.maxKey(tree)
+ (elem, from) match {
+ case (Some(e), Some(fr)) if ordering.compare(e, fr) < 0 => None
+ case _ => elem
+ }
+ }
+
+ // Using the iterator should be efficient enough; if performance is deemed a problem later, a specialized
+ // `foreachKey(f, from, until)` method can be created in `RedBlackTree`. See
+ // https://github.com/scala/scala/pull/4608#discussion_r34307985 for a discussion about this.
+ override def foreach[U](f: A => U): Unit = iterator.foreach(f)
+
+ override def clone(): mutable.TreeSet[A] = super.clone().rangeImpl(from, until)
+
+ }
+
+}
+
+/**
+ * $factoryInfo
+ * @define Coll `mutable.TreeSet`
+ * @define coll mutable tree set
+ */
+@SerialVersionUID(3L)
+object TreeSet extends SortedIterableFactory[TreeSet] {
+
+ def empty[sealed A : Ordering]: TreeSet[A] = new TreeSet[A]()
+
+ def from[sealed E](it: IterableOnce[E]^)(implicit ordering: Ordering[E]): TreeSet[E] =
+ it match {
+ case ts: TreeSet[E] if ordering == ts.ordering =>
+ new TreeSet[E](ts.tree.treeCopy())
+ case ss: scala.collection.SortedSet[E] if ordering == ss.ordering =>
+ new TreeSet[E](RB.fromOrderedKeys(ss.iterator, ss.size))
+ case r: Range if (ordering eq Ordering.Int) || (ordering eq Ordering.Int.reverse) =>
+ val it = if((ordering eq Ordering.Int) == (r.step > 0)) r.iterator else r.reverseIterator
+ new TreeSet[E](RB.fromOrderedKeys(it.asInstanceOf[Iterator[E]], r.size))
+ case _ =>
+ val t: RB.Tree[E, Null] = RB.Tree.empty
+ val i = it.iterator
+ while (i.hasNext) RB.insert(t, i.next(), null)
+ new TreeSet[E](t)
+ }
+
+ def newBuilder[sealed A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new ReusableBuilder[A, TreeSet[A]] {
+ private[this] var tree: RB.Tree[A, Null] = RB.Tree.empty
+ def addOne(elem: A): this.type = { RB.insert(tree, elem, null); this }
+ def result(): TreeSet[A] = new TreeSet[A](tree)
+ def clear(): Unit = { tree = RB.Tree.empty }
+ }
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala
new file mode 100644
index 000000000000..2015b76a31b8
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/UnrolledBuffer.scala
@@ -0,0 +1,443 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+package mutable
+
+import scala.annotation.tailrec
+import scala.collection.generic.DefaultSerializable
+import scala.reflect.ClassTag
+import scala.collection.immutable.Nil
+import language.experimental.captureChecking
+
+/** A buffer that stores elements in an unrolled linked list.
+ *
+ * Unrolled linked lists store elements in linked fixed size
+ * arrays.
+ *
+ * Unrolled buffers retain locality and low memory overhead
+ * properties of array buffers, but offer much more efficient
+ * element addition, since they never reallocate and copy the
+ * internal array.
+ *
+ *  However, random access has `O(n/m)` complexity,
+ *  where `n` is the number of elements and `m` is the size of
+ *  the internal array chunks.
+ *
+ * Ideal to use when:
+ * - elements are added to the buffer and then all of the
+ * elements are traversed sequentially
+ * - two unrolled buffers need to be concatenated (see `concat`)
+ *
+ * Better than singly linked lists for random access, but
+ * should still be avoided for such a purpose.
+ *
+ * @define coll unrolled buffer
+ * @define Coll `UnrolledBuffer`
+ *
+ */
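+// A minimal usage sketch (illustrative only, not part of the library source):
+//   val buf = UnrolledBuffer(1, 2, 3)
+//   buf += 4        // appends in place; existing chunks are never reallocated
+//   buf(2)          // random access walks the chunk list: O(n/m)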
+@SerialVersionUID(3L)
+sealed class UnrolledBuffer[sealed T](implicit val tag: ClassTag[T])
+ extends AbstractBuffer[T]
+ with Buffer[T]
+ with Seq[T]
+ with SeqOps[T, UnrolledBuffer, UnrolledBuffer[T]]
+ with StrictOptimizedSeqOps[T, UnrolledBuffer, UnrolledBuffer[T]]
+ with EvidenceIterableFactoryDefaults[T, UnrolledBuffer, ClassTag]
+ with Builder[T, UnrolledBuffer[T]]
+ with DefaultSerializable {
+
+ import UnrolledBuffer.Unrolled
+
+ @transient private var headptr = newUnrolled
+ @transient private var lastptr = headptr
+ @transient private var sz = 0
+
+ private[collection] def headPtr = headptr
+ private[collection] def headPtr_=(head: Unrolled[T]) = headptr = head
+ private[collection] def lastPtr = lastptr
+ private[collection] def lastPtr_=(last: Unrolled[T]) = lastptr = last
+ private[collection] def size_=(s: Int) = sz = s
+
+ protected def evidenceIterableFactory: UnrolledBuffer.type = UnrolledBuffer
+ protected def iterableEvidence: ClassTag[T] = tag
+
+ override def iterableFactory: SeqFactory[UnrolledBuffer] = UnrolledBuffer.untagged
+
+ protected def newUnrolled = new Unrolled[T](this)
+
+  // The code below would allow more flexible behavior without requiring inheritance,
+  // which is risky because all the important internals are private.
+ // private var myLengthPolicy: Int => Int = x => x
+ //
+ // /** Specifies how the array lengths should vary.
+ // *
+ // * By default, `UnrolledBuffer` uses arrays of a fixed size. A length
+ // * policy can be given that changes this scheme to, for instance, an
+ // * exponential growth.
+ // *
+ // * @param nextLength computes the length of the next array from the length of the latest one
+ // */
+ // def setLengthPolicy(nextLength: Int => Int): Unit = { myLengthPolicy = nextLength }
+ private[collection] def calcNextLength(sz: Int) = sz // myLengthPolicy(sz)
+
+ def classTagCompanion = UnrolledBuffer
+
+ /** Concatenates the target unrolled buffer to this unrolled buffer.
+ *
+ * The specified buffer `that` is cleared after this operation. This is
+ * an O(1) operation.
+ *
+ * @param that the unrolled buffer whose elements are added to this buffer
+ */
+ def concat(that: UnrolledBuffer[T]) = {
+ // bind the two together
+ if (!lastptr.bind(that.headptr)) lastptr = that.lastPtr
+
+ // update size
+ sz += that.sz
+
+ // `that` is no longer usable, so clear it
+ // here we rely on the fact that `clear` allocates
+ // new nodes instead of modifying the previous ones
+ that.clear()
+
+ // return a reference to this
+ this
+ }
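+  // Illustrative sketch of the contract above: after `a.concat(b)`, `a` holds the
+  // elements of both buffers and `b` has been cleared.
+  //   val a = UnrolledBuffer(1, 2); val b = UnrolledBuffer(3, 4)
+  //   a.concat(b)   // a == UnrolledBuffer(1, 2, 3, 4); b.isEmpty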
+
+ def addOne(elem: T) = {
+ lastptr = lastptr.append(elem)
+ sz += 1
+ this
+ }
+
+ def clear(): Unit = {
+ headptr = newUnrolled
+ lastptr = headptr
+ sz = 0
+ }
+
+ def iterator: Iterator[T] = new AbstractIterator[T] {
+ var pos: Int = -1
+ var node: Unrolled[T] = headptr
+ scan()
+
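+    // Advances `pos` to the next stored element, crossing chunk boundaries as
+    // needed; leaves `node` null once the end of the chunk list is reached.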
+ private def scan(): Unit = {
+ pos += 1
+ while (pos >= node.size) {
+ pos = 0
+ node = node.next
+ if (node eq null) return
+ }
+ }
+ def hasNext = node ne null
+ def next() = if (hasNext) {
+ val r = node.array(pos)
+ scan()
+ r
+ } else Iterator.empty.next()
+ }
+
+ // this should be faster than the iterator
+ override def foreach[U](f: T => U) = headptr.foreach(f)
+
+ def result() = this
+
+ def length = sz
+
+ override def knownSize: Int = sz
+
+ def apply(idx: Int) =
+ if (idx >= 0 && idx < sz) headptr(idx)
+ else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})")
+
+ def update(idx: Int, newelem: T) =
+ if (idx >= 0 && idx < sz) headptr(idx) = newelem
+ else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})")
+
+ def mapInPlace(f: T => T): this.type = {
+ headptr.mapInPlace(f)
+ this
+ }
+
+ def remove(idx: Int) =
+ if (idx >= 0 && idx < sz) {
+ sz -= 1
+ headptr.remove(idx, this)
+ } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})")
+
+ @tailrec final def remove(idx: Int, count: Int): Unit =
+ if (count > 0) {
+ remove(idx)
+ remove(idx, count-1)
+ }
+
+ def prepend(elem: T) = {
+ headptr = headptr prepend elem
+ sz += 1
+ this
+ }
+
+ def insert(idx: Int, elem: T): Unit =
+ insertAll(idx, elem :: Nil)
+
+ def insertAll(idx: Int, elems: IterableOnce[T]^): Unit =
+ if (idx >= 0 && idx <= sz) {
+ sz += headptr.insertAll(idx, elems, this)
+ } else throw new IndexOutOfBoundsException(s"$idx is out of bounds (min 0, max ${sz-1})")
+
+ override def subtractOne(elem: T): this.type = {
+ if (headptr.subtractOne(elem, this)) {
+ sz -= 1
+ }
+ this
+ }
+
+ def patchInPlace(from: Int, patch: collection.IterableOnce[T]^, replaced: Int): this.type = {
+ remove(from, replaced)
+ insertAll(from, patch)
+ this
+ }
+
+ private def writeObject(out: java.io.ObjectOutputStream): Unit = {
+ out.defaultWriteObject
+ out writeInt sz
+ for (elem <- this) out writeObject elem
+ }
+
+ private def readObject(in: java.io.ObjectInputStream): Unit = {
+ in.defaultReadObject
+
+ val num = in.readInt
+
+ headPtr = newUnrolled
+ lastPtr = headPtr
+ sz = 0
+ var i = 0
+ while (i < num) {
+ this += in.readObject.asInstanceOf[T]
+ i += 1
+ }
+ }
+
+ override def clone(): UnrolledBuffer[T] = new UnrolledBuffer[T] ++= this
+
+ override protected[this] def className = "UnrolledBuffer"
+}
+
+
+@SerialVersionUID(3L)
+object UnrolledBuffer extends StrictOptimizedClassTagSeqFactory[UnrolledBuffer] { self =>
+
+ val untagged: SeqFactory[UnrolledBuffer] = new ClassTagSeqFactory.AnySeqDelegate(self)
+
+ def empty[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A]
+
+ def from[sealed A : ClassTag](source: scala.collection.IterableOnce[A]^): UnrolledBuffer[A] = newBuilder[A].addAll(source)
+
+ def newBuilder[sealed A : ClassTag]: UnrolledBuffer[A] = new UnrolledBuffer[A]
+
+ final val waterline: Int = 50
+
+ final def waterlineDenom: Int = 100
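+  // Taken together: two adjacent nodes are merged once their combined size drops
+  // below waterline / waterlineDenom (i.e. 50%) of the chunk capacity; see
+  // `Unrolled.tryMergeWithNext` below.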
+
+ @deprecated("Use waterlineDenom instead.", "2.13.0")
+ final val waterlineDelim: Int = waterlineDenom
+
+ private[collection] val unrolledlength = 32
+
+ /** Unrolled buffer node.
+ */
+ class Unrolled[sealed T: ClassTag] private[collection] (var size: Int, var array: Array[T], var next: Unrolled[T], val buff: UnrolledBuffer[T] = null) {
+ private[collection] def this() = this(0, new Array[T](unrolledlength), null, null)
+ private[collection] def this(b: UnrolledBuffer[T]) = this(0, new Array[T](unrolledlength), null, b)
+
+ private def nextlength = if (buff eq null) unrolledlength else buff.calcNextLength(array.length)
+
+    // appends the element and returns this node, or the new node if this one is full
+ @tailrec final def append(elem: T): Unrolled[T] = if (size < array.length) {
+ array(size) = elem
+ size += 1
+ this
+ } else {
+ next = new Unrolled[T](0, new Array[T](nextlength), null, buff)
+ next append elem
+ }
+ def foreach[U](f: T => U): Unit = {
+ var unrolled = this
+ var i = 0
+ while (unrolled ne null) {
+ val chunkarr = unrolled.array
+ val chunksz = unrolled.size
+ while (i < chunksz) {
+ val elem = chunkarr(i)
+ f(elem)
+ i += 1
+ }
+ i = 0
+ unrolled = unrolled.next
+ }
+ }
+ def mapInPlace(f: T => T): Unit = {
+ var unrolled = this
+ var i = 0
+ while (unrolled ne null) {
+ val chunkarr = unrolled.array
+ val chunksz = unrolled.size
+ while (i < chunksz) {
+ val elem = chunkarr(i)
+ chunkarr(i) = f(elem)
+ i += 1
+ }
+ i = 0
+ unrolled = unrolled.next
+ }
+ }
+ @tailrec final def apply(idx: Int): T =
+ if (idx < size) array(idx) else next.apply(idx - size)
+ @tailrec final def update(idx: Int, newelem: T): Unit =
+ if (idx < size) array(idx) = newelem else next.update(idx - size, newelem)
+ @tailrec final def locate(idx: Int): Unrolled[T] =
+ if (idx < size) this else next.locate(idx - size)
+ def prepend(elem: T) = if (size < array.length) {
+ // shift the elements of the array right
+ // then insert the element
+ shiftright()
+ array(0) = elem
+ size += 1
+ this
+ } else {
+ // allocate a new node and store element
+ // then make it point to this
+ val newhead = new Unrolled[T](buff)
+ newhead append elem
+ newhead.next = this
+ newhead
+ }
+ // shifts right assuming enough space
+ private def shiftright(): Unit = {
+ var i = size - 1
+ while (i >= 0) {
+ array(i + 1) = array(i)
+ i -= 1
+ }
+ }
+    // removes and returns the element at `idx`, updating `buffer.lastPtr` if the last node changes
+ @tailrec final def remove(idx: Int, buffer: UnrolledBuffer[T]): T =
+ if (idx < size) {
+ // remove the element
+ // then try to merge with the next bucket
+ val r = array(idx)
+ shiftleft(idx)
+ size -= 1
+ if (tryMergeWithNext()) buffer.lastPtr = this
+ r
+ } else next.remove(idx - size, buffer)
+
+ @tailrec final def subtractOne(elem: T, buffer: UnrolledBuffer[T]): Boolean = {
+ var i = 0
+ while (i < size) {
+ if(array(i) == elem) {
+ remove(i, buffer)
+ return true
+ }
+ i += 1
+ }
+ if(next ne null) next.subtractOne(elem, buffer) else false
+ }
+
+    // shifts elements after `leftb` one position to the left (overwriting index `leftb`)
+ private def shiftleft(leftb: Int): Unit = {
+ var i = leftb
+ while (i < (size - 1)) {
+ array(i) = array(i + 1)
+ i += 1
+ }
+ nullout(i, i + 1)
+ }
+ protected def tryMergeWithNext() = if (next != null && (size + next.size) < (array.length * waterline / waterlineDenom)) {
+ // copy the next array, then discard the next node
+ Array.copy(next.array, 0, array, size, next.size)
+ size = size + next.size
+ next = next.next
+ if (next eq null) true else false // checks if last node was thrown out
+ } else false
+
+ @tailrec final def insertAll(idx: Int, t: scala.collection.IterableOnce[T]^, buffer: UnrolledBuffer[T]): Int = {
+ if (idx < size) {
+ // divide this node at the appropriate position and insert all into head
+ // update new next
+ val newnextnode = new Unrolled[T](0, new Array(array.length), null, buff)
+ Array.copy(array, idx, newnextnode.array, 0, size - idx)
+ newnextnode.size = size - idx
+ newnextnode.next = next
+
+ // update this
+ nullout(idx, size)
+ size = idx
+ next = null
+
+ // insert everything from iterable to this
+ var curr = this
+ var appended = 0
+ for (elem <- t.iterator) {
+ curr = curr append elem
+ appended += 1
+ }
+ curr.next = newnextnode
+
+ // try to merge the last node of this with the newnextnode and fix tail pointer if needed
+ if (curr.tryMergeWithNext()) buffer.lastPtr = curr
+ else if (newnextnode.next eq null) buffer.lastPtr = newnextnode
+ appended
+ }
+ else if (idx == size || (next eq null)) {
+ var curr = this
+ var appended = 0
+ for (elem <- t.iterator) {
+ curr = curr append elem
+ appended += 1
+ }
+ appended
+ }
+ else next.insertAll(idx - size, t, buffer)
+ }
+
+ private def nullout(from: Int, until: Int): Unit = {
+ var idx = from
+ while (idx < until) {
+ array(idx) = null.asInstanceOf[T] // TODO find a way to assign a default here!!
+ idx += 1
+ }
+ }
+
+    // assumes this is the last node
+    // `thathead` is the head node of the other unrolled list;
+    // returns true if this node remains the last node after merging
+ def bind(thathead: Unrolled[T]) = {
+ assert(next eq null)
+ next = thathead
+ tryMergeWithNext()
+ }
+
+ override def toString: String =
+ array.take(size).mkString("Unrolled@%08x".format(System.identityHashCode(this)) + "[" + size + "/" + array.length + "](", ", ", ")") + " -> " + (if (next ne null) next.toString else "")
+ }
+}
+
+// This is used by scala.collection.parallel.mutable.UnrolledParArrayCombiner:
+// Todo -- revisit whether inheritance is the best way to achieve this functionality
+private[collection] class DoublingUnrolledBuffer[sealed T](implicit t: ClassTag[T]) extends UnrolledBuffer[T]()(t) {
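+  // Chunk lengths start at 4 and double (4, 8, ..., 8192, 16384) until they reach
+  // 10000 or more, after which they stay constant.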
+ override def calcNextLength(sz: Int) = if (sz < 10000) sz * 2 else sz
+ override protected def newUnrolled = new UnrolledBuffer.Unrolled[T](0, new Array[T](4), null, this)
+}
diff --git a/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala
new file mode 100644
index 000000000000..a9498b7fc69b
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/WeakHashMap.scala
@@ -0,0 +1,56 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+package collection
+package mutable
+
+import scala.annotation.nowarn
+import scala.collection.convert.JavaCollectionWrappers.{JMapWrapper, JMapWrapperLike}
+import language.experimental.captureChecking
+
+/** A hash map with references to entries which are weakly reachable. Entries are
+ * removed from this map when the key is no longer (strongly) referenced. This class wraps
+ * `java.util.WeakHashMap`.
+ *
+ * @tparam K type of keys contained in this map
+ * @tparam V type of values associated with the keys
+ *
+ * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]]
+ * section on `Weak Hash Maps` for more information.
+ *
+ * @define Coll `WeakHashMap`
+ * @define coll weak hash map
+ * @define mayNotTerminateInf
+ * @define willNotTerminateInf
+ */
+@SerialVersionUID(3L)
+class WeakHashMap[sealed K, sealed V] extends JMapWrapper[K, V](new java.util.WeakHashMap)
+ with JMapWrapperLike[K, V, WeakHashMap, WeakHashMap[K, V]]
+ with MapFactoryDefaults[K, V, WeakHashMap, Iterable] {
+ override def empty = new WeakHashMap[K, V]
+ override def mapFactory: MapFactory[WeakHashMap] = WeakHashMap
+ @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""")
+ override protected[this] def stringPrefix = "WeakHashMap"
+}
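+// Illustrative sketch (garbage collection is nondeterministic, so the removal
+// below is only eventual):
+//   val cache = WeakHashMap[Object, String]()
+//   var key = new Object
+//   cache(key) = "payload"
+//   key = null      // drop the only strong reference to the key
+//   System.gc()     // after a collection, the entry may disappear from `cache`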
+
+/** $factoryInfo
+ * @define Coll `WeakHashMap`
+ * @define coll weak hash map
+ */
+@SerialVersionUID(3L)
+object WeakHashMap extends MapFactory[WeakHashMap] {
+ def empty[sealed K, sealed V]: WeakHashMap[K,V] = new WeakHashMap[K, V]
+ def from[sealed K, sealed V](it: collection.IterableOnce[(K, V)]^): WeakHashMap[K,V] = Growable.from(empty[K, V], it)
+ def newBuilder[sealed K, sealed V]: Builder[(K, V), WeakHashMap[K,V]] = new GrowableBuilder(WeakHashMap.empty[K, V])
+}
+
diff --git a/tests/pos-special/stdlib/collection/mutable/package.scala b/tests/pos-special/stdlib/collection/mutable/package.scala
new file mode 100644
index 000000000000..d658ca5bc65a
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/mutable/package.scala
@@ -0,0 +1,42 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala.collection
+import language.experimental.captureChecking
+
+
+package object mutable {
+ @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0")
+ type WrappedArray[X] = ArraySeq[X]
+ @deprecated("Use ArraySeq instead of WrappedArray; it can represent both, boxed and unboxed arrays", "2.13.0")
+ val WrappedArray = ArraySeq
+ @deprecated("Use Iterable instead of Traversable", "2.13.0")
+ type Traversable[X] = Iterable[X]
+ @deprecated("Use Iterable instead of Traversable", "2.13.0")
+ val Traversable = Iterable
+ @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0")
+ type ArrayStack[X] = Stack[X]
+ @deprecated("Use Stack instead of ArrayStack; it now uses an array-based implementation", "2.13.0")
+ val ArrayStack = Stack
+
+ @deprecated("mutable.LinearSeq has been removed; use LinearSeq with mutable.Seq instead", "2.13.0")
+ type LinearSeq[X] = Seq[X] with scala.collection.LinearSeq[X]
+
+ @deprecated("GrowingBuilder has been renamed to GrowableBuilder", "2.13.0")
+ type GrowingBuilder[Elem, To <: Growable[Elem]] = GrowableBuilder[Elem, To]
+
+ @deprecated("IndexedOptimizedSeq has been renamed to IndexedSeq", "2.13.0")
+ type IndexedOptimizedSeq[A] = IndexedSeq[A]
+
+ @deprecated("IndexedOptimizedBuffer has been renamed to IndexedBuffer", "2.13.0")
+ type IndexedOptimizedBuffer[A] = IndexedBuffer[A]
+}
diff --git a/tests/pos-special/stdlib/collection/package.scala b/tests/pos-special/stdlib/collection/package.scala
new file mode 100644
index 000000000000..ad4686be1fb2
--- /dev/null
+++ b/tests/pos-special/stdlib/collection/package.scala
@@ -0,0 +1,81 @@
+/*
+ * Scala (https://www.scala-lang.org)
+ *
+ * Copyright EPFL and Lightbend, Inc.
+ *
+ * Licensed under Apache License 2.0
+ * (http://www.apache.org/licenses/LICENSE-2.0).
+ *
+ * See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.
+ */
+
+package scala
+import language.experimental.captureChecking
+
+package object collection {
+ @deprecated("Use Iterable instead of Traversable", "2.13.0")
+ type Traversable[+X] = Iterable[X]
+ @deprecated("Use Iterable instead of Traversable", "2.13.0")
+ val Traversable = Iterable
+ @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0")
+ type TraversableOnce[+X] = IterableOnce[X]
+ @deprecated("Use IterableOnce instead of TraversableOnce", "2.13.0")
+ val TraversableOnce = IterableOnce
+ @deprecated("Use SeqOps instead of SeqLike", "2.13.0")
+ type SeqLike[A, T] = SeqOps[A, Seq, T]
+ @deprecated("Use SeqOps (for the methods) or IndexedSeqOps (for fast indexed access) instead of ArrayLike", "2.13.0")
+ type ArrayLike[A] = SeqOps[A, Seq, Seq[A]]
+
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ type GenTraversableOnce[+X] = IterableOnce[X]
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ val GenTraversableOnce = IterableOnce
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ type GenTraversable[+X] = Iterable[X]
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ val GenTraversable = Iterable
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ type GenIterable[+X] = Iterable[X]
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ val GenIterable = Iterable
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ type GenSeq[+X] = Seq[X]
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ val GenSeq = Seq
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ type GenSet[X] = Set[X]
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ val GenSet = Set
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ type GenMap[K, +V] = Map[K, V]
+ @deprecated("Gen* collection types have been removed", "2.13.0")
+ val GenMap = Map
+
+  /** Needed to circumvent a difficulty between dotty and scalac concerning
+   *  the right top type for a type parameter of kind * -> *.
+   *  In scalac, we can provide `Any`, since `Any` is kind-polymorphic; in dotty this is not allowed.
+   *  In dotty, we could provide the type lambda `[X] =>> Any`, but scalac does not understand that syntax.
+   */
+ private[scala] type AnyConstr[X] = Any
+
+ /** An extractor used to head/tail deconstruct sequences. */
+ object +: {
+ /** Splits a sequence into head +: tail.
+ * @return Some((head, tail)) if sequence is non-empty. None otherwise.
+ */
+ def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(A, C)] =
+ if(t.isEmpty) None
+ else Some(t.head -> t.tail)
+ }
+
+ /** An extractor used to init/last deconstruct sequences. */
+ object :+ {
+ /** Splits a sequence into init :+ last.
+ * @return Some((init, last)) if sequence is non-empty. None otherwise.
+ */
+ def unapply[A, CC[_] <: Seq[_], C <: SeqOps[A, CC, C]](t: C with SeqOps[A, CC, C]): Option[(C, A)] =
+ if(t.isEmpty) None
+ else Some(t.init -> t.last)
+ }
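+  // Illustrative uses of the extractors above:
+  //   List(1, 2, 3) match { case x +: xs => (x, xs) }   // (1, List(2, 3))
+  //   List(1, 2, 3) match { case xs :+ x => (xs, x) }   // (List(1, 2), 3)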
+}
diff --git a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala
index 71d007370fe7..1af7e5dd705a 100644
--- a/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala
+++ b/tests/pos-with-compiler-cc/backend/jvm/GenBCode.scala
@@ -26,7 +26,8 @@ import Decorators.em
import java.io.DataOutputStream
import java.nio.channels.ClosedByInterruptException
-import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler }
+import dotty.tools.tasty.{ TastyBuffer, TastyHeaderUnpickler, UnpicklerConfig }
+import dotty.tools.tasty.core.TastyUnpickler
import scala.tools.asm
import scala.tools.asm.Handle
@@ -285,7 +286,7 @@ class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrim
throw ex
finally outstream.close()
- val uuid = new TastyHeaderUnpickler(binary()).readHeader()
+ val uuid = new TastyHeaderUnpickler(TastyUnpickler.scala3CompilerConfig, binary()).readHeader()
val lo = uuid.getMostSignificantBits
val hi = uuid.getLeastSignificantBits
diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala
new file mode 100644
index 000000000000..07fe5a31ce5d
--- /dev/null
+++ b/tests/pos/typeclasses.scala
@@ -0,0 +1,198 @@
+class Common:
+
+ // this should go in Predef
+  infix type at [A <: { type This }, B] = A { type This = B }
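+  // For example, `Ord at Int` is `Ord { type This = Int }`, i.e. an Ord
+  // instance whose element type is Int.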
+
+ trait Ord:
+ type This
+ extension (x: This)
+ def compareTo(y: This): Int
+ def < (y: This): Boolean = compareTo(y) < 0
+ def > (y: This): Boolean = compareTo(y) > 0
+
+ trait SemiGroup:
+ type This
+ extension (x: This) def combine(y: This): This
+
+ trait Monoid extends SemiGroup:
+ def unit: This
+
+ trait Functor:
+ type This[A]
+ extension [A](x: This[A]) def map[B](f: A => B): This[B]
+
+ trait Monad extends Functor:
+ def pure[A](x: A): This[A]
+ extension [A](x: This[A])
+ def flatMap[B](f: A => This[B]): This[B]
+ def map[B](f: A => B) = x.flatMap(f `andThen` pure)
+end Common
+
+
+object Instances extends Common:
+
+/*
+ instance Int: Ord as intOrd with
+ extension (x: Int)
+ def compareTo(y: Int) =
+ if x < y then -1
+ else if x > y then +1
+ else 0
+*/
+ given intOrd: Ord with
+ type This = Int
+ extension (x: Int)
+ def compareTo(y: Int) =
+ if x < y then -1
+ else if x > y then +1
+ else 0
+/*
+ instance List[T: Ord]: Ord as listOrd with
+ extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match
+ case (Nil, Nil) => 0
+ case (Nil, _) => -1
+ case (_, Nil) => +1
+ case (x :: xs1, y :: ys1) =>
+ val fst = x.compareTo(y)
+ if (fst != 0) fst else xs1.compareTo(ys1)
+*/
+
+ // Proposed short syntax:
+ // given listOrd[T: Ord as ord]: Ord at T with
+  given listOrd[T](using ord: Ord { type This = T }): Ord with
+ type This = List[T]
+ extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match
+ case (Nil, Nil) => 0
+ case (Nil, _) => -1
+ case (_, Nil) => +1
+ case (x :: xs1, y :: ys1) =>
+ val fst = x.compareTo(y)
+ if (fst != 0) fst else xs1.compareTo(ys1)
+ end listOrd
+
+/*
+ instance List: Monad as listMonad with
+ extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] =
+ xs.flatMap(f)
+ def pure[A](x: A): List[A] =
+ List(x)
+*/
+
+ given listMonad: Monad with
+ type This[A] = List[A]
+ extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] =
+ xs.flatMap(f)
+ def pure[A](x: A): List[A] =
+ List(x)
+
+/*
+ type Reader[Ctx] = X =>> Ctx => X
+ instance Reader[Ctx: _]: Monad as readerMonad with
+ extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B =
+ ctx => f(r(ctx))(ctx)
+ def pure[A](x: A): Ctx => A =
+ ctx => x
+*/
+
+ given readerMonad[Ctx]: Monad with
+ type This[X] = Ctx => X
+ extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B =
+ ctx => f(r(ctx))(ctx)
+ def pure[A](x: A): Ctx => A =
+ ctx => x
+
+ extension (xs: Seq[String])
+ def longestStrings: Seq[String] =
+ val maxLength = xs.map(_.length).max
+ xs.filter(_.length == maxLength)
+
+ extension [T](xs: List[T])
+ def second = xs.tail.head
+ def third = xs.tail.tail.head
+
+ //Proposed short syntax:
+ //extension [M: Monad as m, A](xss: M[M[A]])
+ // def flatten: M[A] =
+ // xs.flatMap(identity)
+
+ extension [M, A](using m: Monad)(xss: m.This[m.This[A]])
+ def flatten: m.This[A] =
+ xss.flatMap(identity)
+
+ // Proposed short syntax:
+ //def maximum[T: Ord](xs: List[T]: T =
+ def maximum[T](xs: List[T])(using Ord at T): T =
+ xs.reduceLeft((x, y) => if (x < y) y else x)
+
+ // Proposed short syntax:
+ // def descending[T: Ord as asc]: Ord at T = new Ord:
+ def descending[T](using asc: Ord at T): Ord at T = new Ord:
+ type This = T
+ extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x)
+
+ // Proposed short syntax:
+ // def minimum[T: Ord](xs: List[T]) =
+ def minimum[T](xs: List[T])(using Ord at T) =
+ maximum(xs)(using descending)
+
+ def test(): Unit =
+ val xs = List(1, 2, 3)
+ println(maximum(xs))
+ println(maximum(xs)(using descending))
+ println(maximum(xs)(using descending(using intOrd)))
+ println(minimum(xs))
+
+// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html
+//
+// lines words chars
+// wc Scala: 30 115 853
+// wc Rust : 57 193 1466
+trait Animal:
+ type This
+ // Associated function signature; `This` refers to the implementor type.
+ def apply(name: String): This
+
+ // Method signatures; these will return a string.
+ extension (self: This)
+ def name: String
+ def noise: String
+ def talk(): Unit = println(s"$name, $noise")
+end Animal
+
+class Sheep(val name: String):
+ var isNaked = false
+ def shear() =
+ if isNaked then
+ println(s"$name is already naked...")
+ else
+ println(s"$name gets a haircut!")
+ isNaked = true
+
+/*
+instance Sheep: Animal with
+ def apply(name: String) = Sheep(name)
+ extension (self: This)
+ def name: String = self.name
+ def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!"
+ override def talk(): Unit =
+ println(s"$name pauses briefly... $noise")
+*/
+
+// Implement the `Animal` trait for `Sheep`.
+given Animal with
+ type This = Sheep
+ def apply(name: String) = Sheep(name)
+ extension (self: This)
+ def name: String = self.name
+ def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!"
+ override def talk(): Unit =
+ println(s"$name pauses briefly... $noise")
+
+/*
+
+ - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean
+ T is a fresh type variable (T can start with a capital letter).
+ - instance definitions
+ - `as m` syntax in context bounds and instance definitions
+
+*/
diff --git a/tests/pos/with-type-operator-future-migration.scala b/tests/pos/with-type-operator-future-migration.scala
new file mode 100644
index 000000000000..d6fe5205fd3d
--- /dev/null
+++ b/tests/pos/with-type-operator-future-migration.scala
@@ -0,0 +1,3 @@
+import scala.language.`future-migration`
+
+def foo: Int with String = ??? // warn
diff --git a/tests/pos/with-type-operator.scala b/tests/pos/with-type-operator.scala
new file mode 100644
index 000000000000..d1fa5e2c34b7
--- /dev/null
+++ b/tests/pos/with-type-operator.scala
@@ -0,0 +1,3 @@
+//> using options -Werror
+
+def foo: Int with String = ??? // warn
diff --git a/tests/rewrites/rewrites3x.check b/tests/rewrites/rewrites3x.check
new file mode 100644
index 000000000000..0e7e0193bdd3
--- /dev/null
+++ b/tests/rewrites/rewrites3x.check
@@ -0,0 +1,10 @@
+import scala.{collection as coll, runtime as _, *}
+import coll.*
+
+def f(xs: Int*) = xs.sum
+def test =
+ f(List(1, 2, 3)*)
+
+def g = { implicit (x: Int) =>
+ x + 1
+}
diff --git a/tests/rewrites/with-type-operator.check b/tests/rewrites/with-type-operator.check
new file mode 100644
index 000000000000..6d59e0eacb95
--- /dev/null
+++ b/tests/rewrites/with-type-operator.check
@@ -0,0 +1 @@
+def foo: Int & String = ???
diff --git a/tests/rewrites/with-type-operator.scala b/tests/rewrites/with-type-operator.scala
new file mode 100644
index 000000000000..6dbd8ded14ee
--- /dev/null
+++ b/tests/rewrites/with-type-operator.scala
@@ -0,0 +1 @@
+def foo: Int with String = ???
diff --git a/tests/run/Pouring.check b/tests/run/Pouring.check
index f07f29105c0b..c9ab84a226bb 100644
--- a/tests/run/Pouring.check
+++ b/tests/run/Pouring.check
@@ -1,2 +1 @@
-Vector(Empty(0), Empty(1), Fill(0), Fill(1), Pour(0,1), Pour(1,0))
-Fill(1) Pour(1,0) Empty(0) Pour(1,0) Fill(1) Pour(1,0) --> Vector(4, 6)
+Illegal command line: more arguments expected
diff --git a/tests/run/Pouring.scala b/tests/run/Pouring.scala
index 6f4611af8bfc..5bb2a92ff8e3 100644
--- a/tests/run/Pouring.scala
+++ b/tests/run/Pouring.scala
@@ -1,37 +1,35 @@
-class Pouring(capacity: Vector[Int]):
- type Glass = Int
- type Content = Vector[Int]
+type Glass = Int
+type Levels = Vector[Int]
- enum Move:
- def apply(content: Content): Content = this match
- case Empty(g) => content.updated(g, 0)
- case Fill(g) => content.updated(g, capacity(g))
- case Pour(from, to) =>
- val amount = content(from) min (capacity(to) - content(to))
- extension (s: Content) def adjust(g: Glass, delta: Int) = s.updated(g, s(g) + delta)
- content.adjust(from, -amount).adjust(to, amount)
+class Pouring(capacity: Levels):
+ enum Move:
case Empty(glass: Glass)
case Fill(glass: Glass)
case Pour(from: Glass, to: Glass)
+
+ def apply(levels: Levels): Levels = this match
+ case Empty(glass) =>
+ levels.updated(glass, 0)
+ case Fill(glass) =>
+ levels.updated(glass, capacity(glass))
+ case Pour(from, to) =>
+ val amount = levels(from) min (capacity(to) - levels(to))
+ levels.updated(from, levels(from) - amount)
+ .updated(to, levels(to) + amount)
end Move
+ val glasses = 0 until capacity.length
val moves =
- val glasses = 0 until capacity.length
-
- (for g <- glasses yield Move.Empty(g))
+ (for g <- glasses yield Move.Empty(g))
++ (for g <- glasses yield Move.Fill(g))
++ (for g1 <- glasses; g2 <- glasses if g1 != g2 yield Move.Pour(g1, g2))
- class Path(history: List[Move], val endContent: Content):
+ class Path(history: List[Move], val endContent: Levels):
def extend(move: Move) = Path(move :: history, move(endContent))
override def toString = s"${history.reverse.mkString(" ")} --> $endContent"
- end Path
-
- val initialContent: Content = capacity.map(x => 0)
- val initialPath = Path(Nil, initialContent)
- def from(paths: Set[Path], explored: Set[Content]): LazyList[Set[Path]] =
+ def from(paths: Set[Path], explored: Set[Levels]): LazyList[Set[Path]] =
if paths.isEmpty then LazyList.empty
else
val extensions =
@@ -44,6 +42,8 @@ class Pouring(capacity: Vector[Int]):
paths #:: from(extensions, explored ++ extensions.map(_.endContent))
def solutions(target: Int): LazyList[Path] =
+ val initialContent: Levels = capacity.map(_ => 0)
+ val initialPath = Path(Nil, initialContent)
for
paths <- from(Set(initialPath), Set(initialContent))
path <- paths
@@ -51,7 +51,7 @@ class Pouring(capacity: Vector[Int]):
yield path
end Pouring
-@main def Test =
- val problem = Pouring(Vector(4, 7))
- println(problem.moves)
- println(problem.solutions(6).head)
+@main def Test(target: Int, capacities: Int*) =
+ val problem = Pouring(capacities.toVector)
+ println(s"Moves: ${problem.moves}")
+ println(s"Solution: ${problem.solutions(target).headOption}")