From 8a3868e462eb45e798f32b307d5b6ca91e01d645 Mon Sep 17 00:00:00 2001 From: Hamza Remmal Date: Wed, 29 Jan 2025 16:44:06 +0100 Subject: [PATCH] chore: first draft of the stdlib --- project/Build.scala | 7 +- ...Scala2LibraryBootstrappedMiMaFilters.scala | 105 +- project/ScalaLibraryPlugin.scala | 107 ++ .../src/scala/Enumeration.scala | 352 ++++ .../src/scala/collection/IterableOnce.scala | 1516 +++++++++++++++++ .../src/scala/collection/LinearSeq.scala | 314 ++++ .../collection/mutable/PriorityQueue.scala | 416 +++++ .../scala/concurrent/BatchingExecutor.scala | 272 +++ .../src/scala/concurrent/Channel.scala | 61 + .../src/scala/io/Source.scala | 382 +++++ 10 files changed, 3454 insertions(+), 78 deletions(-) create mode 100644 project/ScalaLibraryPlugin.scala create mode 100644 scala2-library-bootstrapped/src/scala/Enumeration.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/IterableOnce.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/LinearSeq.scala create mode 100644 scala2-library-bootstrapped/src/scala/collection/mutable/PriorityQueue.scala create mode 100644 scala2-library-bootstrapped/src/scala/concurrent/BatchingExecutor.scala create mode 100644 scala2-library-bootstrapped/src/scala/concurrent/Channel.scala create mode 100644 scala2-library-bootstrapped/src/scala/io/Source.scala diff --git a/project/Build.scala b/project/Build.scala index 463abab3f6fd..31e74841e155 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -20,6 +20,8 @@ import sbt.Package.ManifestAttributes import sbt.PublishBinPlugin.autoImport._ import dotty.tools.sbtplugin.RepublishPlugin import dotty.tools.sbtplugin.RepublishPlugin.autoImport._ +import dotty.tools.sbtplugin.ScalaLibraryPlugin + import sbt.plugins.SbtPlugin import sbt.ScriptedPlugin.autoImport._ import xerial.sbt.Sonatype.autoImport._ @@ -32,6 +34,8 @@ import sbtbuildinfo.BuildInfoPlugin.autoImport._ import sbttastymima.TastyMiMaPlugin import sbttastymima.TastyMiMaPlugin.autoImport._ +import scala.jdk.CollectionConverters._ + import scala.util.Properties.isJavaAtLeast import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ @@ -1207,9 +1211,9 @@ object Build { * This version of the library is not (yet) TASTy/binary compatible with the Scala 2 compiled library. */ lazy val `scala2-library-bootstrapped` = project.in(file("scala2-library-bootstrapped")). + enablePlugins(ScalaLibraryPlugin). withCommonSettings(Bootstrapped). dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test"). - settings(commonBootstrappedSettings). settings(scala2LibraryBootstrappedSettings). settings(moduleName := "scala2-library") // -Ycheck:all is set in project/scripts/scala2-library-tasty-mima.sh @@ -1221,7 +1225,6 @@ object Build { lazy val `scala2-library-cc` = project.in(file("scala2-library-cc")). withCommonSettings(Bootstrapped). dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test"). - settings(commonBootstrappedSettings). settings(scala2LibraryBootstrappedSettings). 
settings( moduleName := "scala2-library-cc", diff --git a/project/Scala2LibraryBootstrappedMiMaFilters.scala b/project/Scala2LibraryBootstrappedMiMaFilters.scala index 102a2a50e9d4..245c987fd705 100644 --- a/project/Scala2LibraryBootstrappedMiMaFilters.scala +++ b/project/Scala2LibraryBootstrappedMiMaFilters.scala @@ -4,82 +4,35 @@ import com.typesafe.tools.mima.core._ object Scala2LibraryBootstrappedMiMaFilters { val BackwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( - Build.stdlibBootstrappedVersion -> { - Seq( - // Files that are not compiled in the bootstrapped library - ProblemFilters.exclude[MissingClassProblem]("scala.AnyVal"), - - // Scala language features - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language."), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language#experimental."), - ProblemFilters.exclude[FinalClassProblem]("scala.language$experimental$"), - ProblemFilters.exclude[FinalClassProblem]("scala.languageFeature$*$"), - - // trait $init$ - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*.$init$"), - - // Value class extension methods - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*$extension"), - - // Companion module class - ProblemFilters.exclude[FinalClassProblem]("scala.*$"), - - // Scala 2 intrinsic macros - ProblemFilters.exclude[FinalMethodProblem]("scala.StringContext.s"), - - // Specialization? - ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple1._1"), // field _1 in class scala.Tuple1 does not have a correspondent in current version - ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple2._1"), // field _1 in class scala.Tuple2 does not have a correspondent in current version - ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple2._2"), // field _2 in class scala.Tuple2 does not have a correspondent in current version - - // Scala 2 specialization - ProblemFilters.exclude[MissingClassProblem]("scala.*$sp"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*$sp"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*#*#sp.$init$"), - ProblemFilters.exclude[MissingTypesProblem]("scala.collection.DoubleStepper"), - ProblemFilters.exclude[MissingTypesProblem]("scala.collection.immutable.DoubleVectorStepper"), - ProblemFilters.exclude[MissingTypesProblem]("scala.collection.immutable.IntVectorStepper"), - ProblemFilters.exclude[MissingTypesProblem]("scala.collection.immutable.LongVectorStepper"), - ProblemFilters.exclude[MissingTypesProblem]("scala.collection.IntStepper"), - ProblemFilters.exclude[MissingTypesProblem]("scala.collection.LongStepper"), - ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.DoubleAccumulator"), - ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.FunctionWrappers$*"), - ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.IntAccumulator"), - ProblemFilters.exclude[MissingTypesProblem]("scala.jdk.LongAccumulator"), - ProblemFilters.exclude[FinalClassProblem]("scala.collection.ArrayOps$ReverseIterator"), - ProblemFilters.exclude[FinalClassProblem]("scala.Tuple1"), - ProblemFilters.exclude[FinalClassProblem]("scala.Tuple2"), - - // other - ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueOrdering"), - ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueSet"), - ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.NoPositioner"), - ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPosition"), - ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPositioner"), - 
ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.SortedMapOps.coll"), - ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.empty"), - ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.fromSpecific"), - ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.ArrayBuilder#ofUnit.addAll"), - ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.empty"), - ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.fromSpecific"), - ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NothingManifest.newArray"), - ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NullManifest.newArray"), - ProblemFilters.exclude[MissingFieldProblem]("scala.collection.ArrayOps#ReverseIterator.xs"), - ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.NonLocalReturnControl.value"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.collection.immutable.SortedMapOps.coll"), - ) ++ - Seq( // DirectMissingMethodProblem - "scala.collection.LinearSeqIterator#LazyCell.this", - "scala.collection.mutable.PriorityQueue#ResizableArrayAccess.this", - "scala.concurrent.BatchingExecutor#AbstractBatch.this", - "scala.concurrent.Channel#LinkedList.this", - "scala.Enumeration#ValueOrdering.this", - "scala.io.Source#RelaxedPosition.this", - "scala.collection.IterableOnceOps#Maximized.this", // New in 2.13.11: private inner class - "scala.util.Properties.", - "scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5", - ).map(ProblemFilters.exclude[DirectMissingMethodProblem]) - } + Build.stdlibBootstrappedVersion -> Seq( + // Scala language features (not really a problem) + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language."), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.language#experimental."), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.util.Properties."), + + // Companion module class (not really a problem) + ProblemFilters.exclude[FinalClassProblem]("scala.*$"), + ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.NoPositioner"), + ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPosition"), + ProblemFilters.exclude[FinalMethodProblem]("scala.io.Source.RelaxedPositioner"), + ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueOrdering"), + ProblemFilters.exclude[FinalMethodProblem]("scala.Enumeration.ValueSet"), + ProblemFilters.exclude[FinalMethodProblem]("scala.StringContext.s"), + + // Need to be fixed + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*.$init$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.*$extension"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.SortedMapOps.coll"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.empty"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.immutable.TreeMap.fromSpecific"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.ArrayBuilder#ofUnit.addAll"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.empty"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.collection.mutable.TreeMap.fromSpecific"), + 
ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NothingManifest.newArray"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("scala.reflect.ManifestFactory#NullManifest.newArray"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.collection.immutable.SortedMapOps.coll"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5") + ) ) val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( diff --git a/project/ScalaLibraryPlugin.scala b/project/ScalaLibraryPlugin.scala new file mode 100644 index 000000000000..d06b59200099 --- /dev/null +++ b/project/ScalaLibraryPlugin.scala @@ -0,0 +1,107 @@ +package dotty.tools.sbtplugin + +import sbt.* +import sbt.Keys.* +import scala.jdk.CollectionConverters.* +import java.nio.file.Files + +object ScalaLibraryPlugin extends AutoPlugin { + + override def trigger = noTrigger + + val fetchScala2ClassFiles = taskKey[(Set[File], File)]("Fetch the files to use that were compiled with Scala 2") + //val scala2LibraryVersion = settingKey[String]("Version of the Scala 2 Standard Library") + + override def projectSettings = Seq ( + fetchScala2ClassFiles := { + val stream = streams.value + val cache = stream.cacheDirectory + val target = cache / "scala-library-classes" + val report = update.value + + val scalaLibraryBinaryJar = report.select( + configuration = configurationFilter(), + module = (_: ModuleID).name == "scala-library", + artifact = artifactFilter(`type` = "jar")).headOption.getOrElse { + sys.error(s"Could not fetch scala-library binary JAR") + } + + if (!target.exists()) { + IO.createDirectory(target) + } + + (FileFunction.cached(cache / "fetch-scala-library-classes", FilesInfo.lastModified, FilesInfo.exists) { _ => + stream.log.info(s"Unpacking scala-library binaries to persistent directory: ${target.getAbsolutePath}") + IO.unzip(scalaLibraryBinaryJar, target) + (target ** "*.class").get.toSet + } (Set(scalaLibraryBinaryJar)), target) + + }, + (Compile / compile) := { + val stream = streams.value + val target = (Compile / classDirectory).value + val (files, reference) = fetchScala2ClassFiles.value; + val analysis = (Compile / compile).value + stream.log.info(s"Copying files from Scala 2 Standard Library to $target") + for (file <- files; id <- file.relativeTo(reference).map(_.toString())) { + if (filesToCopy(id)) { + stream.log.debug(s"Copying file '${id}' to ${target / id}") + IO.copyFile(file, target / id) + } + } + + val overwrittenBinaries = Files.walk((Compile / classDirectory).value.toPath()) + .iterator() + .asScala + .map(_.toFile) + .map(_.relativeTo((Compile / classDirectory).value).get) + .toSet + val diff = files.filterNot(_.relativeTo(reference).exists(overwrittenBinaries)) + + IO.copy(diff.map { file => + file -> (Compile / classDirectory).value / file.relativeTo(reference).get.getPath + }) + + analysis + } + ) + + private lazy val filesToCopy = Set( + "scala/Tuple1.class", + "scala/Tuple2.class", + "scala/collection/DoubleStepper.class", + "scala/collection/IntStepper.class", + "scala/collection/LongStepper.class", + "scala/collection/immutable/DoubleVectorStepper.class", + "scala/collection/immutable/IntVectorStepper.class", + "scala/collection/immutable/LongVectorStepper.class", + "scala/jdk/DoubleAccumulator.class", + "scala/jdk/IntAccumulator.class", + "scala/jdk/LongAccumulator.class", + "scala/jdk/FunctionWrappers$FromJavaDoubleBinaryOperator.class", + 
"scala/jdk/FunctionWrappers$FromJavaBooleanSupplier.class", + "scala/jdk/FunctionWrappers$FromJavaDoubleConsumer.class", + "scala/jdk/FunctionWrappers$FromJavaDoublePredicate.class", + "scala/jdk/FunctionWrappers$FromJavaDoubleSupplier.class", + "scala/jdk/FunctionWrappers$FromJavaDoubleToIntFunction.class", + "scala/jdk/FunctionWrappers$FromJavaDoubleToLongFunction.class", + "scala/jdk/FunctionWrappers$FromJavaIntBinaryOperator.class", + "scala/jdk/FunctionWrappers$FromJavaDoubleUnaryOperator.class", + "scala/jdk/FunctionWrappers$FromJavaIntPredicate.class", + "scala/jdk/FunctionWrappers$FromJavaIntConsumer.class", + "scala/jdk/FunctionWrappers$FromJavaIntSupplier.class", + "scala/jdk/FunctionWrappers$FromJavaIntToDoubleFunction.class", + "scala/jdk/FunctionWrappers$FromJavaIntToLongFunction.class", + "scala/jdk/FunctionWrappers$FromJavaIntUnaryOperator.class", + "scala/jdk/FunctionWrappers$FromJavaLongBinaryOperator.class", + "scala/jdk/FunctionWrappers$FromJavaLongConsumer.class", + "scala/jdk/FunctionWrappers$FromJavaLongPredicate.class", + "scala/jdk/FunctionWrappers$FromJavaLongSupplier.class", + "scala/jdk/FunctionWrappers$FromJavaLongToDoubleFunction.class", + "scala/jdk/FunctionWrappers$FromJavaLongToIntFunction.class", + "scala/jdk/FunctionWrappers$FromJavaLongUnaryOperator.class", + "scala/collection/ArrayOps$ReverseIterator.class", + "scala/runtime/NonLocalReturnControl.class", + ) + +} diff --git a/scala2-library-bootstrapped/src/scala/Enumeration.scala b/scala2-library-bootstrapped/src/scala/Enumeration.scala new file mode 100644 index 000000000000..f773f8daf0d9 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/Enumeration.scala @@ -0,0 +1,352 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +import scala.collection.{SpecificIterableFactory, StrictOptimizedIterableOps, View, immutable, mutable} +import java.lang.reflect.{Field => JField, Method => JMethod} + +import scala.annotation.{implicitNotFound, tailrec} +import scala.reflect.NameTransformer._ +import scala.util.matching.Regex +import java.{util => ju} + +/** Defines a finite set of values specific to the enumeration. Typically + * these values enumerate all possible forms something can take and provide + * a lightweight alternative to case classes. + * + * Each call to a `Value` method adds a new unique value to the enumeration. + * To be accessible, these values are usually defined as `val` members of + * the enumeration. + * + * All values in an enumeration share a common, unique type defined as the + * `Value` type member of the enumeration (`Value` selected on the stable + * identifier path of the enumeration instance). + * + * Values SHOULD NOT be added to an enumeration after its construction; + * doing so makes the enumeration thread-unsafe. If values are added to an + * enumeration from multiple threads (in a non-synchronized fashion) after + * construction, the behavior of the enumeration is undefined. + * + * @example {{{ + * // Define a new enumeration with a type alias and work with the full set of enumerated values + * object WeekDay extends Enumeration { + * type WeekDay = Value + * val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value + * } + * import WeekDay._ + * + * def isWorkingDay(d: WeekDay) = ! 
(d == Sat || d == Sun) + * + * WeekDay.values filter isWorkingDay foreach println + * // output: + * // Mon + * // Tue + * // Wed + * // Thu + * // Fri + * }}} + * + * @example {{{ + * // Example of adding attributes to an enumeration by extending the Enumeration.Val class + * object Planet extends Enumeration { + * protected case class PlanetVal(mass: Double, radius: Double) extends super.Val { + * def surfaceGravity: Double = Planet.G * mass / (radius * radius) + * def surfaceWeight(otherMass: Double): Double = otherMass * surfaceGravity + * } + * import scala.language.implicitConversions + * implicit def valueToPlanetVal(x: Value): PlanetVal = x.asInstanceOf[PlanetVal] + * + * val G: Double = 6.67300E-11 + * val Mercury = PlanetVal(3.303e+23, 2.4397e6) + * val Venus = PlanetVal(4.869e+24, 6.0518e6) + * val Earth = PlanetVal(5.976e+24, 6.37814e6) + * val Mars = PlanetVal(6.421e+23, 3.3972e6) + * val Jupiter = PlanetVal(1.9e+27, 7.1492e7) + * val Saturn = PlanetVal(5.688e+26, 6.0268e7) + * val Uranus = PlanetVal(8.686e+25, 2.5559e7) + * val Neptune = PlanetVal(1.024e+26, 2.4746e7) + * } + * + * println(Planet.values.filter(_.radius > 7.0e6)) + * // output: + * // Planet.ValueSet(Jupiter, Saturn, Uranus, Neptune) + * }}} + * + * @param initial The initial value from which to count the integers that + * identifies values at run-time. + */ +@SerialVersionUID(8476000850333817230L) +abstract class Enumeration (initial: Int) extends Serializable { + thisenum => + + def this() = this(0) + + /* Note that `readResolve` cannot be private, since otherwise + the JVM does not invoke it when deserializing subclasses. */ + protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null) + + /** The name of this enumeration. + */ + override def toString: String = + ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split + Regex.quote(NAME_JOIN_STRING)).last + + /** The mapping from the integer used to identify values to the actual + * values. */ + private val vmap: mutable.Map[Int, Value] = new mutable.HashMap + + /** The cache listing all values of this enumeration. */ + @transient private var vset: ValueSet = null + @transient @volatile private var vsetDefined = false + + /** The mapping from the integer used to identify values to their + * names. */ + private[this] val nmap: mutable.Map[Int, String] = new mutable.HashMap + + /** The values of this enumeration as a set. + */ + def values: ValueSet = { + if (!vsetDefined) { + vset = (ValueSet.newBuilder ++= vmap.values).result() + vsetDefined = true + } + vset + } + + /** The integer to use to identify the next created value. */ + protected var nextId: Int = initial + + /** The string to use to name the next created value. */ + protected var nextName: Iterator[String] = _ + + private def nextNameOrNull = + if (nextName != null && nextName.hasNext) nextName.next() else null + + /** The highest integer amongst those used to identify values in this + * enumeration. */ + private[this] var topId = initial + + /** The lowest integer amongst those used to identify values in this + * enumeration, but no higher than 0. */ + private[this] var bottomId = if(initial < 0) initial else 0 + + /** The one higher than the highest integer amongst those used to identify + * values in this enumeration. */ + final def maxId = topId + + /** The value of this enumeration with given id `x` + */ + final def apply(x: Int): Value = vmap(x) + + /** Return a `Value` from this `Enumeration` whose name matches + * the argument `s`. 
The names are determined automatically via reflection. + * + * @param s an `Enumeration` name + * @return the `Value` of this `Enumeration` if its name matches `s` + * @throws NoSuchElementException if no `Value` with a matching + * name is in this `Enumeration` + */ + final def withName(s: String): Value = values.byName.getOrElse(s, + throw new NoSuchElementException(s"No value found for '$s'")) + + /** Creates a fresh value, part of this enumeration. */ + protected final def Value: Value = Value(nextId) + + /** Creates a fresh value, part of this enumeration, identified by the + * integer `i`. + * + * @param i An integer that identifies this value at run-time. It must be + * unique amongst all values of the enumeration. + * @return Fresh value identified by `i`. + */ + protected final def Value(i: Int): Value = Value(i, nextNameOrNull) + + /** Creates a fresh value, part of this enumeration, called `name`. + * + * @param name A human-readable name for that value. + * @return Fresh value called `name`. + */ + protected final def Value(name: String): Value = Value(nextId, name) + + /** Creates a fresh value, part of this enumeration, called `name` + * and identified by the integer `i`. + * + * @param i An integer that identifies this value at run-time. It must be + * unique amongst all values of the enumeration. + * @param name A human-readable name for that value. + * @return Fresh value with the provided identifier `i` and name `name`. + */ + protected final def Value(i: Int, name: String): Value = new Val(i, name) + + private def populateNameMap(): Unit = { + @tailrec def getFields(clazz: Class[_], acc: Array[JField]): Array[JField] = { + if (clazz == null) + acc + else + getFields(clazz.getSuperclass, if (clazz.getDeclaredFields.isEmpty) acc else acc ++ clazz.getDeclaredFields) + } + val fields = getFields(getClass.getSuperclass, getClass.getDeclaredFields) + def isValDef(m: JMethod): Boolean = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType) + + // The list of possible Value methods: 0-args which return a conforming type + val methods: Array[JMethod] = getClass.getMethods filter (m => m.getParameterTypes.isEmpty && + classOf[Value].isAssignableFrom(m.getReturnType) && + m.getDeclaringClass != classOf[Enumeration] && + isValDef(m)) + methods foreach { m => + val name = m.getName + // invoke method to obtain actual `Value` instance + val value = m.invoke(this).asInstanceOf[Value] + // verify that outer points to the correct Enumeration: ticket #3616. + if (value.outerEnum eq thisenum) { + val id: Int = value.id + nmap += ((id, name)) + } + } + } + + /* Obtains the name for the value with id `i`. If no name is cached + * in `nmap`, it populates `nmap` using reflection. + */ + private def nameOf(i: Int): String = synchronized { nmap.getOrElse(i, { populateNameMap() ; nmap(i) }) } + + /** The type of the enumerated values. 
*/ + @SerialVersionUID(7091335633555234129L) + abstract class Value extends Ordered[Value] with Serializable { + /** the id and bit location of this enumeration value */ + def id: Int + /** a marker so we can tell whose values belong to whom come reflective-naming time */ + private[Enumeration] val outerEnum = thisenum + + override def compare(that: Value): Int = + if (this.id < that.id) -1 + else if (this.id == that.id) 0 + else 1 + override def equals(other: Any): Boolean = other match { + case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id) + case _ => false + } + override def hashCode: Int = id.## + + /** Create a ValueSet which contains this value and another one */ + def + (v: Value): ValueSet = ValueSet(this, v) + } + + /** A class implementing the [[scala.Enumeration.Value]] type. This class + * can be overridden to change the enumeration's naming and integer + * identification behaviour. + */ + @SerialVersionUID(0 - 3501153230598116017L) + protected class Val(i: Int, name: String) extends Value with Serializable { + def this(i: Int) = this(i, nextNameOrNull) + def this(name: String) = this(nextId, name) + def this() = this(nextId) + + assert(!vmap.isDefinedAt(i), "Duplicate id: " + i) + vmap(i) = this + vsetDefined = false + nextId = i + 1 + if (nextId > topId) topId = nextId + if (i < bottomId) bottomId = i + def id: Int = i + override def toString(): String = + if (name != null) name + else try thisenum.nameOf(i) + catch { case _: NoSuchElementException => "" } + + protected def readResolve(): AnyRef = { + val enumeration = thisenum.readResolve().asInstanceOf[Enumeration] + if (enumeration.vmap == null) this + else enumeration.vmap(i) + } + } + + /** An ordering by id for values of this set */ + implicit object ValueOrdering extends Ordering[Value] { + private val _ = Enumeration.this + def compare(x: Value, y: Value): Int = x compare y + } + + /** A class for sets of values. + * Iterating through this set will yield values in increasing order of their ids. + * + * @param nnIds The set of ids of values (adjusted so that the lowest value does + * not fall below zero), organized as a `BitSet`. 
+ * @define Coll `collection.immutable.SortedSet` + */ + @SerialVersionUID(7229671200427364242L) + class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet) + extends immutable.AbstractSet[Value] + with immutable.SortedSet[Value] + with immutable.SortedSetOps[Value, immutable.SortedSet, ValueSet] + with StrictOptimizedIterableOps[Value, immutable.Set, ValueSet] + with Serializable { + + implicit def ordering: Ordering[Value] = ValueOrdering + def rangeImpl(from: Option[Value], until: Option[Value]): ValueSet = + new ValueSet(nnIds.rangeImpl(from.map(_.id - bottomId), until.map(_.id - bottomId))) + + override def empty: ValueSet = ValueSet.empty + override def knownSize: Int = nnIds.size + override def isEmpty: Boolean = nnIds.isEmpty + def contains(v: Value): Boolean = nnIds contains (v.id - bottomId) + def incl (value: Value): ValueSet = new ValueSet(nnIds + (value.id - bottomId)) + def excl (value: Value): ValueSet = new ValueSet(nnIds - (value.id - bottomId)) + def iterator: Iterator[Value] = nnIds.iterator map (id => thisenum.apply(bottomId + id)) + override def iteratorFrom(start: Value): Iterator[Value] = nnIds iteratorFrom start.id map (id => thisenum.apply(bottomId + id)) + override def className: String = s"$thisenum.ValueSet" + /** Creates a bit mask for the zero-adjusted ids in this set as a + * new array of longs */ + def toBitMask: Array[Long] = nnIds.toBitMask + + override protected def fromSpecific(coll: IterableOnce[Value]): ValueSet = ValueSet.fromSpecific(coll) + override protected def newSpecificBuilder = ValueSet.newBuilder + + def map(f: Value => Value): ValueSet = fromSpecific(new View.Map(this, f)) + def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecific(new View.FlatMap(this, f)) + + // necessary for disambiguation: + override def map[B](f: Value => B)(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] = + super[SortedSet].map[B](f) + override def flatMap[B](f: Value => IterableOnce[B])(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] = + super[SortedSet].flatMap[B](f) + override def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(ValueSet.zipOrdMsg) ev: Ordering[(Value, B)]): immutable.SortedSet[(Value, B)] = + super[SortedSet].zip[B](that) + override def collect[B](pf: PartialFunction[Value, B])(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): immutable.SortedSet[B] = + super[SortedSet].collect[B](pf) + + @transient private[Enumeration] lazy val byName: Map[String, Value] = iterator.map( v => v.toString -> v).toMap + } + + /** A factory object for value sets */ + @SerialVersionUID(3L) + object ValueSet extends SpecificIterableFactory[Value, ValueSet] { + private final val ordMsg = "No implicit Ordering[${B}] found to build a SortedSet[${B}]. You may want to upcast to a Set[Value] first by calling `unsorted`." + private final val zipOrdMsg = "No implicit Ordering[${B}] found to build a SortedSet[(Value, ${B})]. You may want to upcast to a Set[Value] first by calling `unsorted`." 
+ + /** The empty value set */ + val empty: ValueSet = new ValueSet(immutable.BitSet.empty) + /** A value set containing all the values for the zero-adjusted ids + * corresponding to the bits in an array */ + def fromBitMask(elems: Array[Long]): ValueSet = new ValueSet(immutable.BitSet.fromBitMask(elems)) + /** A builder object for value sets */ + def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] { + private[this] val b = new mutable.BitSet + def addOne (x: Value) = { b += (x.id - bottomId); this } + def clear() = b.clear() + def result() = new ValueSet(b.toImmutable) + } + def fromSpecific(it: IterableOnce[Value]): ValueSet = + newBuilder.addAll(it).result() + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/IterableOnce.scala b/scala2-library-bootstrapped/src/scala/collection/IterableOnce.scala new file mode 100644 index 000000000000..7a724dc60b51 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/IterableOnce.scala @@ -0,0 +1,1516 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.tailrec +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.StringBuilder +import scala.language.implicitConversions +import scala.math.{Numeric, Ordering} +import scala.reflect.ClassTag +import scala.runtime.{AbstractFunction1, AbstractFunction2} + +/** + * A template trait for collections which can be traversed either once only + * or one or more times. + * + * Note: `IterableOnce` does not extend [[IterableOnceOps]]. This is different than the general + * design of the collections library, which uses the following pattern: + * {{{ + * trait Seq extends Iterable with SeqOps + * trait SeqOps extends IterableOps + * + * trait IndexedSeq extends Seq with IndexedSeqOps + * trait IndexedSeqOps extends SeqOps + * }}} + * + * The goal is to provide a minimal interface without any sequential operations. This allows + * third-party extension like Scala parallel collections to integrate at the level of IterableOnce + * without inheriting unwanted implementations. + * + * @define coll collection + */ +trait IterableOnce[+A] extends Any { + + /** An [[scala.collection.Iterator]] over the elements of this $coll. + * + * If an `IterableOnce` object is in fact an [[scala.collection.Iterator]], this method always returns itself, + * in its current state, but if it is an [[scala.collection.Iterable]], this method always returns a new + * [[scala.collection.Iterator]]. + */ + def iterator: Iterator[A] + + /** Returns a [[scala.collection.Stepper]] for the elements of this collection. + * + * The Stepper enables creating a Java stream to operate on the collection, see + * [[scala.jdk.StreamConverters]]. For collections holding primitive values, the Stepper can be + * used as an iterator which doesn't box the elements. + * + * The implicit [[scala.collection.StepperShape]] parameter defines the resulting Stepper type according to the + * element type of this collection. 
+ * + * - For collections of `Int`, `Short`, `Byte` or `Char`, an [[scala.collection.IntStepper]] is returned + * - For collections of `Double` or `Float`, a [[scala.collection.DoubleStepper]] is returned + * - For collections of `Long` a [[scala.collection.LongStepper]] is returned + * - For any other element type, an [[scala.collection.AnyStepper]] is returned + * + * Note that this method is overridden in subclasses and the return type is refined to + * `S with EfficientSplit`, for example [[scala.collection.IndexedSeqOps.stepper]]. For Steppers marked with + * [[scala.collection.Stepper.EfficientSplit]], the converters in [[scala.jdk.StreamConverters]] + * allow creating parallel streams, whereas bare Steppers can be converted only to sequential + * streams. + */ + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S = { + import convert.impl._ + val s = shape.shape match { + case StepperShape.IntShape => new IntIteratorStepper (iterator.asInstanceOf[Iterator[Int]]) + case StepperShape.LongShape => new LongIteratorStepper (iterator.asInstanceOf[Iterator[Long]]) + case StepperShape.DoubleShape => new DoubleIteratorStepper(iterator.asInstanceOf[Iterator[Double]]) + case _ => shape.seqUnbox(new AnyIteratorStepper[A](iterator)) + } + s.asInstanceOf[S] + } + + /** The number of elements in this $coll, if it can be cheaply computed, + * -1 otherwise. Cheaply usually means: Not requiring a collection traversal. + */ + def knownSize: Int = -1 +} + +final class IterableOnceExtensionMethods[A](private val it: IterableOnce[A]) extends AnyVal { + @deprecated("Use .iterator.withFilter(...) instead", "2.13.0") + def withFilter(f: A => Boolean): Iterator[A] = it.iterator.withFilter(f) + + @deprecated("Use .iterator.reduceLeftOption(...) instead", "2.13.0") + def reduceLeftOption(f: (A, A) => A): Option[A] = it.iterator.reduceLeftOption(f) + + @deprecated("Use .iterator.min instead", "2.13.0") + def min(implicit ord: Ordering[A]): A = it.iterator.min + + @deprecated("Use .iterator.nonEmpty instead", "2.13.0") + def nonEmpty: Boolean = it.iterator.nonEmpty + + @deprecated("Use .iterator.max instead", "2.13.0") + def max(implicit ord: Ordering[A]): A = it.iterator.max + + @deprecated("Use .iterator.reduceRight(...) instead", "2.13.0") + def reduceRight(f: (A, A) => A): A = it.iterator.reduceRight(f) + + @deprecated("Use .iterator.maxBy(...) instead", "2.13.0") + def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.maxBy(f) + + @deprecated("Use .iterator.reduceLeft(...) instead", "2.13.0") + def reduceLeft(f: (A, A) => A): A = it.iterator.reduceLeft(f) + + @deprecated("Use .iterator.sum instead", "2.13.0") + def sum(implicit num: Numeric[A]): A = it.iterator.sum + + @deprecated("Use .iterator.product instead", "2.13.0") + def product(implicit num: Numeric[A]): A = it.iterator.product + + @deprecated("Use .iterator.count(...) instead", "2.13.0") + def count(f: A => Boolean): Int = it.iterator.count(f) + + @deprecated("Use .iterator.reduceOption(...) instead", "2.13.0") + def reduceOption(f: (A, A) => A): Option[A] = it.iterator.reduceOption(f) + + @deprecated("Use .iterator.minBy(...) instead", "2.13.0") + def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = it.iterator.minBy(f) + + @deprecated("Use .iterator.size instead", "2.13.0") + def size: Int = it.iterator.size + + @deprecated("Use .iterator.forall(...) instead", "2.13.0") + def forall(f: A => Boolean): Boolean = it.iterator.forall(f) + + @deprecated("Use .iterator.collectFirst(...) 
instead", "2.13.0") + def collectFirst[B](f: PartialFunction[A, B]): Option[B] = it.iterator.collectFirst(f) + + @deprecated("Use .iterator.filter(...) instead", "2.13.0") + def filter(f: A => Boolean): Iterator[A] = it.iterator.filter(f) + + @deprecated("Use .iterator.exists(...) instead", "2.13.0") + def exists(f: A => Boolean): Boolean = it.iterator.exists(f) + + @deprecated("Use .iterator.copyToBuffer(...) instead", "2.13.0") + def copyToBuffer(dest: mutable.Buffer[A]): Unit = it.iterator.copyToBuffer(dest) + + @deprecated("Use .iterator.reduce(...) instead", "2.13.0") + def reduce(f: (A, A) => A): A = it.iterator.reduce(f) + + @deprecated("Use .iterator.reduceRightOption(...) instead", "2.13.0") + def reduceRightOption(f: (A, A) => A): Option[A] = it.iterator.reduceRightOption(f) + + @deprecated("Use .iterator.toIndexedSeq instead", "2.13.0") + def toIndexedSeq: IndexedSeq[A] = it.iterator.toIndexedSeq + + @deprecated("Use .iterator.foreach(...) instead", "2.13.0") + @`inline` def foreach[U](f: A => U): Unit = it match { + case it: Iterable[A] => it.foreach(f) + case _ => it.iterator.foreach(f) + } + + @deprecated("Use .iterator.to(factory) instead", "2.13.0") + def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(it) + + @deprecated("Use .iterator.to(ArrayBuffer) instead", "2.13.0") + def toBuffer[B >: A]: mutable.Buffer[B] = mutable.ArrayBuffer.from(it) + + @deprecated("Use .iterator.toArray", "2.13.0") + def toArray[B >: A: ClassTag]: Array[B] = it match { + case it: Iterable[B] => it.toArray[B] + case _ => it.iterator.toArray[B] + } + + @deprecated("Use .iterator.to(List) instead", "2.13.0") + def toList: immutable.List[A] = immutable.List.from(it) + + @deprecated("Use .iterator.to(Set) instead", "2.13.0") + @`inline` def toSet[B >: A]: immutable.Set[B] = immutable.Set.from(it) + + @deprecated("Use .iterator.to(Iterable) instead", "2.13.0") + @`inline` final def toTraversable: Traversable[A] = toIterable + + @deprecated("Use .iterator.to(Iterable) instead", "2.13.0") + @`inline` final def toIterable: Iterable[A] = Iterable.from(it) + + @deprecated("Use .iterator.to(Seq) instead", "2.13.0") + @`inline` def toSeq: immutable.Seq[A] = immutable.Seq.from(it) + + @deprecated("Use .iterator.to(LazyList) instead", "2.13.0") + @`inline` def toStream: immutable.Stream[A] = immutable.Stream.from(it) + + @deprecated("Use .iterator.to(Vector) instead", "2.13.0") + @`inline` def toVector: immutable.Vector[A] = immutable.Vector.from(it) + + @deprecated("Use .iterator.to(Map) instead", "2.13.0") + def toMap[K, V](implicit ev: A <:< (K, V)): immutable.Map[K, V] = + immutable.Map.from(it.asInstanceOf[IterableOnce[(K, V)]]) + + @deprecated("Use .iterator instead", "2.13.0") + @`inline` def toIterator: Iterator[A] = it.iterator + + @deprecated("Use .iterator.isEmpty instead", "2.13.0") + def isEmpty: Boolean = it match { + case it: Iterable[A] => it.isEmpty + case _ => it.iterator.isEmpty + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString(start: String, sep: String, end: String): String = it match { + case it: Iterable[A] => it.mkString(start, sep, end) + case _ => it.iterator.mkString(start, sep, end) + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString(sep: String): String = it match { + case it: Iterable[A] => it.mkString(sep) + case _ => it.iterator.mkString(sep) + } + + @deprecated("Use .iterator.mkString instead", "2.13.0") + def mkString: String = it match { + case it: Iterable[A] => it.mkString + case _ => it.iterator.mkString + 
} + + @deprecated("Use .iterator.find instead", "2.13.0") + def find(p: A => Boolean): Option[A] = it.iterator.find(p) + + @deprecated("Use .iterator.foldLeft instead", "2.13.0") + @`inline` def foldLeft[B](z: B)(op: (B, A) => B): B = it.iterator.foldLeft(z)(op) + + @deprecated("Use .iterator.foldRight instead", "2.13.0") + @`inline` def foldRight[B](z: B)(op: (A, B) => B): B = it.iterator.foldRight(z)(op) + + @deprecated("Use .iterator.fold instead", "2.13.0") + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = it.iterator.fold(z)(op) + + @deprecated("Use .iterator.foldLeft instead", "2.13.0") + @`inline` def /: [B](z: B)(op: (B, A) => B): B = foldLeft[B](z)(op) + + @deprecated("Use .iterator.foldRight instead", "2.13.0") + @`inline` def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) + + @deprecated("Use .iterator.map instead or consider requiring an Iterable", "2.13.0") + def map[B](f: A => B): IterableOnce[B] = it match { + case it: Iterable[A] => it.map(f) + case _ => it.iterator.map(f) + } + + @deprecated("Use .iterator.flatMap instead or consider requiring an Iterable", "2.13.0") + def flatMap[B](f: A => IterableOnce[B]): IterableOnce[B] = it match { + case it: Iterable[A] => it.flatMap(f) + case _ => it.iterator.flatMap(f) + } + + @deprecated("Use .iterator.sameElements instead", "2.13.0") + def sameElements[B >: A](that: IterableOnce[B]): Boolean = it.iterator.sameElements(that) +} + +object IterableOnce { + @inline implicit def iterableOnceExtensionMethods[A](it: IterableOnce[A]): IterableOnceExtensionMethods[A] = + new IterableOnceExtensionMethods[A](it) + + /** Computes the number of elements to copy to an array from a source IterableOnce + * + * @param srcLen the length of the source collection + * @param destLen the length of the destination array + * @param start the index in the destination array at which to start copying elements to + * @param len the requested number of elements to copy (we may only be able to copy less than this) + * @return the number of elements that will be copied to the destination array + */ + @inline private[collection] def elemsToCopyToArray(srcLen: Int, destLen: Int, start: Int, len: Int): Int = + math.max(math.min(math.min(len, srcLen), destLen - start), 0) + + /** Calls `copyToArray` on the given collection, regardless of whether or not it is an `Iterable`. */ + @inline private[collection] def copyElemsToArray[A, B >: A](elems: IterableOnce[A], + xs: Array[B], + start: Int = 0, + len: Int = Int.MaxValue): Int = + elems match { + case src: Iterable[A] => src.copyToArray[B](xs, start, len) + case src => src.iterator.copyToArray[B](xs, start, len) + } +} + +/** This implementation trait can be mixed into an `IterableOnce` to get the basic methods that are shared between + * `Iterator` and `Iterable`. The `IterableOnce` must support multiple calls to `iterator` but may or may not + * return the same `Iterator` every time. + * + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentReduce + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. 
+ * @define orderIndependentReduce + * + * Note: might return different results for different runs, unless either + * of the following conditions is met: (1) the operator is associative, + * and the underlying collection type is ordered; or (2) the operator is + * associative and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * @define willForceEvaluation + * Note: Even when applied to a view or a lazy collection it will always force the elements. + * @define consumesIterator + * After calling this method, one should discard the iterator it was called + * on. Using it is undefined and subject to change. + * @define undefinedOrder + * The order of applications of the operator is unspecified and may be nondeterministic. + * @define exactlyOnce + * Each element appears exactly once in the computation. + * @define coll collection + * + */ +trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => + /////////////////////////////////////////////////////////////// Abstract methods that must be implemented + + /** Produces a $coll containing cumulative results of applying the + * operator going left to right, including the initial value. + * + * $willNotTerminateInf + * $orderDependent + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] + + /** Selects all elements of this $coll which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new $coll consisting of all elements of this $coll that satisfy the given + * predicate `p`. The order of the elements is preserved. + */ + def filter(p: A => Boolean): C + + /** Selects all elements of this $coll which do not satisfy a predicate. + * + * @param pred the predicate used to test elements. + * @return a new $coll consisting of all elements of this $coll that do not satisfy the given + * predicate `pred`. Their order may not be preserved. + */ + def filterNot(pred: A => Boolean): C + + /** Selects the first `n` elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the first `n` elements of this $coll, + * or else the whole $coll, if it has less than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def take(n: Int): C + + /** Selects the longest prefix of elements that satisfy a predicate. + * + * The matching prefix starts with the first element of this $coll, + * and the element following the prefix is the first element that + * does not satisfy the predicate. The matching prefix may empty, + * so that this method returns an empty $coll. + * + * Example: + * + * {{{ + * scala> List(1, 2, 3, 100, 4).takeWhile(n => n < 10) + * val res0: List[Int] = List(1, 2, 3) + * + * scala> List(1, 2, 3, 100, 4).takeWhile(n => n == 0) + * val res1: List[Int] = List() + * }}} + * + * Use [[span]] to obtain both the prefix and suffix. + * Use [[filter]] to retain only those elements from the entire $coll that satisfy the predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. 
+ */ + def takeWhile(p: A => Boolean): C + + /** Selects all elements except the first `n` ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. + * @return a $coll consisting of all elements of this $coll except the first `n` ones, or else the + * empty $coll, if this $coll has less than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def drop(n: Int): C + + /** Selects all elements except the longest prefix that satisfies a predicate. + * + * The matching prefix starts with the first element of this $coll, + * and the element following the prefix is the first element that + * does not satisfy the predicate. The matching prefix may be empty, + * so that this method returns the entire $coll. + * + * Example: + * + * {{{ + * scala> List(1, 2, 3, 100, 4).dropWhile(n => n < 10) + * val res0: List[Int] = List(100, 4) + * + * scala> List(1, 2, 3, 100, 4).dropWhile(n => n == 0) + * val res1: List[Int] = List(1, 2, 3, 100, 4) + * }}} + * + * Use [[span]] to obtain both the prefix and suffix. + * Use [[filterNot]] to drop all elements that satisfy the predicate. + * + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest suffix of this $coll whose first element + * does not satisfy the predicate `p`. + */ + def dropWhile(p: A => Boolean): C + + /** Selects an interval of elements. The returned $coll is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * $orderDependent + * + * @param from the lowest index to include from this $coll. + * @param until the lowest index to EXCLUDE from this $coll. + * @return a $coll containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this $coll. + */ + def slice(from: Int, until: Int): C + + /** Builds a new $coll by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned $coll. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[B](f: A => B): CC[B] + + /** Builds a new $coll by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * For example: + * + * {{{ + * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+") + * }}} + * + * The type of the resulting collection is guided by the static type of $coll. This might + * cause unexpected results sometimes. For example: + * + * {{{ + * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set + * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet) + * + * // lettersOf will return a Set[Char], not a Seq + * def lettersOf(words: Seq[String]) = words.toSet flatMap ((word: String) => word.toSeq) + * + * // xs will be an Iterable[Int] + * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2) + * + * // ys will be a Map[Int, Int] + * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2) + * }}} + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned collection. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. 
+ */ + def flatMap[B](f: A => IterableOnce[B]): CC[B] + + /** Converts this $coll of iterable collections into + * a $coll formed by the elements of these iterable + * collections. + * + * The resulting collection's type will be guided by the + * type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(1, 2, 3) + * ).flatten + * // xs == List(1, 2, 3, 1, 2, 3) + * + * val ys = Set( + * List(1, 2, 3), + * List(3, 2, 1) + * ).flatten + * // ys == Set(1, 2, 3) + * }}} + * + * @tparam B the type of the elements of each iterable collection. + * @param asIterable an implicit conversion which asserts that the element + * type of this $coll is an `Iterable`. + * @return a new $coll resulting from concatenating all element ${coll}s. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] + + /** Builds a new $coll by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @tparam B the element type of the returned $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[B](pf: PartialFunction[A, B]): CC[B] + + /** Zips this $coll with its indices. + * + * @return A new $coll containing pairs consisting of all elements of this $coll paired with their index. + * Indices start at `0`. + * @example + * `List("a", "b", "c").zipWithIndex == List(("a", 0), ("b", 1), ("c", 2))` + */ + def zipWithIndex: CC[(A @uncheckedVariance, Int)] + + /** Splits this $coll into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but possibly more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * $orderDependent + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this $coll whose + * elements all satisfy `p`, and the rest of this $coll. + */ + def span(p: A => Boolean): (C, C) + + /** Splits this $coll into a prefix/suffix pair at a given position. + * + * Note: `c splitAt n` is equivalent to (but possibly more efficient than) + * `(c take n, c drop n)`. + * $orderDependent + * + * @param n the position at which to split. + * @return a pair of ${coll}s consisting of the first `n` + * elements of this $coll, and the other elements. + */ + def splitAt(n: Int): (C, C) = { + class Spanner extends runtime.AbstractFunction1[A, Boolean] { + var i = 0 + def apply(a: A) = i < n && { i += 1 ; true } + } + val spanner = new Spanner + span(spanner) + } + + /** Applies a side-effecting function to each element in this collection. + * Strict collections will apply `f` to their elements immediately, while lazy collections + * like Views and LazyLists will only apply `f` on each element if and when that element + * is evaluated, and each time that element is evaluated. + * + * @param f a function to apply to each element in this $coll + * @tparam U the return type of f + * @return The same logical collection as this + */ + def tapEach[U](f: A => U): C + + /////////////////////////////////////////////////////////////// Concrete methods based on iterator + + /** Tests whether this $coll is known to have a finite size. + * All strict collections are known to have finite size. 
For a non-strict + * collection such as `Stream`, the predicate returns `'''true'''` if all + * elements have been computed. It returns `'''false'''` if the stream is + * not yet evaluated to the end. Non-empty Iterators usually return + * `'''false'''` even if they were created from a collection with a known + * finite size. + * + * Note: many collection methods will not work on collections of infinite sizes. + * The typical failure mode is an infinite loop. These methods always attempt a + * traversal without checking first that `hasDefiniteSize` returns `'''true'''`. + * However, checking `hasDefiniteSize` can provide an assurance that size is + * well-defined and non-termination is not a concern. + * + * @deprecated This method is deprecated in 2.13 because it does not provide any + * actionable information. As noted above, even the collection library itself + * does not use it. When there is no guarantee that a collection is finite, it + * is generally best to attempt a computation anyway and document that it will + * not terminate for infinite collections rather than backing out because this + * would prevent performing the computation on collections that are in fact + * finite even though `hasDefiniteSize` returns `false`. + * + * @see method `knownSize` for a more useful alternative + * + * @return `'''true'''` if this collection is known to have finite size, + * `'''false'''` otherwise. + */ + @deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0") + def hasDefiniteSize: Boolean = true + + /** Tests whether this $coll can be repeatedly traversed. Always + * true for Iterables and false for Iterators unless overridden. + * + * @return `true` if it is repeatedly traversable, `false` otherwise. + */ + def isTraversableAgain: Boolean = false + + /** Applies `f` to each element for its side effects. + * Note: `U` parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val it = iterator + while(it.hasNext) f(it.next()) + } + + /** Tests whether a predicate holds for all elements of this $coll. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return `true` if this $coll is empty or the given predicate `p` + * holds for all elements of this $coll, otherwise `false`. + */ + def forall(p: A => Boolean): Boolean = { + var res = true + val it = iterator + while (res && it.hasNext) res = p(it.next()) + res + } + + /** Tests whether a predicate holds for at least one element of this $coll. + * + * $mayNotTerminateInf + * + * @param p the predicate used to test elements. + * @return `true` if the given predicate `p` is satisfied by at least one element of this $coll, otherwise `false` + */ + def exists(p: A => Boolean): Boolean = { + var res = false + val it = iterator + while (!res && it.hasNext) res = p(it.next()) + res + } + + /** Counts the number of elements in the $coll which satisfy a predicate. + * + * $willNotTerminateInf + * + * @param p the predicate used to test elements. + * @return the number of elements satisfying the predicate `p`. + */ + def count(p: A => Boolean): Int = { + var res = 0 + val it = iterator + while (it.hasNext) if (p(it.next())) res += 1 + res + } + + /** Finds the first element of the $coll satisfying a predicate, if any. + * + * $mayNotTerminateInf + * $orderDependent + * + * @param p the predicate used to test elements. 
+ * @return an option value containing the first element in the $coll + * that satisfies `p`, or `None` if none exists. + */ + def find(p: A => Boolean): Option[A] = { + val it = iterator + while (it.hasNext) { + val a = it.next() + if (p(a)) return Some(a) + } + None + } + + // in future, move to IndexedSeqOps + private def foldl[X >: A, B](seq: IndexedSeq[X], start: Int, z: B, op: (B, X) => B): B = { + @tailrec def loop(at: Int, end: Int, acc: B): B = + if (at == end) acc + else loop(at + 1, end, op(acc, seq(at))) + loop(start, seq.length, z) + } + + private def foldr[X >: A, B >: X](seq: IndexedSeq[X], op: (X, B) => B): B = { + @tailrec def loop(at: Int, acc: B): B = + if (at == 0) acc + else loop(at - 1, op(seq(at - 1), acc)) + loop(seq.length - 1, seq(seq.length - 1)) + } + + /** Applies the given binary operator `op` to the given initial value `z` and all + * elements of this $coll, going left to right. Returns the initial value if this $coll + * is empty. + * + * "Going left to right" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op( op( ... op( op(z, x,,1,,), x,,2,,) ... ), x,,n,,)`. + * + * If this collection is not ordered, then for each application of the operator, each + * right operand is an element. In addition, the leftmost operand is the initial + * value, and each other left operand is itself an application of the operator. The + * elements of this $coll and the initial value all appear exactly once in the + * computation. + * + * $orderDependent + * $willNotTerminateInf + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to `z` and all elements of this $coll, + * going left to right. Returns `z` if this $coll is empty. + */ + def foldLeft[B](z: B)(op: (B, A) => B): B = this match { + case seq: IndexedSeq[A @unchecked] => foldl[A, B](seq, 0, z, op) + case _ => + var result = z + val it = iterator + while (it.hasNext) { + result = op(result, it.next()) + } + result + } + + /** Applies the given binary operator `op` to all elements of this $coll and the given + * initial value `z`, going right to left. Returns the initial value if this $coll is + * empty. + * + * "Going right to left" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op(x,,1,,, op(x,,2,,, op( ... op(x,,n,,, z) ... )))`. + * + * If this collection is not ordered, then for each application of the operator, each + * left operand is an element. In addition, the rightmost operand is the initial + * value, and each other right operand is itself an application of the operator. The + * elements of this $coll and the initial value all appear exactly once in the + * computation. + * + * $orderDependent + * $willNotTerminateInf + * + * @param z An initial value. + * @param op A binary operator. + * @tparam B The result type of the binary operator. + * @return The result of applying `op` to all elements of this $coll and `z`, + * going right to left. Returns `z` if this $coll is empty. 
+ */ + def foldRight[B](z: B)(op: (A, B) => B): B = reversed.foldLeft(z)((b, a) => op(a, b)) + + @deprecated("Use foldLeft instead of /:", "2.13.0") + @`inline` final def /: [B](z: B)(op: (B, A) => B): B = foldLeft[B](z)(op) + + @deprecated("Use foldRight instead of :\\", "2.13.0") + @`inline` final def :\ [B](z: B)(op: (A, B) => B): B = foldRight[B](z)(op) + + /** Applies the given binary operator `op` to the given initial value `z` and all + * elements of this $coll. + * + * For each application of the operator, each operand is either an element of this + * $coll, the initial value, or another such application of the operator. + * $undefinedOrder $exactlyOnce The initial value may be used an arbitrary number of + * times, but at least once. + * + * If this collection is ordered, then for any application of the operator, the + * element(s) appearing in the left operand will precede those in the right. + * + * $orderIndependentReduce In either case, it is also necessary that the initial value + * be a neutral value for the operator, e.g. `Nil` for `List` concatenation or `1` for + * multiplication. + * + * The default implementation in `IterableOnce` is equivalent to `foldLeft` but may be + * overridden for more efficient traversal orders. + * + * $willNotTerminateInf + * + * @tparam A1 The type parameter for the binary operator, a supertype of `A`. + * @param z An initial value; may be used an arbitrary number of times in the + * computation of the result; must be a neutral value for `op` for the + * result to always be the same across runs. + * @param op A binary operator; must be associative for the result to always be the + * same across runs. + * @return The result of applying `op` between all the elements and `z`, or `z` + * if this $coll is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Applies the given binary operator `op` to all elements of this $coll. + * + * For each application of the operator, each operand is either an element of this + * $coll or another such application of the operator. $undefinedOrder $exactlyOnce + * + * If this collection is ordered, then for any application of the operator, the + * element(s) appearing in the left operand will precede those in the right. + * + * $orderIndependentReduce + * $willNotTerminateInf + * + * @tparam B The type parameter for the binary operator, a supertype of `A`. + * @param op A binary operator; must be associative for the result to always be the + * same across runs. + * @return The result of applying `op` between all the elements if the $coll is + * nonempty. + * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduce[B >: A](op: (B, B) => B): B = reduceLeft(op) + + /** If this $coll is nonempty, reduces it with the given binary operator `op`. + * + * The behavior is the same as [[reduce]] except that the value is `None` if the $coll + * is empty. $undefinedOrder $exactlyOnce + * + * $orderIndependentReduce + * $willNotTerminateInf + * + * @tparam B A type parameter for the binary operator, a supertype of `A`. + * @param op A binary operator; must be associative for the result to always be the + * same across runs. + * @return The result of reducing this $coll with `op` if the $coll is nonempty, + * inside a `Some`, and `None` otherwise. + */ + def reduceOption[B >: A](op: (B, B) => B): Option[B] = reduceLeftOption(op) + + /** Applies the given binary operator `op` to all elements of this $coll, going left to + * right. 
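A brief sketch of fold, reduce and reduceOption under the constraints stated above (a neutral initial value and an associative operator), assuming Lists of Ints:

    List(1, 2, 3).fold(0)(_ + _)         // 6; 0 is neutral for +
    List(1, 2, 3).reduce(_ max _)        // 3; no initial value needed
    List.empty[Int].reduceOption(_ + _)  // None rather than an UnsupportedOperationException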
+ * + * "Going left to right" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op( op( op( ... op(x,,1,,, x,,2,,) ... ), x,,n-1,,), x,,n,,)`. + * + * If this collection is not ordered, then for each application of the operator, each + * right operand is an element. In addition, the leftmost operand is the first element + * of this $coll and each other left operand is itself an application of the + * operator. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of applying `op` to all elements of this $coll, going + * left to right. + * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduceLeft[B >: A](op: (B, A) => B): B = this match { + case seq: IndexedSeq[A @unchecked] if seq.length > 0 => foldl(seq, 1, seq(0), op) + case _ if knownSize == 0 => throw new UnsupportedOperationException("empty.reduceLeft") + case _ => reduceLeftIterator[B](throw new UnsupportedOperationException("empty.reduceLeft"))(op) + } + private final def reduceLeftIterator[B >: A](onEmpty: => B)(op: (B, A) => B): B = { + val it = iterator + if (it.hasNext) { + var acc: B = it.next() + while (it.hasNext) + acc = op(acc, it.next()) + acc + } + else onEmpty + } + + /** Applies the given binary operator `op` to all elements of this $coll, going right to + * left. + * + * "Going right to left" only makes sense if this collection is ordered: then if + * `x,,1,,`, `x,,2,,`, ..., `x,,n,,` are the elements of this $coll, the result is + * `op(x,,1,,, op(x,,2,,, op( ... op(x,,n-1,,, x,,n,,) ... )))`. + * + * If this collection is not ordered, then for each application of the operator, each + * left operand is an element. In addition, the rightmost operand is the last element + * of this $coll and each other right operand is itself an application of the + * operator. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of applying `op` to all elements of this $coll, going + * right to left. + * @throws UnsupportedOperationException if this $coll is empty. + */ + def reduceRight[B >: A](op: (A, B) => B): B = this match { + case seq: IndexedSeq[A @unchecked] if seq.length > 0 => foldr[A, B](seq, op) + case _ if knownSize == 0 => throw new UnsupportedOperationException("empty.reduceRight") + case _ => reversed.reduceLeft[B]((x, y) => op(y, x)) // reduceLeftIterator + } + + /** If this $coll is nonempty, reduces it with the given binary operator `op`, going + * left to right. + * + * The behavior is the same as [[reduceLeft]] except that the value is `None` if the + * $coll is empty. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of reducing this $coll with `op` going left to right if + * the $coll is nonempty, inside a `Some`, and `None` otherwise. 
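To illustrate the direction of reduceLeft and reduceRight, a small sketch with subtraction, which is not associative, so the two directions give different results:

    List(1, 2, 3, 4).reduceLeft(_ - _)   // ((1 - 2) - 3) - 4 == -8
    List(1, 2, 3, 4).reduceRight(_ - _)  // 1 - (2 - (3 - 4)) == -2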
+ */ + def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = + knownSize match { + case -1 => reduceLeftOptionIterator[B](op) + case 0 => None + case _ => Some(reduceLeft(op)) + } + private final def reduceLeftOptionIterator[B >: A](op: (B, A) => B): Option[B] = reduceOptionIterator[A, B](iterator)(op) + private final def reduceOptionIterator[X >: A, B >: X](it: Iterator[X])(op: (B, X) => B): Option[B] = { + if (it.hasNext) { + var acc: B = it.next() + while (it.hasNext) + acc = op(acc, it.next()) + Some(acc) + } + else None + } + + /** If this $coll is nonempty, reduces it with the given binary operator `op`, going + * right to left. + * + * The behavior is the same as [[reduceRight]] except that the value is `None` if the + * $coll is empty. $exactlyOnce + * + * $orderDependentReduce + * $willNotTerminateInf + * + * @param op A binary operator. + * @tparam B The result type of the binary operator, a supertype of `A`. + * @return The result of reducing this $coll with `op` going right to left if + * the $coll is nonempty, inside a `Some`, and `None` otherwise. + */ + def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = + knownSize match { + case -1 => reduceOptionIterator[A, B](reversed.iterator)((x, y) => op(y, x)) + case 0 => None + case _ => Some(reduceRight(op)) + } + + /** Tests whether the $coll is empty. + * + * Note: The default implementation creates and discards an iterator. + * + * Note: Implementations in subclasses that are not repeatedly iterable must take + * care not to consume any elements when `isEmpty` is called. + * + * @return `true` if the $coll contains no elements, `false` otherwise. + */ + def isEmpty: Boolean = + knownSize match { + case -1 => !iterator.hasNext + case 0 => true + case _ => false + } + + /** Tests whether the $coll is not empty. + * + * @return `true` if the $coll contains at least one element, `false` otherwise. + */ + @deprecatedOverriding("nonEmpty is defined as !isEmpty; override isEmpty instead", "2.13.0") + def nonEmpty: Boolean = !isEmpty + + /** The size of this $coll. + * + * $willNotTerminateInf + * + * @return the number of elements in this $coll. + */ + def size: Int = + if (knownSize >= 0) knownSize + else { + val it = iterator + var len = 0 + while (it.hasNext) { len += 1; it.next() } + len + } + + @deprecated("Use `dest ++= coll` instead", "2.13.0") + @inline final def copyToBuffer[B >: A](dest: mutable.Buffer[B]): Unit = dest ++= this + + /** Copies elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with values of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0, Int.MaxValue) + + /** Copies elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with values of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index of xs. + * @tparam B the type of the elements of the array. 
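A quick sketch of how the size-related queries above behave, assuming the standard Vector and List implementations (Vector knows its size up front, List does not):

    Vector(1, 2, 3).knownSize  // 3: available without traversal
    List(1, 2, 3).knownSize    // -1: a List does not track its length
    List(1, 2, 3).size         // 3: computed by traversing once
    List.empty[Int].isEmpty    // true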
+ * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + @deprecatedOverriding("This should always forward to the 3-arg version of this method", since = "2.13.4") + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements to an array, returning the number of elements written. + * + * Fills the given array `xs` starting at index `start` with at most `len` elements of this $coll. + * + * Copying will stop once either all the elements of this $coll have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index of xs. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + * @return the number of elements written to the array + * + * @note Reuse: $consumesIterator + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val it = iterator + var i = start + val end = start + math.min(len, xs.length - start) + while (i < end && it.hasNext) { + xs(i) = it.next() + i += 1 + } + i - start + } + + /** Sums the elements of this collection. + * + * The default implementation uses `reduce` for a known non-empty collection, `foldLeft` otherwise. + * + * $willNotTerminateInf + * + * @param num an implicit parameter defining a set of numeric operations + * which includes the `+` operator to be used in forming the sum. + * @tparam B the result type of the `+` operator. + * @return the sum of all elements of this $coll with respect to the `+` operator in `num`. + */ + def sum[B >: A](implicit num: Numeric[B]): B = + knownSize match { + case -1 => foldLeft(num.zero)(num.plus) + case 0 => num.zero + case _ => reduce(num.plus) + } + + /** Multiplies together the elements of this collection. + * + * The default implementation uses `reduce` for a known non-empty collection, `foldLeft` otherwise. + * + * $willNotTerminateInf + * + * @param num an implicit parameter defining a set of numeric operations + * which includes the `*` operator to be used in forming the product. + * @tparam B the result type of the `*` operator. + * @return the product of all elements of this $coll with respect to the `*` operator in `num`. + */ + def product[B >: A](implicit num: Numeric[B]): B = + knownSize match { + case -1 => foldLeft(num.one)(num.times) + case 0 => num.one + case _ => reduce(num.times) + } + + /** Finds the smallest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the smallest element of this $coll with respect to the ordering `ord`. + * + */ + def min[B >: A](implicit ord: Ordering[B]): A = + knownSize match { + case -1 => reduceLeftIterator[A](throw new UnsupportedOperationException("empty.min"))(ord.min) + case 0 => throw new UnsupportedOperationException("empty.min") + case _ => reduceLeft(ord.min) + } + + /** Finds the smallest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @return an option value containing the smallest element of this $coll + * with respect to the ordering `ord`. 
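A compact sketch of the numeric folds and of copyToArray defined above, assuming Int elements and the default Numeric[Int] instance:

    List(1, 2, 3, 4).sum      // 10
    List(1, 2, 3, 4).product  // 24
    List(3, 1, 2).min         // 1
    val buf = new Array[Int](3)
    List(1, 2, 3, 4, 5).copyToArray(buf, 0, 3)  // returns 3; buf now holds Array(1, 2, 3)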
+ */ + def minOption[B >: A](implicit ord: Ordering[B]): Option[A] = + knownSize match { + case -1 => reduceLeftOptionIterator[A](ord.min) + case 0 => None + case _ => Some(reduceLeft(ord.min)) + } + + /** Finds the largest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the largest element of this $coll with respect to the ordering `ord`. + */ + def max[B >: A](implicit ord: Ordering[B]): A = + knownSize match { + case -1 => reduceLeftIterator[A](throw new UnsupportedOperationException("empty.max"))(ord.max) + case 0 => throw new UnsupportedOperationException("empty.max") + case _ => reduceLeft(ord.max) + } + + /** Finds the largest element. + * + * $willNotTerminateInf + * + * @param ord An ordering to be used for comparing elements. + * @tparam B The type over which the ordering is defined. + * @return an option value containing the largest element of this $coll with + * respect to the ordering `ord`. + */ + def maxOption[B >: A](implicit ord: Ordering[B]): Option[A] = + knownSize match { + case -1 => reduceLeftOptionIterator[A](ord.max) + case 0 => None + case _ => Some(reduceLeft(ord.max)) + } + + /** Finds the first element which yields the largest value measured by function `f`. + * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the first element of this $coll with the largest value measured by function `f` + * with respect to the ordering `cmp`. + */ + def maxBy[B](f: A => B)(implicit ord: Ordering[B]): A = + knownSize match { + case 0 => throw new UnsupportedOperationException("empty.maxBy") + case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).result + } + + private class Maximized[X, B](descriptor: String)(f: X => B)(cmp: (B, B) => Boolean) extends AbstractFunction2[Maximized[X, B], X, Maximized[X, B]] { + def this(outer: IterableOnceOps[?, ?, ?])(descriptor: String)(f: X => B)(cmp: (B, B) => Boolean) = this(descriptor)(f)(cmp) + + var maxElem: X = null.asInstanceOf[X] + var maxF: B = null.asInstanceOf[B] + var nonEmpty = false + def toOption: Option[X] = if (nonEmpty) Some(maxElem) else None + def result: X = if (nonEmpty) maxElem else throw new UnsupportedOperationException(s"empty.$descriptor") + def apply(m: Maximized[X, B], a: X): Maximized[X, B] = + if (m.nonEmpty) { + val fa = f(a) + if (cmp(fa, maxF)) { + maxF = fa + maxElem = a + } + m + } + else { + m.nonEmpty = true + m.maxElem = a + m.maxF = f(a) + m + } + } + + /** Finds the first element which yields the largest value measured by function `f`. + * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @return an option value containing the first element of this $coll with the + * largest value measured by function `f` with respect to the ordering `cmp`. + */ + def maxByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + knownSize match { + case 0 => None + case _ => foldLeft(new Maximized[A, B]("maxBy")(f)(ord.gt))((m, a) => m(m, a)).toOption + } + + /** Finds the first element which yields the smallest value measured by function `f`. 
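A small sketch of maxBy and maxByOption with a measuring function, assuming a List of Strings:

    val words = List("fold", "reduce", "map")
    words.maxBy(_.length)                     // "reduce": first element with the largest measure
    words.maxByOption(_.length)               // Some("reduce")
    List.empty[String].maxByOption(_.length)  // None rather than an exception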
+ * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @throws UnsupportedOperationException if this $coll is empty. + * @return the first element of this $coll with the smallest value measured by function `f` + * with respect to the ordering `cmp`. + */ + def minBy[B](f: A => B)(implicit ord: Ordering[B]): A = + knownSize match { + case 0 => throw new UnsupportedOperationException("empty.minBy") + case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).result + } + + /** Finds the first element which yields the smallest value measured by function `f`. + * + * $willNotTerminateInf + * + * @param cmp An ordering to be used for comparing elements. + * @tparam B The result type of the function `f`. + * @param f The measuring function. + * @return an option value containing the first element of this $coll + * with the smallest value measured by function `f` + * with respect to the ordering `cmp`. + */ + def minByOption[B](f: A => B)(implicit ord: Ordering[B]): Option[A] = + knownSize match { + case 0 => None + case _ => foldLeft(new Maximized[A, B]("minBy")(f)(ord.lt))((m, a) => m(m, a)).toOption + } + + /** Finds the first element of the $coll for which the given partial + * function is defined, and applies the partial function to it. + * + * $mayNotTerminateInf + * $orderDependent + * + * @param pf the partial function + * @return an option value containing pf applied to the first + * value for which it is defined, or `None` if none exists. + * @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)` + */ + def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = { + // Presumably the fastest way to get in and out of a partial function is for a sentinel function to return itself + // (Tested to be lower-overhead than runWith. Would be better yet to not need to (formally) allocate it) + val sentinel: scala.Function1[A, Any] = new AbstractFunction1[A, Any] { + def apply(a: A): AbstractFunction1[A, Any] = this + } + val it = iterator + while (it.hasNext) { + val x = pf.applyOrElse(it.next(), sentinel) + if (x.asInstanceOf[AnyRef] ne sentinel) return Some(x.asInstanceOf[B]) + } + None + } + + /** Aggregates the results of applying an operator to subsequent elements. + * + * Since this method degenerates to `foldLeft` for sequential (non-parallel) collections, + * where the combining operation is ignored, it is advisable to prefer `foldLeft` for that case. + * + * For [[https://github.com/scala/scala-parallel-collections parallel collections]], + * use the `aggregate` method specified by `scala.collection.parallel.ParIterableLike`. + * + * @param z the start value, a neutral element for `seqop`. + * @param seqop the binary operator used to accumulate the result. + * @param combop an associative operator for combining sequential results, unused for sequential collections. + * @tparam B the result type, produced by `seqop`, `combop`, and by this function as a final result. + */ + @deprecated("For sequential collections, prefer `foldLeft(z)(seqop)`. For parallel collections, use `ParIterableLike#aggregate`.", "2.13.0") + def aggregate[B](z: => B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop) + + /** Tests whether every element of this collection's iterator relates to the + * corresponding element of another collection by satisfying a test predicate. 
+ * + * $willNotTerminateInf + * + * @param that the other collection + * @param p the test predicate, which relates elements from both collections + * @tparam B the type of the elements of `that` + * @return `true` if both collections have the same length and + * `p(x, y)` is `true` for all corresponding elements `x` of this iterator + * and `y` of `that`, otherwise `false` + */ + def corresponds[B](that: IterableOnce[B])(p: (A, B) => Boolean): Boolean = { + val a = iterator + val b = that.iterator + + while (a.hasNext && b.hasNext) { + if (!p(a.next(), b.next())) return false + } + + a.hasNext == b.hasNext + } + + /** Displays all elements of this $coll in a string using start, end, and separator strings. + * + * Delegates to addString, which can be overridden. + * + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return a string representation of this $coll. The resulting string + * begins with the string `start` and ends with the string + * `end`. Inside, the string representations (w.r.t. the method + * `toString`) of all elements of this $coll are separated by + * the string `sep`. + * + * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"` + */ + final def mkString(start: String, sep: String, end: String): String = + if (knownSize == 0) start + end + else addString(new StringBuilder(), start, sep, end).result() + + /** Displays all elements of this $coll in a string using a separator string. + * + * Delegates to addString, which can be overridden. + * + * @param sep the separator string. + * @return a string representation of this $coll. In the resulting string + * the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * @example `List(1, 2, 3).mkString("|") = "1|2|3"` + */ + @inline final def mkString(sep: String): String = mkString("", sep, "") + + /** Displays all elements of this $coll in a string. + * + * Delegates to addString, which can be overridden. + * + * @return a string representation of this $coll. In the resulting string + * the string representations (w.r.t. the method `toString`) + * of all elements of this $coll follow each other without any + * separator string. + */ + @inline final def mkString: String = mkString("") + + /** Appends all elements of this $coll to a string builder using start, end, and separator strings. + * The written text begins with the string `start` and ends with the string `end`. + * Inside, the string representations (w.r.t. the method `toString`) + * of all elements of this $coll are separated by the string `sep`. + * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> a.addString(b , "List(" , ", " , ")") + * res5: StringBuilder = List(1, 2, 3, 4) + * }}} + * + * @param b the string builder to which elements are appended. + * @param start the starting string. + * @param sep the separator string. + * @param end the ending string. + * @return the string builder `b` to which elements were appended. 
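A short sketch of corresponds and the mkString overloads defined above, assuming standard Lists:

    List(1, 2, 3).corresponds(List("a", "bb", "ccc"))((n, s) => s.length == n)  // true
    List(1, 2, 3).corresponds(List(1, 2))(_ == _)                               // false: lengths differ
    List(1, 2, 3).mkString("[", ", ", "]")                                      // "[1, 2, 3]"
    List(1, 2, 3).mkString("-")                                                 // "1-2-3"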
+ */ + def addString(b: StringBuilder, start: String, sep: String, end: String): b.type = { + val jsb = b.underlying + if (start.length != 0) jsb.append(start) + val it = iterator + if (it.hasNext) { + jsb.append(it.next()) + while (it.hasNext) { + jsb.append(sep) + jsb.append(it.next()) + } + } + if (end.length != 0) jsb.append(end) + b + } + + /** Appends all elements of this $coll to a string builder using a separator string. + * The written text consists of the string representations (w.r.t. the method `toString`) + * of all elements of this $coll, separated by the string `sep`. + * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> a.addString(b, ", ") + * res0: StringBuilder = 1, 2, 3, 4 + * }}} + * + * @param b the string builder to which elements are appended. + * @param sep the separator string. + * @return the string builder `b` to which elements were appended. + */ + @inline final def addString(b: StringBuilder, sep: String): b.type = addString(b, "", sep, "") + + /** Appends all elements of this $coll to a string builder. + * The written text consists of the string representations (w.r.t. the method + * `toString`) of all elements of this $coll without any separator string. + * + * Example: + * + * {{{ + * scala> val a = List(1,2,3,4) + * a: List[Int] = List(1, 2, 3, 4) + * + * scala> val b = new StringBuilder() + * b: StringBuilder = + * + * scala> val h = a.addString(b) + * h: StringBuilder = 1234 + * }}} + * + * @param b the string builder to which elements are appended. + * @return the string builder `b` to which elements were appended. + */ + @inline final def addString(b: StringBuilder): b.type = addString(b, "") + + /** Given a collection factory `factory`, converts this $coll to the appropriate + * representation for the current element type `A`. Example uses: + * + * {{{ + * xs.to(List) + * xs.to(ArrayBuffer) + * xs.to(BitSet) // for xs: Iterable[Int] + * }}} + */ + def to[C1](factory: Factory[A, C1]): C1 = factory.fromSpecific(this) + + @deprecated("Use .iterator instead of .toIterator", "2.13.0") + @`inline` final def toIterator: Iterator[A] = iterator + + /** Converts this $coll to a `List`. + * + * @return This $coll as a `List[A]`. + */ + def toList: immutable.List[A] = immutable.List.from(this) + + /** Converts this $coll to a `Vector`. + * + * @return This $coll as a `Vector[A]`. + */ + def toVector: immutable.Vector[A] = immutable.Vector.from(this) + + /** Converts this $coll to a `Map`, given an implicit coercion from the $coll's type to a key-value tuple. + * + * @tparam K The key type for the resulting map. + * @tparam V The value type for the resulting map. + * @param ev An implicit coercion from `A` to `[K, V]`. + * @return This $coll as a `Map[K, V]`. + */ + def toMap[K, V](implicit ev: A <:< (K, V)): immutable.Map[K, V] = + immutable.Map.from(this.asInstanceOf[IterableOnce[(K, V)]]) + + /** Converts this $coll to a `Set`. + * + * @tparam B The type of elements of the result, a supertype of `A`. + * @return This $coll as a `Set[B]`. + */ + def toSet[B >: A]: immutable.Set[B] = immutable.Set.from(this) + + /** @return This $coll as a `Seq[A]`. This is equivalent to `to(Seq)` but might be faster. + */ + def toSeq: immutable.Seq[A] = immutable.Seq.from(this) + + /** Converts this $coll to an `IndexedSeq`. + * + * @return This $coll as an `IndexedSeq[A]`. 
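A brief sketch of the conversion methods above, including the Factory-based to, assuming the standard immutable collections:

    import scala.collection.immutable.BitSet
    List(1, 2, 2, 3).toSet          // Set(1, 2, 3)
    List(1 -> "a", 2 -> "b").toMap  // Map(1 -> "a", 2 -> "b")
    List(3, 1, 2).to(BitSet)        // BitSet(1, 2, 3)
    List(1, 2, 3).to(Vector)        // Vector(1, 2, 3)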
+ */ + def toIndexedSeq: immutable.IndexedSeq[A] = immutable.IndexedSeq.from(this) + + @deprecated("Use .to(LazyList) instead of .toStream", "2.13.0") + @inline final def toStream: immutable.Stream[A] = to(immutable.Stream) + + /** Converts this $coll to a `Buffer`. + * + * @tparam B The type of elements of the result, a supertype of `A`. + * @return This $coll as a `Buffer[B]`. + */ + @inline final def toBuffer[B >: A]: mutable.Buffer[B] = mutable.Buffer.from(this) + + /** Converts this $coll to an `Array`. + * + * Implementation note: DO NOT call [[Array.from]] from this method. + * + * @tparam B The type of elements of the result, a supertype of `A`. + * @return This $coll as an `Array[B]`. + */ + def toArray[B >: A: ClassTag]: Array[B] = + if (knownSize >= 0) { + val destination = new Array[B](knownSize) + @annotation.unused val copied = copyToArray(destination, 0) + //assert(copied == destination.length) + destination + } + else mutable.ArrayBuilder.make[B].addAll(this).result() + + // For internal use + protected def reversed: Iterable[A] = { + var xs: immutable.List[A] = immutable.Nil + val it = iterator + while (it.hasNext) xs = it.next() :: xs + xs + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/LinearSeq.scala b/scala2-library-bootstrapped/src/scala/collection/LinearSeq.scala new file mode 100644 index 000000000000..b8c40d07426f --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/LinearSeq.scala @@ -0,0 +1,314 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.{nowarn, tailrec} + +/** Base trait for linearly accessed sequences that have efficient `head` and + * `tail` operations. + * Known subclasses: List, LazyList + */ +trait LinearSeq[+A] extends Seq[A] + with LinearSeqOps[A, LinearSeq, LinearSeq[A]] + with IterableFactoryDefaults[A, LinearSeq] { + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "LinearSeq" + + override def iterableFactory: SeqFactory[LinearSeq] = LinearSeq +} + +@SerialVersionUID(3L) +object LinearSeq extends SeqFactory.Delegate[LinearSeq](immutable.LinearSeq) + +/** Base trait for linear Seq operations */ +trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeqOps[A, CC, C]] extends Any with SeqOps[A, CC, C] { + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation that is inherited from [[SeqOps]] + * uses `lengthCompare`, which is defined here to use `isEmpty`. + */ + override def isEmpty: Boolean + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]]. + */ + def head: A + + /** @inheritdoc + * + * Note: *Must* be overridden in subclasses. The default implementation is inherited from [[IterableOps]]. 
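A minimal sketch of the three members every LinearSeq implementation must supply, using List, the canonical linear sequence; everything else in this trait is derived from them:

    val xs: scala.collection.LinearSeq[Int] = List(1, 2, 3)
    xs.isEmpty  // false
    xs.head     // 1
    xs.tail     // List(2, 3)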
+ */ + def tail: C + + override def headOption: Option[A] = + if (isEmpty) None else Some(head) + + def iterator: Iterator[A] = + if (knownSize == 0) Iterator.empty + else new LinearSeqIterator[A](this) + + def length: Int = { + var these = coll + var len = 0 + while (these.nonEmpty) { + len += 1 + these = these.tail + } + len + } + + override def last: A = { + if (isEmpty) throw new NoSuchElementException("LinearSeq.last") + else { + var these = coll + var scout = tail + while (scout.nonEmpty) { + these = scout + scout = scout.tail + } + these.head + } + } + + override def lengthCompare(len: Int): Int = { + @tailrec def loop(i: Int, xs: LinearSeq[A]): Int = { + if (i == len) + if (xs.isEmpty) 0 else 1 + else if (xs.isEmpty) + -1 + else + loop(i + 1, xs.tail) + } + if (len < 0) 1 + else loop(0, coll) + } + + override def lengthCompare(that: Iterable[_]): Int = { + val thatKnownSize = that.knownSize + + if (thatKnownSize >= 0) this lengthCompare thatKnownSize + else that match { + case that: LinearSeq[_] => + var thisSeq = this + var thatSeq = that + while (thisSeq.nonEmpty && thatSeq.nonEmpty) { + thisSeq = thisSeq.tail + thatSeq = thatSeq.tail + } + java.lang.Boolean.compare(thisSeq.nonEmpty, thatSeq.nonEmpty) + case _ => + var thisSeq = this + val thatIt = that.iterator + while (thisSeq.nonEmpty && thatIt.hasNext) { + thisSeq = thisSeq.tail + thatIt.next() + } + java.lang.Boolean.compare(thisSeq.nonEmpty, thatIt.hasNext) + } + } + + override def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0 + + // `apply` is defined in terms of `drop`, which is in turn defined in + // terms of `tail`. + @throws[IndexOutOfBoundsException] + override def apply(n: Int): A = { + if (n < 0) throw new IndexOutOfBoundsException(n.toString) + val skipped = drop(n) + if (skipped.isEmpty) throw new IndexOutOfBoundsException(n.toString) + skipped.head + } + + override def foreach[U](f: A => U): Unit = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + f(these.head) + these = these.tail + } + } + + override def forall(p: A => Boolean): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (!p(these.head)) return false + these = these.tail + } + true + } + + override def exists(p: A => Boolean): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (p(these.head)) return true + these = these.tail + } + false + } + + override def contains[A1 >: A](elem: A1): Boolean = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (these.head == elem) return true + these = these.tail + } + false + } + + override def find(p: A => Boolean): Option[A] = { + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + if (p(these.head)) return Some(these.head) + these = these.tail + } + None + } + + override def foldLeft[B](z: B)(op: (B, A) => B): B = { + var acc = z + var these: LinearSeq[A] = coll + while (!these.isEmpty) { + acc = op(acc, these.head) + these = these.tail + } + acc + } + + override def sameElements[B >: A](that: IterableOnce[B]): Boolean = { + @tailrec def linearSeqEq(a: LinearSeq[B], b: LinearSeq[B]): Boolean = + (a eq b) || { + if (a.nonEmpty && b.nonEmpty && a.head == b.head) { + linearSeqEq(a.tail, b.tail) + } + else { + a.isEmpty && b.isEmpty + } + } + + that match { + case that: LinearSeq[B] => linearSeqEq(coll, that) + case _ => super.sameElements(that) + } + } + + override def segmentLength(p: A => Boolean, from: Int): Int = { + var i = 0 + var seq = drop(from) + while (seq.nonEmpty && p(seq.head)) { + i += 1 + seq = 
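A small sketch of the linear-access behaviour implemented above: indexing and length cost O(n), while lengthCompare can stop early, which also makes it safe on an infinite LazyList (assuming LazyList uses this default implementation):

    val xs = List.range(0, 1000)
    xs(3)                               // 3, reached by dropping three tails
    xs.lengthCompare(5) > 0             // true, without traversing the whole list
    LazyList.from(0).lengthCompare(10)  // 1, terminates although the LazyList is infinite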
seq.tail + } + i + } + + override def indexWhere(p: A => Boolean, from: Int): Int = { + var i = math.max(from, 0) + var these: LinearSeq[A] = this drop from + while (these.nonEmpty) { + if (p(these.head)) + return i + + i += 1 + these = these.tail + } + -1 + } + + override def lastIndexWhere(p: A => Boolean, end: Int): Int = { + var i = 0 + var these: LinearSeq[A] = coll + var last = -1 + while (!these.isEmpty && i <= end) { + if (p(these.head)) last = i + these = these.tail + i += 1 + } + last + } + + override def findLast(p: A => Boolean): Option[A] = { + var these: LinearSeq[A] = coll + var found = false + var last: A = null.asInstanceOf[A] // don't use `Option`, to prevent excessive `Some` allocation + while (these.nonEmpty) { + val elem = these.head + if (p(elem)) { + found = true + last = elem + } + these = these.tail + } + if (found) Some(last) else None + } + + override def tails: Iterator[C] = { + val end = Iterator.single(empty) + Iterator.iterate(coll)(_.tail).takeWhile(_.nonEmpty) ++ end + } +} + +trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { + // A more efficient iterator implementation than the default LinearSeqIterator + override def iterator: Iterator[A] = new AbstractIterator[A] { + private[this] var current = StrictOptimizedLinearSeqOps.this + def hasNext = !current.isEmpty + def next() = { val r = current.head; current = current.tail; r } + } + + // Optimized version of `drop` that avoids copying + override def drop(n: Int): C = { + @tailrec def loop(n: Int, s: C): C = + if (n <= 0 || s.isEmpty) s + else loop(n - 1, s.tail) + loop(n, coll) + } + + override def dropWhile(p: A => Boolean): C = { + @tailrec def loop(s: C): C = + if (s.nonEmpty && p(s.head)) loop(s.tail) + else s + loop(coll) + } +} + +/** A specialized Iterator for LinearSeqs that is lazy enough for Stream and LazyList. This is accomplished by not + * evaluating the tail after returning the current head. + */ +private[collection] final class LinearSeqIterator[A](coll: LinearSeqOps[A, LinearSeq, LinearSeq[A]]) extends AbstractIterator[A] { + + // A call-by-need cell + private[this] final class LazyCell(st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) { + def this(outer: LinearSeqIterator[A], st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) = this(st) + lazy val v = st + } + + private[this] var these: LazyCell = { + // Reassign reference to avoid creating a private class field and holding a reference to the head. + // LazyCell would otherwise close over `coll`. + val initialHead = coll + new LazyCell(initialHead) + } + + def hasNext: Boolean = these.v.nonEmpty + + def next(): A = + if (isEmpty) Iterator.empty.next() + else { + val cur = these.v + val result = cur.head + these = new LazyCell(cur.tail) + result + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/mutable/PriorityQueue.scala b/scala2-library-bootstrapped/src/scala/collection/mutable/PriorityQueue.scala new file mode 100644 index 000000000000..4f4c62914b50 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/mutable/PriorityQueue.scala @@ -0,0 +1,416 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
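A few illustrative calls to the remaining LinearSeq operations defined above, assuming standard Lists:

    List(1, 2, 3).indexWhere(_ > 1, 2)  // 2: the search starts at index 2
    List(1, 2, 3, 2).findLast(_ == 2)   // Some(2), found by scanning the whole list once
    List(1, 2, 3).tails.toList          // List(List(1, 2, 3), List(2, 3), List(3), List())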
+ */ + +package scala.collection +package mutable + +import scala.collection.generic.DefaultSerializationProxy +import scala.math.Ordering + +/** A heap-based priority queue. + * + * To prioritize elements of type `A` there must be an implicit + * `Ordering[A]` available at creation. Elements are retrieved + * in priority order by using [[dequeue]] or [[dequeueAll]]. + * + * If multiple elements have the same priority as determined by the ordering for this + * `PriorityQueue`, no guarantees are made regarding the order in which those elements + * are returned by `dequeue` or `dequeueAll`. In particular, that means this + * class does not guarantee first-in-first-out behavior, as may be + * incorrectly inferred from the fact that this data structure is + * called a "queue". + * + * Only the `dequeue` and `dequeueAll` methods will return elements in priority + * order (while removing elements from the heap). Standard collection methods + * such as `drop`, `iterator`, `toList` and `toString` use an arbitrary + * iteration order: they will traverse the heap or remove elements + * in whichever order seems most convenient. + * + * Therefore, printing a `PriorityQueue` will not show elements in priority order, + * though the highest-priority element will be printed first. + * To print the elements in order, it's necessary to `dequeue` them. + * To do this non-destructively, duplicate the `PriorityQueue` first; + * the `clone` method is a suitable way to obtain a disposable copy. + * + * Client keys are assumed to be immutable. Mutating keys may violate + * the invariant of the underlying heap-ordered tree. Note that [[clone]] + * does not rebuild the underlying tree. + * + * {{{ + * scala> val pq = collection.mutable.PriorityQueue(1, 2, 5, 3, 7) + * val pq: scala.collection.mutable.PriorityQueue[Int] = PriorityQueue(7, 3, 5, 1, 2) + * + * scala> pq.toList // also not in order + * val res0: List[Int] = List(7, 3, 5, 1, 2) + * + * scala> pq.clone.dequeueAll + * val res1: Seq[Int] = ArraySeq(7, 5, 3, 2, 1) + * }}} + * + * @tparam A type of the elements in this priority queue. + * @param ord implicit ordering used to compare the elements of type `A`. + * + * @define Coll PriorityQueue + * @define coll priority queue + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +sealed class PriorityQueue[A](implicit val ord: Ordering[A]) + extends AbstractIterable[A] + with Iterable[A] + with IterableOps[A, Iterable, PriorityQueue[A]] + with StrictOptimizedIterableOps[A, Iterable, PriorityQueue[A]] + with Builder[A, PriorityQueue[A]] + with Cloneable[PriorityQueue[A]] + with Growable[A] + with Serializable +{ + + private class ResizableArrayAccess[A0] extends ArrayBuffer[A0] { + + def this(outer: PriorityQueue[A]) = this() + + override def mapInPlace(f: A0 => A0): this.type = { + var i = 1 // see "we do not use array(0)" comment below (???) 
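A usage sketch of how the implicit Ordering described above drives dequeue order; passing Ordering[Int].reverse turns the default max-heap behaviour into a min-heap:

    import scala.collection.mutable.PriorityQueue
    val maxHeap = PriorityQueue(1, 5, 3)                         // default Ordering[Int]
    maxHeap.dequeue()                                            // 5
    val minHeap = PriorityQueue(1, 5, 3)(Ordering[Int].reverse)
    minHeap.dequeue()                                            // 1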
+ val siz = this.size + while (i < siz) { this(i) = f(this(i)); i += 1 } + this + } + + def p_size0 = size0 + def p_size0_=(s: Int) = size0 = s + def p_array = array + def p_ensureSize(n: Int) = super.ensureSize(n) + def p_ensureAdditionalSize(n: Int) = super.ensureSize(size0 + n) + def p_swap(a: Int, b: Int): Unit = { + val h = array(a) + array(a) = array(b) + array(b) = h + } + } + + private val resarr = new ResizableArrayAccess[A] + + // we do not use array(0) + // storing the root of the heap at array(1) simplifies the calculations for + // parent and child indices: for a given index k, the parent of k is k / 2, + // the left child is k * 2, and the right child is k * 2 + 1 + resarr.p_size0 += 1 + /** Alias for [[size]]. */ + def length: Int = resarr.length - 1 // adjust length accordingly + override def size: Int = length + override def knownSize: Int = length + override def isEmpty: Boolean = resarr.p_size0 < 2 + + // not eligible for EvidenceIterableFactoryDefaults since C != CC[A] (PriorityQueue[A] != Iterable[A]) + override protected def fromSpecific(coll: scala.collection.IterableOnce[A]): PriorityQueue[A] = PriorityQueue.from(coll) + override protected def newSpecificBuilder: Builder[A, PriorityQueue[A]] = PriorityQueue.newBuilder + override def empty: PriorityQueue[A] = PriorityQueue.empty + + /** Replace the contents of this $coll with the mapped result. + * + * @param f the mapping function + * @return this $coll + */ + def mapInPlace(f: A => A): this.type = { + resarr.mapInPlace(f) + heapify(1) + this + } + + def result() = this + + private def toA(x: AnyRef): A = x.asInstanceOf[A] + protected def fixUp(as: Array[AnyRef], m: Int): Unit = { + var k: Int = m + // use `ord` directly to avoid allocating `OrderingOps` + while (k > 1 && ord.lt(toA(as(k / 2)), toA(as(k)))) { + resarr.p_swap(k, k / 2) + k = k / 2 + } + } + + protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { + // returns true if any swaps were done (used in heapify) + var k: Int = m + while (n >= 2 * k) { + var j = 2 * k + // use `ord` directly to avoid allocating `OrderingOps` + if (j < n && ord.lt(toA(as(j)), toA(as(j + 1)))) + j += 1 + if (ord.gteq(toA(as(k)), toA(as(j)))) + return k != m + else { + val h = as(k) + as(k) = as(j) + as(j) = h + k = j + } + } + k != m + } + + /** Inserts a single element into the priority queue. + * + * @param elem the element to insert. + * @return this $coll. 
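A tiny sketch of the 1-based index arithmetic that the "we do not use array(0)" comment and fixUp/fixDown rely on; parent, left and right are hypothetical helpers for illustration, not members of this class:

    def parent(k: Int): Int = k / 2      // parent of nodes 6 and 7 is 3
    def left(k: Int): Int   = 2 * k      // left child of 3 is 6
    def right(k: Int): Int  = 2 * k + 1  // right child of 3 is 7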
+ */ + def addOne(elem: A): this.type = { + resarr.p_ensureAdditionalSize(1) + resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] + fixUp(resarr.p_array, resarr.p_size0) + resarr.p_size0 += 1 + this + } + + override def addAll(xs: IterableOnce[A]): this.type = { + val from = resarr.p_size0 + for (x <- xs.iterator) unsafeAdd(x) + heapify(from) + this + } + + private def unsafeAdd(elem: A): Unit = { + // like += but skips fixUp, which breaks the ordering invariant + // a series of unsafeAdds MUST be followed by heapify + resarr.p_ensureAdditionalSize(1) + resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] + resarr.p_size0 += 1 + } + + private def heapify(from: Int): Unit = { + // elements at indices 1..from-1 were already in heap order before any adds + // elements at indices from..n are newly added, their order must be fixed + val n = length + + if (from <= 2) { + // no pre-existing order to maintain, do the textbook heapify algorithm + for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n) + } + else if (n - from < 4) { + // for very small adds, doing the simplest fix is faster + for (i <- from to n) fixUp(resarr.p_array, i) + } + else { + var min = from/2 // tracks the minimum element in the queue + val queue = scala.collection.mutable.Queue[Int](min) + + // do fixDown on the parents of all the new elements + // except the parent of the first new element, which is in the queue + // (that parent is treated specially because it might be the root) + for (i <- n/2 until min by -1) { + if (fixDown(resarr.p_array, i, n)) { + // there was a swap, so also need to fixDown i's parent + val parent = i/2 + if (parent < min) { // make sure same parent isn't added twice + min = parent + queue += parent + } + } + } + + while (queue.nonEmpty) { + val i = queue.dequeue() + if (fixDown(resarr.p_array, i, n)) { + val parent = i/2 + if (parent < min && parent > 0) { + // the "parent > 0" is to avoid adding the parent of the root + min = parent + queue += parent + } + } + } + } + } + + /** Adds all elements provided by a `IterableOnce` object + * into the priority queue. + * + * @param xs a iterable object. + * @return a new priority queue containing elements of both `xs` and `this`. + */ + def ++(xs: IterableOnce[A]): PriorityQueue[A] = { this.clone() ++= xs } + + /** Adds all elements to the queue. + * + * @param elems the elements to add. + */ + def enqueue(elems: A*): Unit = { this ++= elems } + + /** Returns the element with the highest priority in the queue, + * and removes this element from the queue. + * + * @return the element with the highest priority. + * @throws NoSuchElementException if no element to remove from heap + */ + def dequeue(): A = + if (resarr.p_size0 > 1) { + resarr.p_size0 = resarr.p_size0 - 1 + val result = resarr.p_array(1) + resarr.p_array(1) = resarr.p_array(resarr.p_size0) + resarr.p_array(resarr.p_size0) = null // erase reference from array + fixDown(resarr.p_array, 1, resarr.p_size0 - 1) + toA(result) + } else + throw new NoSuchElementException("no element to remove from heap") + + /** Dequeues all elements and returns them in a sequence, in priority order. */ + def dequeueAll[A1 >: A]: immutable.Seq[A1] = { + val b = ArrayBuilder.make[Any] + b.sizeHint(size) + while (nonEmpty) { + b += dequeue() + } + immutable.ArraySeq.unsafeWrapArray(b.result()).asInstanceOf[immutable.ArraySeq[A1]] + } + + /** Returns the element with the highest priority in the queue, + * or throws an error if there is no element contained in the queue. 
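A usage sketch of the mutating operations above; only dequeue and dequeueAll observe priority order:

    import scala.collection.mutable.PriorityQueue
    val pq = PriorityQueue.empty[Int]
    pq.enqueue(2, 7, 4)
    pq += 9                // addOne via Growable
    pq.dequeue()           // 9
    pq.clone().dequeueAll  // Seq(7, 4, 2) in priority order; pq itself is untouched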
+ * + * @return the element with the highest priority. + */ + override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty") + + /** Removes all elements from the queue. After this operation is completed, + * the queue will be empty. + */ + def clear(): Unit = { + resarr.clear() + resarr.p_size0 = 1 + } + + /** Returns an iterator which yields all the elements. + * + * Note: The order of elements returned is undefined. + * If you want to traverse the elements in priority queue + * order, use `clone().dequeueAll.iterator`. + * + * @return an iterator over all the elements. + */ + override def iterator: Iterator[A] = resarr.iterator.drop(1) + + /** Returns the reverse of this priority queue. The new priority queue has + * the same elements as the original, but the opposite ordering. + * + * For example, the element with the highest priority in `pq` has the lowest + * priority in `pq.reverse`, and vice versa. + * + * Ties are handled arbitrarily. Elements with equal priority may or + * may not be reversed with respect to each other. + * + * @return the reversed priority queue. + */ + def reverse: PriorityQueue[A] = { + val revq = new PriorityQueue[A]()(ord.reverse) + // copy the existing data into the new array backwards + // this won't put it exactly into the correct order, + // but will require less fixing than copying it in + // the original order + val n = resarr.p_size0 + revq.resarr.p_ensureSize(n) + revq.resarr.p_size0 = n + val from = resarr.p_array + val to = revq.resarr.p_array + for (i <- 1 until n) to(i) = from(n-i) + revq.heapify(1) + revq + } + + + /** Returns an iterator which yields all the elements in the reverse order + * than that returned by the method `iterator`. + * + * Note: The order of elements returned is undefined. + * + * @return an iterator over all elements sorted in descending order. + */ + def reverseIterator: Iterator[A] = new AbstractIterator[A] { + private[this] var i = resarr.p_size0 - 1 + def hasNext: Boolean = i >= 1 + def next(): A = { + val n = resarr.p_array(i) + i -= 1 + toA(n) + } + } + + /** Returns a regular queue containing the same elements. + * + * Note: the order of elements is undefined. + */ + def toQueue: Queue[A] = new Queue[A] ++= this.iterator + + /** Returns a textual representation of a queue as a string. + * + * @return the string representation of this queue. + */ + override def toString() = toList.mkString("PriorityQueue(", ", ", ")") + + /** Converts this $coll to a list. + * + * Note: the order of elements is undefined. + * + * @return a list containing all elements of this $coll. + */ + override def toList: immutable.List[A] = immutable.List.from(this.iterator) + + /** This method clones the priority queue. + * + * @return a priority queue with the same elements. 
+ */ + override def clone(): PriorityQueue[A] = { + val pq = new PriorityQueue[A] + val n = resarr.p_size0 + pq.resarr.p_ensureSize(n) + java.lang.System.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) + pq.resarr.p_size0 = n + pq + } + + override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if (copied > 0) { + Array.copy(resarr.p_array, 1, xs, start, copied) + } + copied + } + + @deprecated("Use `PriorityQueue` instead", "2.13.0") + def orderedCompanion: PriorityQueue.type = PriorityQueue + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(PriorityQueue.evidenceIterableFactory[A], this) + + override protected[this] def className = "PriorityQueue" +} + + +@SerialVersionUID(3L) +object PriorityQueue extends SortedIterableFactory[PriorityQueue] { + def newBuilder[A : Ordering]: Builder[A, PriorityQueue[A]] = { + new Builder[A, PriorityQueue[A]] { + val pq = new PriorityQueue[A] + def addOne(elem: A): this.type = { pq.unsafeAdd(elem); this } + def result(): PriorityQueue[A] = { pq.heapify(1); pq } + def clear(): Unit = pq.clear() + } + } + + def empty[A : Ordering]: PriorityQueue[A] = new PriorityQueue[A] + + def from[E : Ordering](it: IterableOnce[E]): PriorityQueue[E] = { + val b = newBuilder[E] + b ++= it + b.result() + } +} diff --git a/scala2-library-bootstrapped/src/scala/concurrent/BatchingExecutor.scala b/scala2-library-bootstrapped/src/scala/concurrent/BatchingExecutor.scala new file mode 100644 index 000000000000..ce01e43b7fa5 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/concurrent/BatchingExecutor.scala @@ -0,0 +1,272 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.concurrent + +import java.util.concurrent.Executor +import java.util.Objects +import scala.util.control.NonFatal +import scala.annotation.{switch, tailrec} + +/** + * Marker trait to indicate that a Runnable is Batchable by BatchingExecutors + */ +trait Batchable { + self: Runnable => +} + +private[concurrent] object BatchingExecutorStatics { + final val emptyBatchArray: Array[Runnable] = new Array[Runnable](0) + + // Max number of Runnables executed nested before starting to batch (to prevent stack exhaustion) + final val syncPreBatchDepth = 16 + + // Max number of Runnables processed in one go (to prevent starvation of other tasks on the pool) + final val runLimit = 1024 + + object MissingParentBlockContext extends BlockContext { + override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = + try thunk finally throw new IllegalStateException("BUG in BatchingExecutor.Batch: parentBlockContext is null") + } +} + +/** + * Mixin trait for an Executor + * which groups multiple nested `Runnable.run()` calls + * into a single Runnable passed to the original + * Executor. This can be a useful optimization + * because it bypasses the original context's task + * queue and keeps related (nested) code on a single + * thread which may improve CPU affinity. However, + * if tasks passed to the Executor are blocking + * or expensive, this optimization can prevent work-stealing + * and make performance worse. 
+ * A batching executor can create deadlocks if code does + * not use `scala.concurrent.blocking` when it should, + * because tasks created within other tasks will block + * on the outer task completing. + * This executor may run tasks in any order, including LIFO order. + * There are no ordering guarantees. + * + * WARNING: Only use *EITHER* `submitAsyncBatched` OR `submitSyncBatched`!! + * + * When you implement this trait for async executors like thread pools, + * you're going to need to implement it something like the following: + * + * {{{ + * final override def submitAsync(runnable: Runnable): Unit = + * super[SuperClass].execute(runnable) // To prevent reentrancy into `execute` + * + * final override def execute(runnable: Runnable): Unit = + * if (runnable.isInstanceOf[Batchable]) // Or other logic + * submitAsyncBatched(runnable) + * else + * submitAsync(runnable) + * + * final override def reportFailure(cause: Throwable): Unit = … + * }}} + * + * And if you want to implement if for a sync, trampolining, executor you're + * going to implement it something like this: + * + * {{{ + * final override def submitAsync(runnable: Runnable): Unit = () + * + * final override def execute(runnable: Runnable): Unit = + * submitSyncBatched(runnable) // You typically will want to batch everything + * + * final override def reportFailure(cause: Throwable): Unit = + * ExecutionContext.defaultReporter(cause) // Or choose something more fitting + * }}} + * + */ +private[concurrent] trait BatchingExecutor extends Executor { + private[this] final val _tasksLocal = new ThreadLocal[AnyRef]() + + /* + * Batch implements a LIFO queue (stack) and is used as a trampolining Runnable. + * In order to conserve allocations, the first element in the batch is stored "unboxed" in + * the `first` field. Subsequent Runnables are stored in the array called `other`. 
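Related to the blocking caveat above, a minimal sketch of how user code marks a blocking section with scala.concurrent.blocking so a batching or thread-pool executor can compensate; the Future body here is purely illustrative:

    import scala.concurrent.{blocking, Future, ExecutionContext}
    import ExecutionContext.Implicits.global

    val f = Future {
      blocking {
        Thread.sleep(100)  // signal that this task may block, avoiding batching deadlocks
      }
    }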
+ */ + private[this] sealed abstract class AbstractBatch protected (protected final var first: Runnable, protected final var other: Array[Runnable], protected final var size: Int) { + + def this(outer: BatchingExecutor, first: Runnable, other: Array[Runnable], size: Int) = this(first, other, size) + + private[this] final def ensureCapacity(curSize: Int): Array[Runnable] = { + val curOther = this.other + val curLen = curOther.length + if (curSize <= curLen) curOther + else { + val newLen = if (curLen == 0) 4 else curLen << 1 + + if (newLen <= curLen) throw new StackOverflowError("Space limit of asynchronous stack reached: " + curLen) + val newOther = new Array[Runnable](newLen) + System.arraycopy(curOther, 0, newOther, 0, curLen) + this.other = newOther + newOther + } + } + + final def push(r: Runnable): Unit = { + val sz = this.size + if(sz == 0) + this.first = r + else + ensureCapacity(sz)(sz - 1) = r + this.size = sz + 1 + } + + @tailrec protected final def runN(n: Int): Unit = + if (n > 0) + (this.size: @switch) match { + case 0 => + case 1 => + val next = this.first + this.first = null + this.size = 0 + next.run() + runN(n - 1) + case sz => + val o = this.other + val next = o(sz - 2) + o(sz - 2) = null + this.size = sz - 1 + next.run() + runN(n - 1) + } + } + + private[this] final class AsyncBatch private(_first: Runnable, _other: Array[Runnable], _size: Int) extends AbstractBatch(_first, _other, _size) with Runnable with BlockContext with (BlockContext => Throwable) { + private[this] final var parentBlockContext: BlockContext = BatchingExecutorStatics.MissingParentBlockContext + + final def this(runnable: Runnable) = this(runnable, BatchingExecutorStatics.emptyBatchArray, 1) + + override final def run(): Unit = { + _tasksLocal.set(this) // This is later cleared in `apply` or `runWithoutResubmit` + + val f = resubmit(BlockContext.usingBlockContext(this)(this)) + + if (f != null) + throw f + } + + /* LOGIC FOR ASYNCHRONOUS BATCHES */ + override final def apply(prevBlockContext: BlockContext): Throwable = try { + parentBlockContext = prevBlockContext + runN(BatchingExecutorStatics.runLimit) + null + } catch { + case t: Throwable => t // We are handling exceptions on the outside of this method + } finally { + parentBlockContext = BatchingExecutorStatics.MissingParentBlockContext + _tasksLocal.remove() + } + + /* Attempts to resubmit this Batch to the underlying ExecutionContext, + * this only happens for Batches where `resubmitOnBlock` is `true`. + * Only attempt to resubmit when there are `Runnables` left to process. + * Note that `cause` can be `null`. + */ + private[this] final def resubmit(cause: Throwable): Throwable = + if (this.size > 0) { + try { submitForExecution(this); cause } catch { + case inner: Throwable => + if (NonFatal(inner)) { + val e = new ExecutionException("Non-fatal error occurred and resubmission failed, see suppressed exception.", cause) + e.addSuppressed(inner) + e + } else inner + } + } else cause // TODO: consider if NonFatals should simply be `reportFailure`:ed rather than rethrown + + private[this] final def cloneAndClear(): AsyncBatch = { + val newBatch = new AsyncBatch(this.first, this.other, this.size) + this.first = null + this.other = BatchingExecutorStatics.emptyBatchArray + this.size = 0 + newBatch + } + + override final def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = { + // If we know there will be blocking, we don't want to keep tasks queued up because it could deadlock. 
+ if(this.size > 0) + submitForExecution(cloneAndClear()) // If this throws then we have bigger problems + + parentBlockContext.blockOn(thunk) // Now delegate the blocking to the previous BC + } + } + + private[this] final class SyncBatch(runnable: Runnable) extends AbstractBatch(runnable, BatchingExecutorStatics.emptyBatchArray, 1) with Runnable { + @tailrec override final def run(): Unit = { + try runN(BatchingExecutorStatics.runLimit) catch { + case ie: InterruptedException => + reportFailure(ie) // TODO: Handle InterruptedException differently? + case f if NonFatal(f) => + reportFailure(f) + } + + if (this.size > 0) + run() + } + } + + /** MUST throw a NullPointerException when `runnable` is null + * When implementing a sync BatchingExecutor, it is RECOMMENDED + * to implement this method as `runnable.run()` + */ + protected def submitForExecution(runnable: Runnable): Unit + + /** Reports that an asynchronous computation failed. + * See `ExecutionContext.reportFailure(throwable: Throwable)` + */ + protected def reportFailure(throwable: Throwable): Unit + + /** + * WARNING: Never use both `submitAsyncBatched` and `submitSyncBatched` in the same + * implementation of `BatchingExecutor` + */ + protected final def submitAsyncBatched(runnable: Runnable): Unit = { + val b = _tasksLocal.get + if (b.isInstanceOf[AsyncBatch]) b.asInstanceOf[AsyncBatch].push(runnable) + else submitForExecution(new AsyncBatch(runnable)) + } + + /** + * WARNING: Never use both `submitAsyncBatched` and `submitSyncBatched` in the same + * implementation of `BatchingExecutor` + */ + protected final def submitSyncBatched(runnable: Runnable): Unit = { + Objects.requireNonNull(runnable, "runnable is null") + val tl = _tasksLocal + val b = tl.get + if (b.isInstanceOf[SyncBatch]) b.asInstanceOf[SyncBatch].push(runnable) + else { + val i = if (b ne null) b.asInstanceOf[java.lang.Integer].intValue else 0 + if (i < BatchingExecutorStatics.syncPreBatchDepth) { + tl.set(java.lang.Integer.valueOf(i + 1)) + try submitForExecution(runnable) // User code so needs to be try-finally guarded here + catch { + case ie: InterruptedException => + reportFailure(ie) // TODO: Handle InterruptedException differently? + case f if NonFatal(f) => + reportFailure(f) + } + finally tl.set(b) + } else { + val batch = new SyncBatch(runnable) + tl.set(batch) + submitForExecution(batch) + tl.set(b) // Batch only throws fatals so no need for try-finally here + } + } + } +} diff --git a/scala2-library-bootstrapped/src/scala/concurrent/Channel.scala b/scala2-library-bootstrapped/src/scala/concurrent/Channel.scala new file mode 100644 index 000000000000..a1ccbf5fdcde --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/concurrent/Channel.scala @@ -0,0 +1,61 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.concurrent + +/** This class provides a simple FIFO queue of data objects, + * which are read by one or more reader threads. 
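+ *
+ * A minimal usage sketch (illustrative, not part of the original Scaladoc):
+ * one thread writes while another blocks in `read` until a value is available.
+ * {{{
+ * val chan = new Channel[Int]
+ * new Thread(() => chan.write(42)).start()
+ * val x = chan.read // blocks until the writer has called write
+ * }}}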
+ * + * @tparam A type of data exchanged + */ +@deprecated("Use `java.util.concurrent.LinkedTransferQueue` instead.", since = "2.13.0") +class Channel[A] { + private class LinkedList { + def this(outer: Channel[A]) = this() + + var elem: A = _ + var next: LinkedList = _ + } + private[this] var written = new LinkedList // FIFO queue, realized through + private[this] var lastWritten = written // aliasing of a linked list + private[this] var nreaders = 0 + + /** Append a value to the FIFO queue to be read by `read`. + * This operation is nonblocking and can be executed by any thread. + * + * @param x object to enqueue to this channel + */ + def write(x: A): Unit = synchronized { + lastWritten.elem = x + lastWritten.next = new LinkedList + lastWritten = lastWritten.next + if (nreaders > 0) notify() + } + + /** Retrieve the next waiting object from the FIFO queue, + * blocking if necessary until an object is available. + * + * @return next object dequeued from this channel + */ + def read: A = synchronized { + while (written.next == null) { + try { + nreaders += 1 + wait() + } + finally nreaders -= 1 + } + val x = written.elem + written = written.next + x + } +} diff --git a/scala2-library-bootstrapped/src/scala/io/Source.scala b/scala2-library-bootstrapped/src/scala/io/Source.scala new file mode 100644 index 000000000000..ca4a11596d92 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/io/Source.scala @@ -0,0 +1,382 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. dba Akka + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package io + +import scala.collection.{AbstractIterator, BufferedIterator} +import java.io.{Closeable, FileInputStream, FileNotFoundException, InputStream, PrintStream, File => JFile} +import java.net.{URI, URL} + +import scala.annotation.nowarn + +/** This object provides convenience methods to create an iterable + * representation of a source file. + */ +object Source { + val DefaultBufSize = 2048 + + /** Creates a `Source` from System.in. + */ + def stdin = fromInputStream(System.in) + + /** Creates a Source from an Iterable. + * + * @param iterable the Iterable + * @return the Source + */ + def fromIterable(iterable: Iterable[Char]): Source = new Source { + val iter = iterable.iterator + } withReset(() => fromIterable(iterable)) + + /** Creates a Source instance from a single character. + */ + def fromChar(c: Char): Source = fromIterable(Array(c)) + + /** creates Source from array of characters, with empty description. + */ + def fromChars(chars: Array[Char]): Source = fromIterable(chars) + + /** creates Source from a String, with no description. + */ + def fromString(s: String): Source = fromIterable(s) + + /** creates Source from file with given name, setting its description to + * filename. + */ + def fromFile(name: String)(implicit codec: Codec): BufferedSource = + fromFile(new JFile(name))(codec) + + /** creates Source from file with given name, using given encoding, setting + * its description to filename. + */ + def fromFile(name: String, enc: String): BufferedSource = + fromFile(name)(Codec(enc)) + + /** creates `source` from file with given file `URI`. 
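+ * e.g. (illustrative): `Source.fromFile(new java.io.File("data.txt").toURI)`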
+ */ + def fromFile(uri: URI)(implicit codec: Codec): BufferedSource = + fromFile(new JFile(uri))(codec) + + /** creates Source from file with given file: URI + */ + def fromFile(uri: URI, enc: String): BufferedSource = + fromFile(uri)(Codec(enc)) + + /** creates Source from file, using default character encoding, setting its + * description to filename. + */ + def fromFile(file: JFile)(implicit codec: Codec): BufferedSource = + fromFile(file, Source.DefaultBufSize)(codec) + + /** same as fromFile(file, enc, Source.DefaultBufSize) + */ + def fromFile(file: JFile, enc: String): BufferedSource = + fromFile(file)(Codec(enc)) + + def fromFile(file: JFile, enc: String, bufferSize: Int): BufferedSource = + fromFile(file, bufferSize)(Codec(enc)) + + /** Creates Source from `file`, using given character encoding, setting + * its description to filename. Input is buffered in a buffer of size + * `bufferSize`. + */ + def fromFile(file: JFile, bufferSize: Int)(implicit codec: Codec): BufferedSource = { + val inputStream = new FileInputStream(file) + + createBufferedSource( + inputStream, + bufferSize, + () => fromFile(file, bufferSize)(codec), + () => inputStream.close() + )(codec) withDescription s"file:${file.getAbsolutePath}" + } + + /** Create a `Source` from array of bytes, decoding + * the bytes according to codec. + * + * @return the created `Source` instance. + */ + def fromBytes(bytes: Array[Byte])(implicit codec: Codec): Source = + fromString(new String(bytes, codec.name)) + + def fromBytes(bytes: Array[Byte], enc: String): Source = + fromBytes(bytes)(Codec(enc)) + + /** Create a `Source` from array of bytes, assuming + * one byte per character (ISO-8859-1 encoding.) + */ + @deprecated("Use `fromBytes` and specify an encoding", since="2.13.9") + def fromRawBytes(bytes: Array[Byte]): Source = + fromString(new String(bytes, Codec.ISO8859.charSet)) + + /** creates `Source` from file with given file: URI + */ + def fromURI(uri: URI)(implicit codec: Codec): BufferedSource = + fromFile(new JFile(uri))(codec) + + /** same as fromURL(new URL(s))(Codec(enc)) + */ + def fromURL(s: String, enc: String): BufferedSource = + fromURL(s)(Codec(enc)) + + /** same as fromURL(new URL(s)) + */ + def fromURL(s: String)(implicit codec: Codec): BufferedSource = + fromURL(new URI(s).toURL)(codec) + + /** same as fromInputStream(url.openStream())(Codec(enc)) + */ + def fromURL(url: URL, enc: String): BufferedSource = + fromURL(url)(Codec(enc)) + + /** same as fromInputStream(url.openStream())(codec) + */ + def fromURL(url: URL)(implicit codec: Codec): BufferedSource = + fromInputStream(url.openStream())(codec) + + /** Reads data from inputStream with a buffered reader, using the encoding + * in implicit parameter codec. 
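+ *
+ * A minimal usage sketch (illustrative): wrap an existing stream and pass a
+ * close handler so that `close()` releases it.
+ * {{{
+ * val in = new java.io.ByteArrayInputStream("hello".getBytes("UTF-8"))
+ * val src = Source.createBufferedSource(in, close = () => in.close())
+ * }}}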
+ * + * @param inputStream the input stream from which to read + * @param bufferSize buffer size (defaults to Source.DefaultBufSize) + * @param reset a () => Source which resets the stream (if unset, reset() will throw an Exception) + * @param close a () => Unit method which closes the stream (if unset, close() will do nothing) + * @param codec (implicit) a scala.io.Codec specifying behavior (defaults to Codec.default) + * @return the buffered source + */ + def createBufferedSource( + inputStream: InputStream, + bufferSize: Int = DefaultBufSize, + reset: () => Source = null, + close: () => Unit = null + )(implicit codec: Codec): BufferedSource = { + // workaround for default arguments being unable to refer to other parameters + val resetFn = if (reset == null) () => createBufferedSource(inputStream, bufferSize, reset, close)(codec) else reset + + new BufferedSource(inputStream, bufferSize)(codec) withReset resetFn withClose close + } + + def fromInputStream(is: InputStream, enc: String): BufferedSource = + fromInputStream(is)(Codec(enc)) + + def fromInputStream(is: InputStream)(implicit codec: Codec): BufferedSource = + createBufferedSource(is, reset = () => fromInputStream(is)(codec), close = () => is.close())(codec) + + /** Reads data from a classpath resource, using either a context classloader (default) or a passed one. + * + * @param resource name of the resource to load from the classpath + * @param classLoader classloader to be used, or context classloader if not specified + * @return the buffered source + */ + def fromResource(resource: String, classLoader: ClassLoader = Thread.currentThread().getContextClassLoader())(implicit codec: Codec): BufferedSource = + Option(classLoader.getResourceAsStream(resource)) match { + case Some(in) => fromInputStream(in) + case None => throw new FileNotFoundException(s"resource '$resource' was not found in the classpath from the given classloader.") + } + +} + +/** An iterable representation of source data. + * It may be reset with the optional [[reset]] method. + * + * Subclasses must supply [[scala.io.Source.iter the underlying iterator]]. + * + * Error handling may be customized by overriding the [[scala.io.Source.report report]] method. + * + * The [[scala.io.Source.ch current input]] and [[scala.io.Source.pos position]], + * as well as the [[scala.io.Source.next next character]] methods delegate to + * [[scala.io.Source#Positioner the positioner]]. + * + * The default positioner encodes line and column numbers in the position passed to [[report]]. + * This behavior can be changed by supplying a + * [[scala.io.Source.withPositioning(pos:* custom positioner]]. 
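+ *
+ * A typical use (illustrative sketch, not from the original Scaladoc):
+ * {{{
+ * val src = Source.fromString("first line\nsecond line")
+ * try src.getLines().foreach(println)
+ * finally src.close()
+ * }}}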
+ * + */ +abstract class Source extends Iterator[Char] with Closeable { + /** the actual iterator */ + protected val iter: Iterator[Char] + + // ------ public values + + /** description of this source, default empty */ + var descr: String = "" + var nerrors = 0 + var nwarnings = 0 + + private def lineNum(line: Int): String = (getLines() drop (line - 1) take 1).mkString + + class LineIterator extends AbstractIterator[String] with Iterator[String] { + private[this] val sb = new StringBuilder + + lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered + def isNewline(ch: Char): Boolean = ch == '\r' || ch == '\n' + def getc(): Boolean = iter.hasNext && { + val ch = iter.next() + if (ch == '\n') false + else if (ch == '\r') { + if (iter.hasNext && iter.head == '\n') + iter.next() + + false + } + else { + sb append ch + true + } + } + def hasNext: Boolean = iter.hasNext + def next(): String = { + sb.clear() + while (getc()) { } + sb.toString + } + } + + /** Returns an iterator who returns lines (NOT including newline character(s)). + * It will treat any of \r\n, \r, or \n as a line separator (longest match) - if + * you need more refined behavior you can subclass Source#LineIterator directly. + */ + def getLines(): Iterator[String] = new LineIterator() + + /** Returns `'''true'''` if this source has more characters. + */ + def hasNext: Boolean = iter.hasNext + + /** Returns next character. + */ + def next(): Char = positioner.next() + + @nowarn("cat=deprecation") + class Positioner(encoder: Position) { + def this() = this(RelaxedPosition) + /** the last character returned by next. */ + var ch: Char = _ + + /** position of last character returned by next */ + var pos = 0 + + /** current line and column */ + var cline = 1 + var ccol = 1 + + /** default col increment for tabs '\t', set to 4 initially */ + var tabinc = 4 + + def next(): Char = { + ch = iter.next() + pos = encoder.encode(cline, ccol) + ch match { + case '\n' => + ccol = 1 + cline += 1 + case '\t' => + ccol += tabinc + case _ => + ccol += 1 + } + ch + } + } + /** A Position implementation which ignores errors in + * the positions. + */ + @nowarn("cat=deprecation") + object RelaxedPosition extends Position { + private val _ = Source.this + def checkInput(line: Int, column: Int): Unit = () + } + object RelaxedPositioner extends Positioner(RelaxedPosition) { } + object NoPositioner extends Positioner(Position) { + override def next(): Char = iter.next() + } + def ch: Char = positioner.ch + def pos: Int = positioner.pos + + /** Reports an error message to the output stream `out`. 
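+ * For instance (illustrative), `source.reportError(source.pos, "unexpected character")`
+ * increments `nerrors` and prints a positioned message to `Console.err`.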
+ * + * @param pos the source position (line/column) + * @param msg the error message to report + * @param out PrintStream to use (optional: defaults to `Console.err`) + */ + def reportError( + pos: Int, + msg: String, + out: PrintStream = Console.err): Unit = + { + nerrors += 1 + report(pos, msg, out) + } + + private def spaces(n: Int) = List.fill(n)(' ').mkString + /** + * @param pos the source position (line/column) + * @param msg the error message to report + * @param out PrintStream to use + */ + def report(pos: Int, msg: String, out: PrintStream): Unit = { + val line = Position line pos + val col = Position column pos + + out println "%s:%d:%d: %s%s%s^".format(descr, line, col, msg, lineNum(line), spaces(col - 1)) + } + + /** + * @param pos the source position (line/column) + * @param msg the warning message to report + * @param out PrintStream to use (optional: defaults to `Console.out`) + */ + def reportWarning( + pos: Int, + msg: String, + out: PrintStream = Console.out): Unit = + { + nwarnings += 1 + report(pos, "warning! " + msg, out) + } + + private[this] var resetFunction: () => Source = null + private[this] var closeFunction: () => Unit = null + private[this] var positioner: Positioner = RelaxedPositioner + + def withReset(f: () => Source): this.type = { + resetFunction = f + this + } + def withClose(f: () => Unit): this.type = { + closeFunction = f + this + } + def withDescription(text: String): this.type = { + descr = text + this + } + /** Change or disable the positioner. */ + def withPositioning(on: Boolean): this.type = { + positioner = if (on) RelaxedPositioner else NoPositioner + this + } + def withPositioning(pos: Positioner): this.type = { + positioner = pos + this + } + + /** The close() method closes the underlying resource. */ + def close(): Unit = { + if (closeFunction != null) closeFunction() + } + + /** The reset() method creates a fresh copy of this Source. */ + def reset(): Source = + if (resetFunction != null) resetFunction() + else throw new UnsupportedOperationException("Source's reset() method was not set.") +}