Skip to content

Commit d1b977f

Browse files
SethTisue authored and som-snytt committed
prepare for Dotty: add types
Co-authored-by: Som Snytt <[email protected]>
1 parent b0f709c commit d1b977f

File tree

6 files changed

+16
-16
lines changed

6 files changed

+16
-16
lines changed

core/src/main/scala/scala/collection/parallel/RemainsIterator.scala

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -465,7 +465,7 @@ self =>
465465
def split: Seq[IterableSplitter[S]] = self.split.map { _ map f }
466466
}
467467

468-
override def map[S](f: T => S) = new Mapped(f)
468+
override def map[S](f: T => S): IterableSplitter[S] = new Mapped[S](f)
469469

470470
class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] {
471471
signalDelegate = self.signalDelegate
@@ -500,7 +500,7 @@ self =>
500500
}
501501
}
502502

503-
def zipParSeq[S](that: SeqSplitter[S]) = new Zipped(that)
503+
def zipParSeq[S](that: SeqSplitter[S]): IterableSplitter[(T, S)] = new Zipped(that)
504504

505505
class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S)
506506
extends IterableSplitter[(U, S)] {
@@ -578,7 +578,7 @@ self =>
578578
def psplit(sizes: Int*): Seq[SeqSplitter[S]] = self.psplit(sizes: _*).map { _ map f }
579579
}
580580

581-
override def map[S](f: T => S) = new RemainsIteratorMapped(f)
581+
override def map[S](f: T => S): SeqSplitter[S] = new RemainsIteratorMapped(f)
582582

583583
class RemainsIteratorAppended[U >: T, PI <: SeqSplitter[U]](it: PI) extends Appended[U, PI](it) with SeqSplitter[U] {
584584
override def dup = super.dup.asInstanceOf[SeqSplitter[U]]
@@ -614,10 +614,10 @@ self =>
614614
class RemainsIteratorZipped[S](ti: SeqSplitter[S]) extends Zipped[S](ti) with SeqSplitter[(T, S)] {
615615
override def dup = super.dup.asInstanceOf[SeqSplitter[(T, S)]]
616616
override def split: Seq[SeqSplitter[(T, S)]] = super.split.asInstanceOf[Seq[SeqSplitter[(T, S)]]]
617-
def psplit(szs: Int*) = (self.psplit(szs: _*) zip that.psplit(szs: _*)) map { p => p._1 zipParSeq p._2 }
617+
def psplit(szs: Int*) = (self.psplit(szs: _*) zip that.psplit(szs: _*)) map { p => (p._1 zipParSeq p._2) }
618618
}
619619

620-
override def zipParSeq[S](that: SeqSplitter[S]) = new RemainsIteratorZipped(that)
620+
override def zipParSeq[S](that: SeqSplitter[S]): SeqSplitter[(T, S)] = new RemainsIteratorZipped(that)
621621

622622
class RemainsIteratorZippedAll[U >: T, S](ti: SeqSplitter[S], thise: U, thate: S) extends ZippedAll[U, S](ti, thise, thate) with SeqSplitter[(U, S)] {
623623
override def dup = super.dup.asInstanceOf[SeqSplitter[(U, S)]]
@@ -635,7 +635,7 @@ self =>
635635
}
636636
def psplit(sizes: Int*): Seq[SeqSplitter[(U, S)]] = {
637637
val (thisit, thatit) = patchem
638-
val zipped = thisit zipParSeq thatit
638+
val zipped = (thisit zipParSeq thatit)
639639
zipped.psplit(sizes: _*)
640640
}
641641
}

core/src/main/scala/scala/collection/parallel/Tasks.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -306,7 +306,7 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS
306306
def split = body.split.map(b => newWrappedTask(b))
307307
}
308308

309-
def newWrappedTask[R, Tp](b: Task[R, Tp]) = new AWSFJTWrappedTask[R, Tp](b)
309+
def newWrappedTask[R, Tp](b: Task[R, Tp]): AWSFJTWrappedTask[R, Tp] = new AWSFJTWrappedTask[R, Tp](b)
310310
}
311311

312312
/** An implementation of the `Tasks` that uses Scala `Future`s to compute

core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -92,9 +92,9 @@ self =>
9292
phit
9393
}
9494
def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match {
95-
case t: TrieIterator[_] =>
95+
case t: TrieIterator[(K, V)] =>
9696
val previousRemaining = remaining
97-
val ((fst: Iterator[(K, V) @unchecked], fstlength), snd: Iterator[(K, V) @unchecked]) = t.split
97+
val ((fst, fstlength), snd) = t.split
9898
val sndlength = previousRemaining - fstlength
9999
Seq(
100100
new ParHashMapIterator(fst, fstlength),
@@ -308,7 +308,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
308308
case hm1: OldHashMap.OldHashMap1[_, _] =>
309309
val evaledvalue = hm1.value.result()
310310
new OldHashMap.OldHashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null)
311-
case hmc: OldHashMap.OldHashMapCollision1[_, _] =>
311+
case hmc: OldHashMap.OldHashMapCollision1[_, Combiner[_, Repr]] =>
312312
val evaledkvs = hmc.kvs map { p => (p._1, p._2.result()) }
313313
new OldHashMap.OldHashMapCollision1[K, Repr](hmc.hash, evaledkvs)
314314
case htm: OldHashMap.HashTrieMap[k, v] =>

core/src/main/scala/scala/collection/parallel/immutable/ParHashSet.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ self =>
7878
extends IterableSplitter[T] {
7979
var i = 0
8080
def dup = triter match {
81-
case t: TrieIterator[_] =>
81+
case t: TrieIterator[T] =>
8282
dupFromIterator(t.dupIterator)
8383
case _ =>
8484
val buff = triter.toBuffer
@@ -91,7 +91,7 @@ self =>
9191
phit
9292
}
9393
def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match {
94-
case t: TrieIterator[_] =>
94+
case t: TrieIterator[T] =>
9595
val previousRemaining = remaining
9696
val ((fst, fstlength), snd) = t.split
9797
val sndlength = previousRemaining - fstlength

core/src/main/scala/scala/collection/parallel/mutable/ParHashMap.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
196196
} else {
197197
// construct a normal table and fill it sequentially
198198
// TODO parallelize by keeping separate sizemaps and merging them
199-
object table extends HashTable[K, DefaultEntry[K, V], DefaultEntry[K, V]] with WithContents[K, DefaultEntry[K, V], DefaultEntry[K, V]] {
199+
object newTable extends HashTable[K, DefaultEntry[K, V], DefaultEntry[K, V]] with WithContents[K, DefaultEntry[K, V], DefaultEntry[K, V]] {
200200
type Entry = DefaultEntry[K, V]
201201
def insertEntry(e: Entry): Unit = { super.findOrAddEntry(e.key, e) }
202202
def createNewEntry(key: K, entry: Entry): Entry = entry
@@ -205,11 +205,11 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
205205
var i = 0
206206
while (i < ParHashMapCombiner.numblocks) {
207207
if (buckets(i) ne null) {
208-
for (elem <- buckets(i)) table.insertEntry(elem)
208+
for (elem <- buckets(i)) newTable.insertEntry(elem)
209209
}
210210
i += 1
211211
}
212-
new ParHashMap(table.hashTableContents)
212+
new ParHashMap(newTable.hashTableContents)
213213
}
214214

215215
/* classes */

core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
127127
lazy val totalsize = new ParTrieMap(ct).size
128128
var iterated = 0
129129

130-
protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean) = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit)
130+
protected override def newIterator(_lev: Int, _ct: TrieMap[K, V], _mustInit: Boolean): ParTrieMapSplitter[K, V] = new ParTrieMapSplitter[K, V](_lev, _ct, _mustInit)
131131

132132
override def shouldSplitFurther[S](coll: scala.collection.parallel.ParIterable[S], parallelismLevel: Int) = {
133133
val maxsplits = 3 + Integer.highestOneBit(parallelismLevel)

0 commit comments

Comments (0)