
Commit c664f6d

use new wildcard syntax
1 parent 6df3ffc commit c664f6d

18 files changed, +126 −126 lines changed
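
The change is purely syntactic: every occurrence of the old underscore wildcard in a type position (`Foo[_]`) is rewritten to the new `?` wildcard (`Foo[?]`). The `?` form is the standard wildcard in Scala 3, where `_` in type position is being phased out; on Scala 2.13 it is typically accepted when cross-building with a flag such as `-Xsource:3`. A minimal sketch of the two spellings (the function names below are illustrative, not taken from the commit):

    // old spelling: underscore as the type wildcard
    def sizeOfOld(xs: Iterable[_]): Int = xs.size

    // new spelling used throughout this commit: `?` as the type wildcard
    def sizeOfNew(xs: Iterable[?]): Int = xs.size

    sizeOfNew(List(1, 2, 3)) // 3 -- the element type is irrelevant
    sizeOfNew(Set("a", "b")) // 2

Both spellings mean "some unknown type argument", so the rewrite changes only surface syntax, not behavior.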

core/src/main/scala/scala/collection/generic/ParFactory.scala

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ extends GenericParCompanion[CC] {
   def concat[A](xss: Iterable[A]*): CC[A] = {
     val b = newBuilder[A]
     // At present we're using IndexedSeq as a proxy for "has a cheap size method".
-    if (xss forall (_.isInstanceOf[IndexedSeq[_]]))
+    if (xss forall (_.isInstanceOf[IndexedSeq[?]]))
       b.sizeHint(xss.map(_.size).sum)

     for (xs <- xss) b ++= xs

core/src/main/scala/scala/collection/generic/ParSetFactory.scala

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ import scala.collection.parallel.ParSetLike
  * @define factoryInfo
  *   This object provides a set of operations needed to create `$Coll` values.
  */
-abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC, CC[X], _] with GenericParTemplate[X, CC]]
+abstract class ParSetFactory[CC[X] <: ParSet[X] with ParSetLike[X, CC, CC[X], ?] with GenericParTemplate[X, CC]]
   extends GenericParCompanion[CC] {
   def newBuilder[A]: Combiner[A, CC[A]] = newCombiner[A]

core/src/main/scala/scala/collection/immutable/OldHashMap.scala

Lines changed: 6 additions & 6 deletions
@@ -423,13 +423,13 @@ object OldHashMap extends MapFactory[OldHashMap] {
           val sizeNew = size - sub.size
           // if we have only one child, which is not a HashTrieSet but a self-contained set like
           // HashSet1 or HashSetCollision1, return the child instead
-          if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[_,_]])
+          if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieMap[?,?]])
             elemsNew(0)
           else
             new HashTrieMap(bitmapNew, elemsNew, sizeNew)
         } else
           OldHashMap.empty[K,V]
-      } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[_,_]]) {
+      } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieMap[?,?]]) {
        subNew
       } else {
         val elemsNew = java.util.Arrays.copyOf(elems, elems.length)
@@ -529,9 +529,9 @@ object OldHashMap extends MapFactory[OldHashMap] {
     }

     protected def merge0[V1 >: V](that: OldHashMap[K, V1], level: Int, merger: Merger[K, V1]): OldHashMap[K, V1] = that match {
-      case hm: OldHashMap1[_, _] =>
+      case hm: OldHashMap1[?, ?] =>
         this.updated0(hm.key, hm.hash, level, hm.value.asInstanceOf[V1], hm.kv, merger)
-      case hm: HashTrieMap[_, _] =>
+      case hm: HashTrieMap[?, ?] =>
         val that = hm.asInstanceOf[HashTrieMap[K, V1]]
         val thiselems = this.elems
         val thatelems = that.elems
@@ -582,8 +582,8 @@ object OldHashMap extends MapFactory[OldHashMap] {
         }

         new HashTrieMap[K, V1](this.bitmap | that.bitmap, merged, totalelems)
-      case hm: OldHashMapCollision1[_, _] => that.merge0(this, level, merger.invert)
-      case hm: OldHashMap[_, _] => this
+      case hm: OldHashMapCollision1[?, ?] => that.merge0(this, level, merger.invert)
+      case hm: OldHashMap[?, ?] => this
     }
   }

core/src/main/scala/scala/collection/immutable/OldHashSet.scala

Lines changed: 2 additions & 2 deletions
@@ -579,13 +579,13 @@ object OldHashSet extends IterableFactory[OldHashSet] {
           val sizeNew = size - sub.size
           // if we have only one child, which is not a HashTrieSet but a self-contained set like
           // OldHashSet1 or OldHashSetCollision1, return the child instead
-          if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieSet[_]])
+          if (elemsNew.length == 1 && !elemsNew(0).isInstanceOf[HashTrieSet[?]])
             elemsNew(0)
           else
             new HashTrieSet(bitmapNew, elemsNew, sizeNew)
         } else
           null
-      } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieSet[_]]) {
+      } else if(elems.length == 1 && !subNew.isInstanceOf[HashTrieSet[?]]) {
        subNew
       } else {
         val elemsNew = java.util.Arrays.copyOf(elems, elems.length)

core/src/main/scala/scala/collection/immutable/TrieIterator.scala

Lines changed: 8 additions & 8 deletions
@@ -46,23 +46,23 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
   private[this] var subIter = initSubIter

   private[this] def getElems(x: Iterable[T]): Array[Iterable[T]] = ((x: @unchecked) match {
-    case x: HashTrieMap[_, _] => x.elems
-    case x: HashTrieSet[_] => x.elems
+    case x: HashTrieMap[?, ?] => x.elems
+    case x: HashTrieSet[?] => x.elems
   }).asInstanceOf[Array[Iterable[T]]]

   private[this] def collisionToArray(x: Iterable[T]): Array[Iterable[T]] = ((x: @unchecked) match {
-    case x: OldHashMapCollision1[_, _] => x.kvs.map((x: (Any, Any)) => OldHashMap(x)).toArray
-    case x: OldHashSetCollision1[_] => x.ks.map(x => OldHashSet(x)).toArray
+    case x: OldHashMapCollision1[?, ?] => x.kvs.map((x: (Any, Any)) => OldHashMap(x)).toArray
+    case x: OldHashSetCollision1[?] => x.ks.map(x => OldHashSet(x)).toArray
   }).asInstanceOf[Array[Iterable[T]]]

   private[this] type SplitIterators = ((Iterator[T], Int), Iterator[T])

   private def isTrie(x: AnyRef) = x match {
-    case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true
+    case _: HashTrieMap[?,?] | _: HashTrieSet[?] => true
     case _ => false
   }
   private def isContainer(x: AnyRef) = x match {
-    case _: OldHashMap1[_, _] | _: OldHashSet1[_] => true
+    case _: OldHashMap1[?, ?] | _: OldHashSet1[?] => true
     case _ => false
   }

@@ -84,7 +84,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
   }

   private[this] def iteratorWithSize(arr: Array[Iterable[T]]): (Iterator[T], Int) =
-    (newIterator(arr), ((arr.map(_.size): Array[Int]): scala.collection.IterableOps[Int, scala.collection.Iterable, _]).sum)
+    (newIterator(arr), ((arr.map(_.size): Array[Int]): scala.collection.IterableOps[Int, scala.collection.Iterable, ?]).sum)

   private[this] def arrayToIterators(arr: Array[Iterable[T]]): SplitIterators = {
     val (fst, snd) = arr.splitAt(arr.length / 2)
@@ -94,7 +94,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
   private[this] def splitArray(ad: Array[Iterable[T]]): SplitIterators =
     if (ad.length > 1) arrayToIterators(ad)
     else ad(0) match {
-      case _: OldHashMapCollision1[_, _] | _: OldHashSetCollision1[_] =>
+      case _: OldHashMapCollision1[?, ?] | _: OldHashSetCollision1[?] =>
         arrayToIterators(collisionToArray(ad(0)))
       case _ =>
         splitArray(getElems(ad(0)))

core/src/main/scala/scala/collection/parallel/CollectionConverters.scala

Lines changed: 4 additions & 4 deletions
@@ -27,7 +27,7 @@ object CollectionConverters {
     def seq = coll
     override def par = coll match {
       case coll: sc.Set[A @unchecked] => new SetIsParallelizable(coll).par
-      case coll: sc.Map[_, _] => new MapIsParallelizable(coll).par.asInstanceOf[ParIterable[A]]
+      case coll: sc.Map[?, ?] => new MapIsParallelizable(coll).par.asInstanceOf[ParIterable[A]]
       case coll: sci.Iterable[A] => new ImmutableIterableIsParallelizable(coll).par
       case coll: scm.Iterable[A @unchecked] => new MutableIterableIsParallelizable(coll).par
       case _ => ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray, same as for scm.Iterable
@@ -39,7 +39,7 @@ object CollectionConverters {
     override def par = coll match {
       case coll: scm.Seq[A] => new MutableSeqIsParallelizable(coll).par
       case coll: scm.Set[A] => new MutableSetIsParallelizable(coll).par
-      case coll: scm.Map[_, _] => new MutableMapIsParallelizable(coll).par.asInstanceOf[mutable.ParIterable[A]]
+      case coll: scm.Map[?, ?] => new MutableMapIsParallelizable(coll).par.asInstanceOf[mutable.ParIterable[A]]
       case _ => mutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParArray
     }
   }
@@ -49,7 +49,7 @@ object CollectionConverters {
     override def par = coll match {
       case coll: sci.Seq[A] => new ImmutableSeqIsParallelizable(coll).par
       case coll: sci.Set[A @unchecked] => new ImmutableSetIsParallelizable(coll).par
-      case coll: sci.Map[_, _] => new ImmutableMapIsParallelizable(coll).par.asInstanceOf[immutable.ParIterable[A]]
+      case coll: sci.Map[?, ?] => new ImmutableMapIsParallelizable(coll).par.asInstanceOf[immutable.ParIterable[A]]
       case _ => immutable.ParIterable.newCombiner[A].fromSequential(seq) // builds ParVector
     }
   }
@@ -85,7 +85,7 @@ object CollectionConverters {
   implicit class ImmutableSeqIsParallelizable[A](private val coll: sci.Seq[A]) extends AnyVal with sc.CustomParallelizable[A, immutable.ParSeq[A]] {
     def seq = coll
     override def par = coll match {
-      case coll: sci.Vector[_] => new VectorIsParallelizable(coll.asInstanceOf[sci.Vector[A]]).par
+      case coll: sci.Vector[?] => new VectorIsParallelizable(coll.asInstanceOf[sci.Vector[A]]).par
       case coll: sci.Range => new RangeIsParallelizable(coll).par.asInstanceOf[immutable.ParSeq[A]]
       case _ => immutable.ParSeq.newCombiner[A].fromSequential(seq)
     }

core/src/main/scala/scala/collection/parallel/ParIterableLike.scala

Lines changed: 4 additions & 4 deletions
@@ -328,7 +328,7 @@ extends IterableOnce[T @uncheckedVariance]
         if (cb.getClass == t.runtimeClass) isbody(cb.asInstanceOf[Cmb]) else notbody
       }
     }
-    def isCombiner = cb.isInstanceOf[Combiner[_, _]]
+    def isCombiner = cb.isInstanceOf[Combiner[?, ?]]
     def asCombiner = cb.asInstanceOf[Combiner[Elem, To]]
   }

@@ -877,7 +877,7 @@ extends IterableOnce[T @uncheckedVariance]

   protected[this] trait NonDivisible[R] extends NonDivisibleTask[R, NonDivisible[R]]

-  protected[this] abstract class Composite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
+  protected[this] abstract class Composite[FR, SR, R, First <: StrictSplitterCheckTask[FR, ?], Second <: StrictSplitterCheckTask[SR, ?]]
     (val ft: First, val st: Second)
   extends NonDivisibleTask[R, Composite[FR, SR, R, First, Second]] {
     def combineResults(fr: FR, sr: SR): R
@@ -894,7 +894,7 @@ extends IterableOnce[T @uncheckedVariance]
   }

   /** Sequentially performs one task after another. */
-  protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
+  protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, ?], Second <: StrictSplitterCheckTask[SR, ?]]
     (f: First, s: Second)
   extends Composite[FR, SR, R, First, Second](f, s) {
     def leaf(prevr: Option[R]) = {
@@ -905,7 +905,7 @@ extends IterableOnce[T @uncheckedVariance]
   }

   /** Performs two tasks in parallel, and waits for both to finish. */
-  protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]]
+  protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, ?], Second <: StrictSplitterCheckTask[SR, ?]]
     (f: First, s: Second)
   extends Composite[FR, SR, R, First, Second](f, s) {
     def leaf(prevr: Option[R]) = {

core/src/main/scala/scala/collection/parallel/ParMapLike.scala

Lines changed: 1 addition & 1 deletion
@@ -53,7 +53,7 @@ self =>
    *  same mappings, `false` otherwise.
    */
   override def equals(that: Any): Boolean = that match {
-    case that: ParMap[b, _] =>
+    case that: ParMap[b, ?] =>
       (this eq that) ||
       (that canEqual this) &&
       (this.size == that.size) && {
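
The pattern above mixes a type-variable binder with a wildcard: in `case that: ParMap[b, ?]`, the lower-case `b` binds the key type so it can be referred to later in the case body, while `?` ignores the value type. A small, hypothetical sketch of the same idiom (names are illustrative, not from the library):

    // A lower-case identifier in a type pattern binds a type variable;
    // `?` (formerly `_`) simply ignores that position.
    def firstKey(m: Any): Option[Any] = m match {
      case m: scala.collection.Map[k, ?] => m.keysIterator.nextOption() // k is bound here
      case _ => None
    }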

core/src/main/scala/scala/collection/parallel/ParSeqLike.scala

Lines changed: 1 addition & 1 deletion
@@ -467,7 +467,7 @@ extends ParIterableLike[T, CC, Repr, Sequential]

   /* tasks */

-  protected[this] def down(p: IterableSplitter[_]) = p.asInstanceOf[SeqSplitter[T]]
+  protected[this] def down(p: IterableSplitter[?]) = p.asInstanceOf[SeqSplitter[T]]

   protected trait ParSeqLikeAccessor[R, Tp] extends Accessor[R, Tp] {
     protected[this] val pit: SeqSplitter[T]

core/src/main/scala/scala/collection/parallel/ParSetLike.scala

Lines changed: 1 addition & 1 deletion
@@ -97,7 +97,7 @@ extends ParIterableLike[T, CC, Repr, Sequential]
    *  as this set.
    */
   override def equals(that: Any): Boolean = that match {
-    case that: ParSet[_] =>
+    case that: ParSet[?] =>
       (this eq that) ||
       (that canEqual this) &&
       (this.size == that.size) &&

core/src/main/scala/scala/collection/parallel/Tasks.scala

Lines changed: 1 addition & 1 deletion
@@ -68,7 +68,7 @@ trait Task[R, +Tp] {
     mergeThrowables(that)
   }

-  private[parallel] def mergeThrowables(that: Task[_, _]): Unit =
+  private[parallel] def mergeThrowables(that: Task[?, ?]): Unit =
     if (this.throwable != null) {
       if (that.throwable != null && (this.throwable ne that.throwable))
         this.throwable.addSuppressed(that.throwable)

core/src/main/scala/scala/collection/parallel/immutable/ParHashMap.scala

Lines changed: 2 additions & 2 deletions
@@ -305,10 +305,10 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V
     evaluateCombiners(trie).asInstanceOf[OldHashMap[K, Repr]]
   }
   private def evaluateCombiners(trie: OldHashMap[K, Combiner[V, Repr]]): OldHashMap[K, Repr] = trie match {
-    case hm1: OldHashMap.OldHashMap1[_, _] =>
+    case hm1: OldHashMap.OldHashMap1[?, ?] =>
       val evaledvalue = hm1.value.result()
       new OldHashMap.OldHashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null)
-    case hmc: OldHashMap.OldHashMapCollision1[_, Combiner[_, Repr]] =>
+    case hmc: OldHashMap.OldHashMapCollision1[?, Combiner[?, Repr]] =>
       val evaledkvs = hmc.kvs map { p => (p._1, p._2.result()) }
       new OldHashMap.OldHashMapCollision1[K, Repr](hmc.hash, evaledkvs)
     case htm: OldHashMap.HashTrieMap[k, v] =>

core/src/main/scala/scala/collection/parallel/mutable/LazyCombiner.scala

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ trait LazyCombiner[Elem, +To, Buff <: Growable[Elem] with Sizing] extends Combin
   def result(): To = allocateAndCopy
   def clear() = { chain.clear() }
   def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this ne other) {
-    if (other.isInstanceOf[LazyCombiner[_, _, _]]) {
+    if (other.isInstanceOf[LazyCombiner[?, ?, ?]]) {
       val that = other.asInstanceOf[LazyCombiner[Elem, To, Buff]]
       newLazyCombiner(chain ++= that.chain)
     } else throw new UnsupportedOperationException("Cannot combine with combiner of different type.")

core/src/main/scala/scala/collection/parallel/mutable/ParTrieMap.scala

Lines changed: 4 additions & 4 deletions
@@ -80,9 +80,9 @@ extends ParMap[K, V]
     val in = ctrie.readRoot()
     val r = in.gcasRead(ctrie)
     (r: @unchecked) match {
-      case tn: TNode[_, _] => tn.cachedSize(ctrie)
-      case ln: LNode[_, _] => ln.cachedSize(ctrie)
-      case cn: CNode[_, _] =>
+      case tn: TNode[?, ?] => tn.cachedSize(ctrie)
+      case ln: LNode[?, ?] => ln.cachedSize(ctrie)
+      case cn: CNode[?, ?] =>
         tasksupport.executeAndWaitResult(new Size(0, cn.array.length, cn.array))
         cn.cachedSize(ctrie)
     }
@@ -103,7 +103,7 @@ extends ParMap[K, V]
     val until = offset + howmany
     while (i < until) {
       (array(i): @unchecked) match {
-        case sn: SNode[_, _] => sz += 1
+        case sn: SNode[?, ?] => sz += 1
         case in: INode[K @unchecked, V @unchecked] => sz += in.cachedSize(ctrie)
       }
       i += 1

core/src/main/scala/scala/collection/parallel/package.scala

Lines changed: 5 additions & 5 deletions
@@ -40,7 +40,7 @@ package object parallel {

   def setTaskSupport[Coll](c: Coll, t: TaskSupport): Coll = {
     c match {
-      case pc: ParIterableLike[_, _, _, _] => pc.tasksupport = t
+      case pc: ParIterableLike[?, ?, ?, ?] => pc.tasksupport = t
       case _ => // do nothing
     }
     c
@@ -50,7 +50,7 @@ package object parallel {
   implicit class CollectionsHaveToParArray[C, T](c: C)(implicit asGto: C => scala.collection.IterableOnce[T]) {
     def toParArray = {
       val t = asGto(c)
-      if (t.isInstanceOf[ParArray[_]]) t.asInstanceOf[ParArray[T]]
+      if (t.isInstanceOf[ParArray[?]]) t.asInstanceOf[ParArray[T]]
       else {
         val it = t.iterator
         val cb = mutable.ParArrayCombiner[T]()
@@ -67,9 +67,9 @@ package parallel {
   private[collection] object ParallelCollectionImplicits {
     implicit def traversable2ops[T](t: scala.collection.IterableOnce[T]): TraversableOps[T] = new TraversableOps[T] {
       def isParallel = t.isInstanceOf[Parallel]
-      def isParIterable = t.isInstanceOf[ParIterable[_]]
+      def isParIterable = t.isInstanceOf[ParIterable[?]]
       def asParIterable = t.asInstanceOf[ParIterable[T]]
-      def isParSeq = t.isInstanceOf[ParSeq[_]]
+      def isParSeq = t.isInstanceOf[ParSeq[?]]
       def asParSeq = t.asInstanceOf[ParSeq[T]]
       def ifParSeq[R](isbody: ParSeq[T] => R) = new Otherwise[R] {
         def otherwise(notbody: => R) = if (isParallel) isbody(asParSeq) else notbody
@@ -184,7 +184,7 @@ package parallel {
     def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = {
       if (this eq other) this
       else other match {
-        case _: BucketCombiner[_, _, _, _] =>
+        case _: BucketCombiner[?, ?, ?, ?] =>
           beforeCombine(other)
           val that = other.asInstanceOf[BucketCombiner[Elem, To, Buck, CombinerType]]

junit/src/test/scala/MiscTest.scala

Lines changed: 3 additions & 3 deletions
@@ -25,7 +25,7 @@ class MiscTest {
     }
   }

-  def foo(arg: ParSeq[_]): String = arg.map(x => x).mkString(",")
+  def foo(arg: ParSeq[?]): String = arg.map(x => x).mkString(",")

   @Test
   def si4608: Unit = {
@@ -113,9 +113,9 @@ class MiscTest {
   @Test
   def si6510: Unit = {
     val x = collection.parallel.mutable.ParArray.range(1,10) groupBy { _ % 2 } mapValues { _.size }
-    assertTrue(x.isInstanceOf[parallel.ParMap[_, _]])
+    assertTrue(x.isInstanceOf[parallel.ParMap[?, ?]])
     val y = collection.parallel.immutable.ParVector.range(1,10) groupBy { _ % 2 } mapValues { _.size }
-    assertTrue(y.isInstanceOf[parallel.ParMap[_, _]])
+    assertTrue(y.isInstanceOf[parallel.ParMap[?, ?]])
   }

   @Test
