Fix a bundle of patmat issues (#21000)
dwijnand authored Aug 5, 2024
2 parents b981231 + ab240f1 commit 976133a
Showing 22 changed files with 289 additions and 38 deletions.
5 changes: 3 additions & 2 deletions compiler/src/dotty/tools/dotc/config/Settings.scala
@@ -411,9 +411,10 @@ object Settings:
def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] =
publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation))

def PrefixSetting(category: SettingCategory, name: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] =
def PrefixSetting(category: SettingCategory, name0: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] =
val name = prependName(name0)
val prefix = name.takeWhile(_ != '<')
publish(Setting(category, "-" + name, descr, Nil, prefix = Some(prefix), deprecation = deprecation))
publish(Setting(category, name, descr, Nil, prefix = Some(prefix), deprecation = deprecation))

def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[ScalaVersion] =
publish(Setting(category, prependName(name), descr, default, legacyArgs = legacyArgs, deprecation = deprecation))
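For reference, a rough standalone sketch (hypothetical names, not the compiler's actual Settings API) of the behaviour this change preserves: a prefix setting's prefix is everything before the first '<' in its already "-"-prefixed name, and it claims any argument that starts with that prefix.

// Hypothetical sketch, not dotty code: illustrates prefix-based matching only.
final case class PrefixSettingSketch(name: String):
  val prefix: String = name.takeWhile(_ != '<')            // "-D<property>=<value>" -> "-D"
  def claims(arg: String): Boolean = arg.startsWith(prefix)

@main def prefixSettingDemo(): Unit =
  val defines = PrefixSettingSketch("-D<property>=<value>")
  println(defines.claims("-Dscala.version=3.5"))           // true
  println(defines.claims("-Xprint:typer"))                 // false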
14 changes: 2 additions & 12 deletions compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -35,13 +35,6 @@ class PatternMatcher extends MiniPhase {

override def runsAfter: Set[String] = Set(ElimRepeated.name)

private val InInlinedCode = new util.Property.Key[Boolean]
private def inInlinedCode(using Context) = ctx.property(InInlinedCode).getOrElse(false)

override def prepareForInlined(tree: Inlined)(using Context): Context =
if inInlinedCode then ctx
else ctx.fresh.setProperty(InInlinedCode, true)

override def transformMatch(tree: Match)(using Context): Tree =
if (tree.isInstanceOf[InlineMatch]) tree
else {
@@ -53,13 +46,10 @@ class PatternMatcher extends MiniPhase {
case rt => tree.tpe
val translated = new Translator(matchType, this).translateMatch(tree)

if !inInlinedCode then
// Skip analysis on inlined code (eg pos/i19157)
if !tpd.enclosingInlineds.nonEmpty then
// check exhaustivity and unreachability
SpaceEngine.checkMatch(tree)
else
// only check exhaustivity, as inlining may generate unreachable code
// like in i19157.scala
SpaceEngine.checkMatchExhaustivityOnly(tree)

translated.ensureConforms(matchType)
}
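With the InInlinedCode property gone, PatternMatcher now asks tpd.enclosingInlineds directly, and inlined matches get exhaustivity checking while still skipping reachability. A hedged sketch (not the actual pos/i19157 test) of why reachability stays off under inlining:

// After `describe(0)` is inlined the scrutinee is the literal 0, so the
// `case _` branch can look unreachable even though the source match is fine.
inline def describe(inline n: Int): String =
  n match
    case 0 => "zero"
    case _ => "non-zero"

def usage: String = describe(0)   // exhaustivity is still checked; reachability is not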
84 changes: 60 additions & 24 deletions compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
@@ -3,7 +3,9 @@ package dotc
package transform
package patmat

import core.*, Constants.*, Contexts.*, Decorators.*, Flags.*, Names.*, NameOps.*, StdNames.*, Symbols.*, Types.*
import core.*
import Constants.*, Contexts.*, Decorators.*, Flags.*, NullOpsDecorator.*, Symbols.*, Types.*
import Names.*, NameOps.*, StdNames.*
import ast.*, tpd.*
import config.Printers.*
import printing.{ Printer, * }, Texts.*
@@ -350,7 +352,7 @@ object SpaceEngine {
val funRef = fun1.tpe.asInstanceOf[TermRef]
if (fun.symbol.name == nme.unapplySeq)
val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos)
if (fun.symbol.owner == defn.SeqFactoryClass && defn.ListType.appliedTo(elemTp) <:< pat.tpe)
if fun.symbol.owner == defn.SeqFactoryClass && pat.tpe.hasClassSymbol(defn.ListClass) then
// The exhaustivity and reachability logic already handles decomposing sum types (into its subclasses)
// and product types (into its components). To get better counter-examples for patterns that are of type
// List (or a super-type of list, like LinearSeq) we project them into spaces that use `::` and Nil.
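An illustrative example (not part of the commit) of the projection described above: decomposing a List scrutinee into `::` and Nil spaces lets the checker report a concrete missing shape rather than a bare wildcard.

// Non-exhaustive on purpose: the checker can now point at a missing shape
// such as a list of three or more elements instead of just `_`.
def firstTwo(xs: List[Int]): Int = xs match
  case Nil           => 0
  case x :: Nil      => x
  case x :: y :: Nil => x + y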
Expand Down Expand Up @@ -522,14 +524,26 @@ object SpaceEngine {
val mt: MethodType = unapp.widen match {
case mt: MethodType => mt
case pt: PolyType =>
val locked = ctx.typerState.ownedVars
val tvars = constrained(pt)
val mt = pt.instantiate(tvars).asInstanceOf[MethodType]
scrutineeTp <:< mt.paramInfos(0)
// force type inference to infer a narrower type: could be singleton
// see tests/patmat/i4227.scala
mt.paramInfos(0) <:< scrutineeTp
instantiateSelected(mt, tvars)
isFullyDefined(mt, ForceDegree.all)
maximizeType(mt.paramInfos(0), Spans.NoSpan)
if !(ctx.typerState.ownedVars -- locked).isEmpty then
// constraining can create type vars out of wildcard types
// (in legalBound, by using a LevelAvoidMap)
// maximise will only do one pass at maximising the type vars in the target type
// which means we can maximise to types that include other type vars
// this fails TreeChecker's "non-empty constraint at end of $fusedPhase" check
// e.g. run-macros/string-context-implicits
// I can't prove that a second call won't also create type vars,
// but I'd rather have an unassigned new-new type var, than an infinite loop.
// After all, there's nothing strictly "wrong" with unassigned type vars,
// it just fails TreeChecker's linting.
maximizeType(mt.paramInfos(0), Spans.NoSpan)
mt
}

@@ -543,7 +557,7 @@ object SpaceEngine {
// Case unapplySeq:
// 1. return the type `List[T]` where `T` is the element type of the unapplySeq return type `Seq[T]`

val resTp = ctx.typeAssigner.safeSubstMethodParams(mt, scrutineeTp :: Nil).finalResultType
val resTp = wildApprox(ctx.typeAssigner.safeSubstMethodParams(mt, scrutineeTp :: Nil).finalResultType)

val sig =
if (resTp.isRef(defn.BooleanClass))
@@ -564,20 +578,14 @@ object SpaceEngine {
if (arity > 0)
productSelectorTypes(resTp, unappSym.srcPos)
else {
val getTp = resTp.select(nme.get).finalResultType match
case tp: TermRef if !tp.isOverloaded =>
// Like widenTermRefExpr, except not recursively.
// For example, in i17184 widen Option[foo.type]#get
// to Option[foo.type] instead of Option[Int].
tp.underlying.widenExpr
case tp => tp
val getTp = extractorMemberType(resTp, nme.get, unappSym.srcPos)
if (argLen == 1) getTp :: Nil
else productSelectorTypes(getTp, unappSym.srcPos)
}
}
}

sig.map(_.annotatedToRepeated)
sig.map { case tp: WildcardType => tp.bounds.hi case tp => tp }
}

/** Whether the extractor covers the given type */
@@ -616,14 +624,36 @@ object SpaceEngine {
case tp if tp.classSymbol.isAllOf(JavaEnum) => tp.classSymbol.children.map(_.termRef)
// the class of a java enum value is the enum class, so this must follow SingletonType to not loop infinitely

case tp @ AppliedType(Parts(parts), targs) if tp.classSymbol.children.isEmpty =>
case Childless(tp @ AppliedType(Parts(parts), targs)) =>
// It might not be obvious that it's OK to apply the type arguments of a parent type to child types.
// But this is guarded by `tp.classSymbol.children.isEmpty`,
// meaning we'll decompose to the same class, just not the same type.
// For instance, from i15029, `decompose((X | Y).Field[T]) = [X.Field[T], Y.Field[T]]`.
parts.map(tp.derivedAppliedType(_, targs))

case tp if tp.isDecomposableToChildren =>
case tpOriginal if tpOriginal.isDecomposableToChildren =>
// isDecomposableToChildren uses .classSymbol.is(Sealed)
// But that classSymbol could be from an AppliedType
// where the type constructor is a non-class type
// E.g. t11620 where `?1.AA[X]` returns as "sealed"
// but using that we're not going to infer A1[X] and A2[X]
// but end up with A1[<?>] and A2[<?>].
// So we widen (like AppliedType superType does) away
// non-class type constructors.
//
// Can't use `tpOriginal.baseType(cls)` because it causes
// i15893 to return exhaustivity warnings, because instead of:
// <== refineUsingParent(N, class Succ, []) = Succ[<? <: NatT>]
// <== isSub(Succ[<? <: NatT>] <:< Succ[Succ[<?>]]) = true
// we get
// <== refineUsingParent(NatT, class Succ, []) = Succ[NatT]
// <== isSub(Succ[NatT] <:< Succ[Succ[<?>]]) = false
def getAppliedClass(tp: Type): Type = tp match
case tp @ AppliedType(_: HKTypeLambda, _) => tp
case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => tp
case tp @ AppliedType(tycon: TypeProxy, _) => getAppliedClass(tycon.superType.applyIfParameterized(tp.args))
case tp => tp
val tp = getAppliedClass(tpOriginal)
def getChildren(sym: Symbol): List[Symbol] =
sym.children.flatMap { child =>
if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz...
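A minimal sketch of the i3145 situation behind the `child eq sym` guard: instantiating an anonymous subclass of a sealed trait makes the trait appear among its own children.

sealed trait Baz
object Baz:
  val x = new Baz {}   // Baz.children now includes Baz itself, hence the guard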
@@ -676,6 +706,12 @@ object SpaceEngine {
final class PartsExtractor(val get: List[Type]) extends AnyVal:
def isEmpty: Boolean = get == ListOfNoType

object Childless:
def unapply(tp: Type)(using Context): Result =
Result(if tp.classSymbol.children.isEmpty then tp else NoType)
class Result(val get: Type) extends AnyVal:
def isEmpty: Boolean = !get.exists

/** Show friendly type name with current scope in mind
*
* E.g. C.this.B --> B if current owner is C
@@ -772,12 +808,15 @@ object SpaceEngine {
doShow(s)
}

private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = {
extension (self: Type) private def stripUnsafeNulls()(using Context): Type =
if Nullables.unsafeNullsEnabled then self.stripNull() else self

private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = trace(i"exhaustivityCheckable($sel ${sel.className})") {
val seen = collection.mutable.Set.empty[Symbol]

// Possible to check everything, but be compatible with scalac by default
def isCheckable(tp: Type): Boolean =
val tpw = tp.widen.dealias
def isCheckable(tp: Type): Boolean = trace(i"isCheckable($tp ${tp.className})"):
val tpw = tp.widen.dealias.stripUnsafeNulls()
val classSym = tpw.classSymbol
classSym.is(Sealed) && !tpw.isLargeGenericTuple || // exclude large generic tuples from exhaustivity
// requires an unknown number of changes to make work
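A hedged sketch (not one of the added tests) of what stripUnsafeNulls addresses: under -Yexplicit-nulls with scala.language.unsafeNulls, a nullable selector type would otherwise hide the underlying sealed class from the checkability test.

//> using options -Yexplicit-nulls
import scala.language.unsafeNulls

sealed trait Color
case object Red extends Color
case object Blue extends Color

// The selector is typed `Color | Null` (as a Java-originated value would be);
// stripping the Null keeps it recognisable as a checkable sealed type.
def colorName(c: Color | Null): String = c match
  case Red  => "red"
  case Blue => "blue"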
@@ -813,7 +852,7 @@ object SpaceEngine {
/** Return the underlying type of non-module, non-constant, non-enum case singleton types.
* Also widen ExprType to its result type, and rewrap any annotation wrappers.
* For example, with `val opt = None`, widen `opt.type` to `None.type`. */
def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp)")(tp match {
def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp ${tp.className})")(tp match {
case _: ConstantType => tp
case tp: TermRef if tp.symbol.is(Module) => tp
case tp: TermRef if tp.symbol.isAllOf(EnumCase) => tp
Expand All @@ -824,7 +863,7 @@ object SpaceEngine {
})

def checkExhaustivity(m: Match)(using Context): Unit = trace(i"checkExhaustivity($m)") {
val selTyp = toUnderlying(m.selector.tpe).dealias
val selTyp = toUnderlying(m.selector.tpe.stripUnsafeNulls()).dealias
val targetSpace = trace(i"targetSpace($selTyp)")(project(selTyp))

val patternSpace = Or(m.cases.foldLeft(List.empty[Space]) { (acc, x) =>
@@ -902,9 +941,6 @@ object SpaceEngine {
}

def checkMatch(m: Match)(using Context): Unit =
checkMatchExhaustivityOnly(m)
if reachabilityCheckable(m.selector) then checkReachability(m)

def checkMatchExhaustivityOnly(m: Match)(using Context): Unit =
if exhaustivityCheckable(m.selector) then checkExhaustivity(m)
if reachabilityCheckable(m.selector) then checkReachability(m)
}
13 changes: 13 additions & 0 deletions tests/warn/i15893.min.scala
@@ -0,0 +1,13 @@
sealed trait NatT
case class Zero() extends NatT
case class Succ[+N <: NatT](n: N) extends NatT

type Mod2[N <: NatT] <: NatT = N match
case Zero => Zero
case Succ[Zero] => Succ[Zero]
case Succ[Succ[predPredN]] => Mod2[predPredN]

def dependentlyTypedMod2[N <: NatT](n: N): Mod2[N] = n match
case Zero(): Zero => Zero() // warn
case Succ(Zero()): Succ[Zero] => Succ(Zero()) // warn
case Succ(Succ(predPredN)): Succ[Succ[?]] => dependentlyTypedMod2(predPredN) // warn
13 changes: 13 additions & 0 deletions tests/warn/i20121.scala
@@ -0,0 +1,13 @@
sealed trait T_A[A, B]
type X = T_A[Byte, Byte]

case class CC_B[A](a: A) extends T_A[A, X]

val v_a: T_A[X, X] = CC_B(null)
val v_b = v_a match
case CC_B(_) => 0 // warn: unreachable
case _ => 1
// for CC_B[A] to match T_A[X, X]
// A := X
// so require X, aka T_A[Byte, Byte]
// which isn't instantiable, outside of null
17 changes: 17 additions & 0 deletions tests/warn/i20122.scala
@@ -0,0 +1,17 @@
sealed trait T_B[C, D]

case class CC_A()
case class CC_B[A, C](a: A) extends T_B[C, CC_A]
case class CC_C[C, D](a: T_B[C, D]) extends T_B[Int, CC_A]
case class CC_E(a: CC_C[Char, Byte])

val v_a: T_B[Int, CC_A] = CC_B(CC_E(CC_C(null)))
val v_b = v_a match
case CC_B(CC_E(CC_C(_))) => 0 // warn: unreachable
case _ => 1
// for CC_B[A, C] to match T_B[C, CC_A]
// C <: Int, ok
// A <: CC_E, ok
// but you need a CC_C[Char, Byte]
// which requires a T_B[Char, Byte]
// which isn't instantiable, outside of null
16 changes: 16 additions & 0 deletions tests/warn/i20123.scala
@@ -0,0 +1,16 @@
sealed trait T_A[A, B]
sealed trait T_B[C]

case class CC_D[A, C]() extends T_A[A, C]
case class CC_E() extends T_B[Nothing]
case class CC_G[A, C](c: C) extends T_A[A, C]

val v_a: T_A[Boolean, T_B[Boolean]] = CC_G(null)
val v_b = v_a match {
case CC_D() => 0
case CC_G(_) => 1 // warn: unreachable
// for CC_G[A, C] to match T_A[Boolean, T_B[Boolean]]
// A := Boolean, which is ok
// C := T_B[Boolean],
// which isn't instantiable, outside of null
}
9 changes: 9 additions & 0 deletions tests/warn/i20128.scala
@@ -0,0 +1,9 @@
sealed trait T_A[A]
case class CC_B[A](a: T_A[A]) extends T_A[Byte]
case class CC_E[A](b: T_A[A]) extends T_A[Byte]

val v_a: T_A[Byte] = CC_E(CC_B(null))
val v_b: Int = v_a match { // warn: not exhaustive
case CC_E(CC_E(_)) => 0
case CC_B(_) => 1
}
14 changes: 14 additions & 0 deletions tests/warn/i20129.scala
@@ -0,0 +1,14 @@
sealed trait T_A[A]
case class CC_B[A](a: T_A[A], c: T_A[A]) extends T_A[Char]
case class CC_C[A]() extends T_A[A]
case class CC_G() extends T_A[Char]

val v_a: T_A[Char] = CC_B(CC_G(), CC_C())
val v_b: Int = v_a match { // warn: not exhaustive
case CC_C() => 0
case CC_G() => 1
case CC_B(CC_B(_, _), CC_C()) => 2
case CC_B(CC_C(), CC_C()) => 3
case CC_B(_, CC_G()) => 4
case CC_B(_, CC_B(_, _)) => 5
}
11 changes: 11 additions & 0 deletions tests/warn/i20130.scala
@@ -0,0 +1,11 @@
sealed trait T_A[B]
sealed trait T_B[C]
case class CC_B[C]() extends T_A[T_B[C]]
case class CC_C[B, C](c: T_A[B], d: T_B[C]) extends T_B[C]
case class CC_E[C]() extends T_B[C]

val v_a: T_B[Int] = CC_C(null, CC_E())
val v_b: Int = v_a match { // warn: not exhaustive
case CC_C(_, CC_C(_, _)) => 0
case CC_E() => 5
}
17 changes: 17 additions & 0 deletions tests/warn/i20131.scala
@@ -0,0 +1,17 @@
sealed trait Foo
case class Foo1() extends Foo
case class Foo2[A, B]() extends Foo

sealed trait Bar[A, B]
case class Bar1[A, C, D](a: Bar[C, D]) extends Bar[A, Bar[C, D]]
case class Bar2[ C, D](b: Bar[C, D], c: Foo) extends Bar[Bar1[Int, Byte, Int], Bar[C, D]]

class Test:
def m1(bar: Bar[Bar1[Int, Byte, Int], Bar[Char, Char]]): Int = bar match
case Bar1(_) => 0
case Bar2(_, Foo2()) => 1
def t1 = m1(Bar2(null, Foo1()))
// for Bar2[C, D] to match the scrutinee
// C := Char and D := Char
// which requires a Bar[Char, Char]
// which isn't instantiable, outside of null
8 changes: 8 additions & 0 deletions tests/warn/i20132.alt.scala
@@ -0,0 +1,8 @@
sealed trait Foo[A]
case class Bar[C](x: Foo[C]) extends Foo[C]
case class End[B]() extends Foo[B]
class Test:
def m1[M](foo: Foo[M]): Int = foo match // warn: not exhaustive
case End() => 0
case Bar(End()) => 1
def t1 = m1[Int](Bar[Int](Bar[Int](End[Int]())))
13 changes: 13 additions & 0 deletions tests/warn/i20132.future-Left.scala
@@ -0,0 +1,13 @@
//> using options -Yexplicit-nulls -Yno-flexible-types

import scala.language.unsafeNulls

import java.util.concurrent.CompletableFuture
import scala.jdk.CollectionConverters._

class Test1:
def m1 =
val fut: CompletableFuture[Either[String, List[String]]] = ???
fut.thenApply:
case Right(edits: List[String]) => edits.asJava
case Left(error: String) => throw new Exception(error)
10 changes: 10 additions & 0 deletions tests/warn/i20132.list-Seq.scala
@@ -0,0 +1,10 @@
class D1
class D2

class Test1:
type Ds = List[D1] | List[D2]
def m1(dss: List[Ds]) =
dss.flatMap:
case Seq(d) => Some(1)
case Seq(head, tail*) => Some(2)
case Seq() => None
8 changes: 8 additions & 0 deletions tests/warn/i20132.scala
@@ -0,0 +1,8 @@
sealed trait Foo[A]
case class Bar[C](x: Foo[C]) extends Foo[Int]
case class End[B]() extends Foo[B]
class Test:
def m1[M](foo: Foo[M]): Int = foo match // warn: not exhaustive
case End() => 0
case Bar(End()) => 1
def t1 = m1[Int](Bar[Int](Bar[Int](End[Int]())))
8 changes: 8 additions & 0 deletions tests/warn/i20132.stream-Tuple2.safeNulls.check
@@ -0,0 +1,8 @@
-- [E029] Pattern Match Exhaustivity Warning: tests/warn/i20132.stream-Tuple2.safeNulls.scala:8:24 ---------------------
8 | xs.asJava.forEach { case (a, b) => // warn
| ^
| match may not be exhaustive.
|
| It would fail on pattern case: _: Null
|
| longer explanation available when compiling with `-explain`