From 9ef3898d15363a4292f20b074469e5080f799d13 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Aug 2018 11:38:51 +0200 Subject: [PATCH 01/49] Add MatchType as a type form Reduction of such types is not included in this commit --- .../dotty/tools/dotc/core/TypeComparer.scala | 2 + .../src/dotty/tools/dotc/core/Types.scala | 47 ++++++++++++++++++- .../tools/dotc/printing/PlainPrinter.scala | 6 +++ .../src/dotty/tools/dotc/typer/Checking.scala | 4 ++ .../dotty/tools/dotc/typer/ProtoTypes.scala | 3 +- docs/docs/internals/syntax.md | 3 ++ 6 files changed, 63 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 0f54da0573a2..61086134fb95 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -362,6 +362,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { false } joinOK || recur(tp11, tp2) && recur(tp12, tp2) + case tp1: MatchType => + recur(tp1.underlying, tp2) case _: FlexType => true case _ => diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 40b843cee067..dfb1a8528ede 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -68,6 +68,7 @@ object Types { * | +- AnnotatedType * | +- TypeVar * | +- HKTypeLambda + * | +- MatchType * | * +- GroundType -+- AndType * +- OrType @@ -3515,7 +3516,43 @@ object Types { type TypeVars = SimpleIdentitySet[TypeVar] - // ------ ClassInfo, Type Bounds ------------------------------------------------------------ + // ------ MatchType --------------------------------------------------------------- + + abstract case class MatchType(scrutinee: Type, cases: List[Type]) extends CachedProxyType with TermType { + override def computeHash(bs: Binders) = doHash(bs, scrutinee, cases) + + override def eql(that: Type) = 
that match { + case that: MatchType => scrutinee.eq(that.scrutinee) && cases.eqElements(that.cases) + case _ => false + } + + def derivedMatchType(scrutinee: Type, cases: List[Type])(implicit ctx: Context) = + if (scrutinee.eq(this.scrutinee) && cases.eqElements(this.cases)) this + else MatchType(scrutinee, cases) + + def caseType(tp: Type)(implicit ctx: Context): Type = tp match { + case tp: HKTypeLambda => caseType(tp.resType) + case defn.FunctionOf(_, restpe, _, _) => restpe + } + + def alternatives(implicit ctx: Context): List[Type] = cases.map(caseType) + + private var myUnderlying: Type = null + + def underlying(implicit ctx: Context): Type = { + if (myUnderlying == null) myUnderlying = alternatives.reduceLeft(OrType(_, _)) + myUnderlying + } + } + + class CachedMatchType(scrutinee: Type, cases: List[Type]) extends MatchType(scrutinee, cases) + + object MatchType { + def apply(scrutinee: Type, cases: List[Type])(implicit ctx: Context) = + unique(new CachedMatchType(scrutinee, cases)) + } + + // ------ ClassInfo, Type Bounds -------------------------------------------------- type TypeOrSymbol = AnyRef /* should be: Type | Symbol */ @@ -4003,6 +4040,8 @@ object Types { tp.derivedAndType(tp1, tp2) protected def derivedOrType(tp: OrType, tp1: Type, tp2: Type): Type = tp.derivedOrType(tp1, tp2) + protected def derivedMatchType(tp: MatchType, scrutinee: Type, cases: List[Type]): Type = + tp.derivedMatchType(scrutinee, cases) protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = tp.derivedAnnotatedType(underlying, annot) protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type = @@ -4100,6 +4139,9 @@ object Types { case tp: OrType => derivedOrType(tp, this(tp.tp1), this(tp.tp2)) + case tp: MatchType => + derivedMatchType(tp, this(tp.scrutinee), tp.cases.mapConserve(this)) + case tp: SkolemType => tp @@ -4474,6 +4516,9 @@ object Types { case tp: OrType => this(this(x, tp.tp1), tp.tp2) + case tp: MatchType => 
+ foldOver(this(x, tp.scrutinee), tp.cases) + case AnnotatedType(underlying, annot) => this(applyToAnnot(x, annot), underlying) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 778650f1ba1b..c8e72be144c5 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -164,6 +164,12 @@ class PlainPrinter(_ctx: Context) extends Printer { changePrec(AndTypePrec) { toText(tp1) ~ " & " ~ atPrec(AndTypePrec + 1) { toText(tp2) } } case OrType(tp1, tp2) => changePrec(OrTypePrec) { toText(tp1) ~ " | " ~ atPrec(OrTypePrec + 1) { toText(tp2) } } + case MatchType(scrutinee, cases) => + changePrec(GlobalPrec) { + def caseText(tp: Type): Text = "case " ~ toText(tp) + def casesText = Text(cases.map(caseText), "\n") + atPrec(InfixPrec) { toText(scrutinee) } ~ " match {" ~ casesText ~ "}" + }.close case tp: ErrorType => s"" case tp: WildcardType => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 31281b413f88..59b7772e37b7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -214,6 +214,10 @@ object Checking { tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK)) case tp: RecType => tp.rebind(this(tp.parent)) + case tp @ MatchType(scrutinee, cases) => + tp.derivedMatchType( + this(scrutinee), + cases.map(this(_, cycleOK = this.cycleOK, nestedCycleOK = true))) case tp @ TypeRef(pre, _) => try { // A prefix is interesting if it might contain (transitively) a reference diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 1421bad81b34..39643d446423 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -447,7 
+447,8 @@ object ProtoTypes { * If the constraint contains already some of these parameters in its domain, * make a copy of the type lambda and add the copy's type parameters instead. * Return either the original type lambda, or the copy, if one was made. - * Also, if `owningTree` is non-empty, add a type variable for each parameter. + * Also, if `owningTree` is non-empty or `alwaysAddTypeVars` is true, add a type variable + * for each parameter. * @return The added type lambda, and the list of created type variables. */ def constrained(tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean = false)(implicit ctx: Context): (TypeLambda, List[TypeTree]) = { diff --git a/docs/docs/internals/syntax.md b/docs/docs/internals/syntax.md index 4dd9c3e8ff20..ae69980a56a9 100644 --- a/docs/docs/internals/syntax.md +++ b/docs/docs/internals/syntax.md @@ -119,6 +119,7 @@ ClassQualifier ::= ‘[’ id ‘]’ ```ebnf Type ::= [FunArgMods] FunArgTypes ‘=>’ Type Function(ts, t) | HkTypeParamClause ‘=>’ Type TypeLambda(ps, t) + | InfixType `match` TypeCaseClauses | InfixType FunArgMods ::= { ‘implicit’ | ‘erased’ } FunArgTypes ::= InfixType @@ -227,6 +228,8 @@ CaseClauses ::= CaseClause { CaseClause } CaseClause ::= ‘case’ (Pattern [Guard] ‘=>’ Block | INT) CaseDef(pat, guard?, block) // block starts at => ImplicitCaseClauses ::= ImplicitCaseClause { ImplicitCaseClause } ImplicitCaseClause ::= ‘case’ PatVar [‘:’ RefinedType] [Guard] ‘=>’ Block +TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } +TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) From 1c172742347acfc946ca559e331761b3fe377a5d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Aug 2018 11:41:21 +0200 Subject: [PATCH 02/49] Represent literals used as types with SingletonTypeTrees This way, it is always decidable whether a typed tree represents a type or a term. 
Previously, such literals could be used in type position, but were classified as terms. --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index bf9c173ba3ab..6f371694e489 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -488,7 +488,9 @@ class Typer extends Namer } def typedLiteral(tree: untpd.Literal)(implicit ctx: Context): Tree = track("typedLiteral") { - assignType(tree) + val tree1 = assignType(tree) + if (ctx.mode.is(Mode.Type)) tpd.SingletonTypeTree(tree1) // this ensures that tree is classified as a type tree + else tree1 } def typedNew(tree: untpd.New, pt: Type)(implicit ctx: Context) = track("typedNew") { From 24a2f738e9cd66d4715ea6dc7de8c9159398db24 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Aug 2018 11:45:57 +0200 Subject: [PATCH 03/49] Syntax, parsing, and type-checking of match types --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 10 +-- .../dotty/tools/dotc/parsing/Parsers.scala | 36 +++++++-- .../dotty/tools/dotc/typer/TypeAssigner.scala | 23 +++++- .../src/dotty/tools/dotc/typer/Typer.scala | 81 ++++++++++++------- docs/docs/internals/syntax.md | 2 +- tests/pos/matchtype.scala | 19 +++++ 6 files changed, 122 insertions(+), 49 deletions(-) create mode 100644 tests/pos/matchtype.scala diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index ec0884e6ee99..f5b70728418d 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -115,10 +115,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit ctx: Context): CaseDef = - ta.assignType(untpd.CaseDef(pat, guard, body), body) + ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) 
def Match(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match = - ta.assignType(untpd.Match(selector, cases), cases) + ta.assignType(untpd.Match(selector, cases), selector, cases) def Labeled(bind: Bind, expr: Tree)(implicit ctx: Context): Labeled = ta.assignType(untpd.Labeled(bind, expr)) @@ -575,7 +575,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } } - override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(implicit ctx: Context): Closure = { val tree1 = untpd.cpy.Closure(tree)(env, meth, tpt) tree match { @@ -584,11 +583,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => ta.assignType(tree1, meth, tpt) } } + override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(implicit ctx: Context): Match = { val tree1 = untpd.cpy.Match(tree)(selector, cases) tree match { case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, cases) + case _ => ta.assignType(tree1, selector, cases) } } @@ -596,7 +596,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val tree1 = untpd.cpy.CaseDef(tree)(pat, guard, body) tree match { case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe) - case _ => ta.assignType(tree1, body) + case _ => ta.assignType(tree1, pat, body) } } diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index f4abf16104bf..5d86392766b4 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -819,6 +819,7 @@ object Parsers { in.token match { case ARROW => functionRest(t :: Nil) + case MATCH => matchType(t) case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t case _ => if (imods.is(Implicit) && !t.isInstanceOf[FunctionWithMods]) @@ -1288,7 +1289,7 @@ object Parsers { */ def matchExpr(t: Tree, start: Offset, mkMatch: (Tree, 
List[CaseDef]) => Match) = atPos(start, in.skipToken()) { - inBraces(mkMatch(t, caseClauses())) + inBraces(mkMatch(t, caseClauses(caseClause))) } /** `match' { ImplicitCaseClauses } @@ -1315,6 +1316,13 @@ object Parsers { result } + /** `match' { TypeCaseClauses } + */ + def matchType(t: Tree) = + atPos(t.pos.start, in.skipToken()) { + inBraces(Match(t, caseClauses(typeCaseClause))) + } + /** FunParams ::= Bindings * | id * | `_' @@ -1531,7 +1539,7 @@ object Parsers { */ def blockExpr(): Tree = atPos(in.offset) { inDefScopeBraces { - if (in.token == CASE) Match(EmptyTree, caseClauses()) + if (in.token == CASE) Match(EmptyTree, caseClauses(caseClause)) else block() } } @@ -1621,22 +1629,34 @@ object Parsers { /** CaseClauses ::= CaseClause {CaseClause} * ImplicitCaseClauses ::= ImplicitCaseClause {ImplicitCaseClause} + * TypeCaseClauses ::= TypeCaseClause {TypeCaseClause} */ - def caseClauses(): List[CaseDef] = { + def caseClauses(clause: () => CaseDef): List[CaseDef] = { val buf = new ListBuffer[CaseDef] - buf += caseClause() - while (in.token == CASE) buf += caseClause() + buf += clause() + while (in.token == CASE) buf += clause() buf.toList } - /** CaseClause ::= case Pattern [Guard] `=>' Block - * ImplicitCaseClause ::= case PatVar [Ascription] [Guard] `=>' Block + /** CaseClause ::= ‘case’ Pattern [Guard] `=>' Block + * ImplicitCaseClause ::= ‘case’ PatVar [Ascription] [Guard] `=>' Block */ - def caseClause(): CaseDef = atPos(in.offset) { + val caseClause = () => atPos(in.offset) { accept(CASE) CaseDef(pattern(), guard(), atPos(accept(ARROW)) { block() }) } + /** TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl] + */ + val typeCaseClause = () => atPos(in.offset) { + accept(CASE) + CaseDef(infixType(), EmptyTree, atPos(accept(ARROW)) { + val t = typ() + if (isStatSep) in.nextToken() + t + }) + } + /* -------- PATTERNS ------------------------------------------- */ /** Pattern ::= Pattern1 { `|' Pattern1 } diff --git 
a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 1f2a7a4813d1..b30e853f153c 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -460,11 +460,26 @@ trait TypeAssigner { if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length) else target.tpe) - def assignType(tree: untpd.CaseDef, body: Tree)(implicit ctx: Context) = - tree.withType(body.tpe) + def assignType(tree: untpd.CaseDef, pat: Tree, body: Tree)(implicit ctx: Context) = { + val ownType = + if (body.isType) { + val params = new TreeAccumulator[mutable.ListBuffer[TypeSymbol]] { + def apply(ps: mutable.ListBuffer[TypeSymbol], t: Tree)(implicit ctx: Context) = t match { + case t: Bind if t.symbol.isType => foldOver(ps += t.symbol.asType, t) + case _ => foldOver(ps, t) + } + } + HKTypeLambda.fromParams( + params(new mutable.ListBuffer[TypeSymbol](), pat).toList, + defn.FunctionOf(pat.tpe :: Nil, body.tpe)) + } + else body.tpe + tree.withType(ownType) + } - def assignType(tree: untpd.Match, cases: List[CaseDef])(implicit ctx: Context) = - tree.withType(ctx.typeComparer.lub(cases.tpes)) + def assignType(tree: untpd.Match, scrutinee: Tree, cases: List[CaseDef])(implicit ctx: Context) = + if (scrutinee.isType) tree.withType(MatchType(scrutinee.tpe, cases.tpes)) + else tree.withType(ctx.typeComparer.lub(cases.tpes)) def assignType(tree: untpd.Labeled)(implicit ctx: Context) = tree.withType(tree.bind.symbol.info) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 6f371694e489..956aa0a6143a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -982,9 +982,15 @@ class Typer extends Namer typedMatchFinish(tree, sel1, sel1.tpe, pt) case _ => if (tree.isInstanceOf[untpd.RewriteMatch]) checkInRewriteContext("rewrite match", tree.pos) - val 
sel1 = typedExpr(tree.selector) - val selType = fullyDefinedType(sel1.tpe, "pattern selector", tree.pos).widen - typedMatchFinish(tree, sel1, selType, pt) + val sel1 = typed(tree.selector) + if (ctx.mode.is(Mode.Type)) { + val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1.tpe, pt)) + assignType(cpy.Match(tree)(sel1, cases1), sel1, cases1) + } + else { + val selType = fullyDefinedType(sel1.tpe, "pattern selector", tree.pos).widen + typedMatchFinish(tree, sel1, selType, pt) + } } } @@ -992,7 +998,7 @@ class Typer extends Namer def typedMatchFinish(tree: untpd.Match, sel: Tree, selType: Type, pt: Type)(implicit ctx: Context): Tree = { val cases1 = harmonic(harmonize)(typedCases(tree.cases, selType, pt.notApplied)) .asInstanceOf[List[CaseDef]] - assignType(cpy.Match(tree)(sel, cases1), cases1) + assignType(cpy.Match(tree)(sel, cases1), sel, cases1) } /** gadtSyms = "all type parameters of enclosing methods that appear @@ -1029,43 +1035,42 @@ class Typer extends Namer cases.mapconserve(typedCase(_, selType, pt, gadts)) } + /** - strip all instantiated TypeVars from pattern types. + * run/reducable.scala is a test case that shows stripping typevars is necessary. + * - enter all symbols introduced by a Bind in current scope + */ + private def indexPattern(cdef: untpd.CaseDef)(implicit ctx: Context) = new TreeMap { + val stripTypeVars = new TypeMap { + def apply(t: Type) = mapOver(t) + } + override def transform(trt: Tree)(implicit ctx: Context) = + super.transform(trt.withType(stripTypeVars(trt.tpe))) match { + case b: Bind => + val sym = b.symbol + if (sym.name != tpnme.WILDCARD) + if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(sym) + else ctx.error(new DuplicateBind(b, cdef), b.pos) + if (!ctx.isAfterTyper) { + val bounds = ctx.gadt.bounds(sym) + if (bounds != null) sym.info = bounds + } + b + case t => t + } + } + /** Type a case. 
*/ def typedCase(tree: untpd.CaseDef, selType: Type, pt: Type, gadtSyms: Set[Symbol])(implicit ctx: Context): CaseDef = track("typedCase") { val originalCtx = ctx - val gadtCtx = gadtContext(gadtSyms) - /** - strip all instantiated TypeVars from pattern types. - * run/reducable.scala is a test case that shows stripping typevars is necessary. - * - enter all symbols introduced by a Bind in current scope - */ - val indexPattern = new TreeMap { - val stripTypeVars = new TypeMap { - def apply(t: Type) = mapOver(t) - } - override def transform(trt: Tree)(implicit ctx: Context) = - super.transform(trt.withType(stripTypeVars(trt.tpe))) match { - case b: Bind => - val sym = b.symbol - if (sym.name != tpnme.WILDCARD) - if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(sym) - else ctx.error(new DuplicateBind(b, tree), b.pos) - if (!ctx.isAfterTyper) { - val bounds = ctx.gadt.bounds(sym) - if (bounds != null) sym.info = bounds - } - b - case t => t - } - } - def caseRest(pat: Tree)(implicit ctx: Context) = { - val pat1 = indexPattern.transform(pat) + val pat1 = indexPattern(tree).transform(pat) val guard1 = typedExpr(tree.guard, defn.BooleanType) var body1 = ensureNoLocalRefs(typedExpr(tree.body, pt), pt, ctx.scope.toList) if (pt.isValueType) // insert a cast if body does not conform to expected type if we disregard gadt bounds body1 = body1.ensureConforms(pt)(originalCtx) - assignType(cpy.CaseDef(tree)(pat1, guard1, body1), body1) + assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1) } val pat1 = typedPattern(tree.pat, selType)(gadtCtx) @@ -1078,6 +1083,20 @@ class Typer extends Namer assignType(cpy.Labeled(tree)(bind1, expr1)) } + /** Type a case of a type match */ + def typedTypeCase(cdef: untpd.CaseDef, selType: Type, pt: Type)(implicit ctx: Context): CaseDef = { + def caseRest(implicit ctx: Context) = { + val pat1 = checkSimpleKinded(typedType(cdef.pat)(ctx.addMode(Mode.Pattern))) + if (!ctx.isAfterTyper) + constrainPatternType(pat1.tpe, 
pt)(ctx.addMode(Mode.GADTflexible)) + val pat2 = indexPattern(cdef).transform(pat1) + val body1 = typedType(cdef.body, pt) + assignType(cpy.CaseDef(cdef)(pat2, EmptyTree, body1), pat2, body1) + } + caseRest(ctx.fresh.setFreshGADTBounds.setNewScope) + } + + def typedReturn(tree: untpd.Return)(implicit ctx: Context): Return = track("typedReturn") { def returnProto(owner: Symbol, locals: Scope): Type = if (owner.isConstructor) defn.UnitType diff --git a/docs/docs/internals/syntax.md b/docs/docs/internals/syntax.md index ae69980a56a9..ecf73ff2bd29 100644 --- a/docs/docs/internals/syntax.md +++ b/docs/docs/internals/syntax.md @@ -229,7 +229,7 @@ CaseClause ::= ‘case’ (Pattern [Guard] ‘=>’ Block | INT) ImplicitCaseClauses ::= ImplicitCaseClause { ImplicitCaseClause } ImplicitCaseClause ::= ‘case’ PatVar [‘:’ RefinedType] [Guard] ‘=>’ Block TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } -TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type +TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [nl] Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) diff --git a/tests/pos/matchtype.scala b/tests/pos/matchtype.scala new file mode 100644 index 000000000000..8329bb59261a --- /dev/null +++ b/tests/pos/matchtype.scala @@ -0,0 +1,19 @@ +object Test { + type T[X] = X match { + case String => Int + case Int => String + } + + trait Nat { + def toInt: Int = ??? 
+ } + + case object Z extends Nat + case class S[N <: Nat] extends Nat + type Z = Z.type + + type Len[X] = X match { + case Unit => Z + case (x, xs) => S[Len[xs]] + } +} \ No newline at end of file From 6d472f5915f395bea7a08211997391bee04b84d5 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Aug 2018 13:06:53 +0200 Subject: [PATCH 04/49] Fix unpickling of match types --- compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index f14f0c76019a..e5e04ccc1acf 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -513,7 +513,8 @@ object TastyFormat { | ANDtpt | ORtpt | BYNAMEtpt - | BIND => true + | BIND + | MATCH => true case _ => false } From 0877a368a45613009858acc15dc2cac8ea1d6942 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 20 Aug 2018 13:07:27 +0200 Subject: [PATCH 05/49] Blacklist match type fromtasty test Need to implement printing of match types from tasty first --- compiler/test/dotc/pos-from-tasty.blacklist | 3 +++ 1 file changed, 3 insertions(+) diff --git a/compiler/test/dotc/pos-from-tasty.blacklist b/compiler/test/dotc/pos-from-tasty.blacklist index a78823e99acd..a48abcd017a0 100644 --- a/compiler/test/dotc/pos-from-tasty.blacklist +++ b/compiler/test/dotc/pos-from-tasty.blacklist @@ -15,3 +15,6 @@ repeatedArgs213.scala # Error printing parent constructors that are blocks default-super.scala + +# Need to implement printing of match types +matchtype.scala \ No newline at end of file From bd8b3cddefe8ffb6bcdc32da62ef1661b9b4e775 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Aug 2018 16:56:22 +0200 Subject: [PATCH 06/49] Classify type defs with matches as RHS as abstract This is necessary since we would otherwise get direct type recursion. 
We have to special case these abstract types later for subtyping. The commit means that ad-hoc fixes in cyclic checking and variance checking can be reverted (done also as part of this commit). --- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 1 + compiler/src/dotty/tools/dotc/core/TypeApplications.scala | 7 +++++-- compiler/src/dotty/tools/dotc/core/Types.scala | 8 ++++++++ compiler/src/dotty/tools/dotc/typer/Checking.scala | 4 ---- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 5587be82333d..dc400d79ba63 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -306,6 +306,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] case mdef: TypeDef => def isBounds(rhs: Tree): Boolean = rhs match { case _: TypeBoundsTree => true + case _: Match => true // Typedefs with Match rhs classify as abstract case LambdaTypeTree(_, body) => isBounds(body) case _ => false } diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index da1554fb9d67..fd49239893c7 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -434,10 +434,13 @@ class TypeApplications(val self: Type) extends AnyVal { appliedTo(args) } - /** Turns non-bounds types to type aliases */ + /** Turns non-bounds types to type bounds. + * A (possible lambda abstracted) match type is turned into an abstract type. 
+ * Every other type is turned into a type alias + */ final def toBounds(implicit ctx: Context): TypeBounds = self match { case self: TypeBounds => self // this can happen for wildcard args - case _ => TypeAlias(self) + case _ => if (self.isMatch) TypeBounds.upper(self) else TypeAlias(self) } /** Translate a type of the form From[T] to To[T], keep other types as they are. diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index dfb1a8528ede..6d1b4bdb4afa 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -330,6 +330,14 @@ object Types { /** Is this a MethodType for which the parameters will not be used */ def isErasedMethod: Boolean = false + /** Is this a match type or a higher-kinded abstraction of one? + */ + def isMatch(implicit ctx: Context): Boolean = stripTypeVar.stripAnnots match { + case _: MatchType => true + case tp: HKTypeLambda => tp.resType.isMatch + case _ => false + } + // ----- Higher-order combinators ----------------------------------- /** Returns true if there is a part of this type that satisfies predicate `p`. 
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 59b7772e37b7..31281b413f88 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -214,10 +214,6 @@ object Checking { tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK)) case tp: RecType => tp.rebind(this(tp.parent)) - case tp @ MatchType(scrutinee, cases) => - tp.derivedMatchType( - this(scrutinee), - cases.map(this(_, cycleOK = this.cycleOK, nestedCycleOK = true))) case tp @ TypeRef(pre, _) => try { // A prefix is interesting if it might contain (transitively) a reference From 993c40c7c2004ea815fb156af5d6cbe4b8ece8fd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Aug 2018 16:58:30 +0200 Subject: [PATCH 07/49] Implement subtyping for match types --- .../tools/dotc/core/ConstraintHandling.scala | 49 +++++++------ .../dotty/tools/dotc/core/TypeComparer.scala | 70 +++++++++++++++++-- .../src/dotty/tools/dotc/core/Types.scala | 36 ++++++++++ .../dotty/tools/dotc/typer/ProtoTypes.scala | 8 ++- tests/pos/matchtype.scala | 5 +- 5 files changed, 136 insertions(+), 32 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 2a9843394c9d..6d7d4d3ccb78 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -35,6 +35,11 @@ trait ConstraintHandling { /** If the constraint is frozen we cannot add new bounds to the constraint. */ protected var frozenConstraint = false + /** Potentially a type lambda that is still instantiatable, even though the constraint + * is generally frozen. 
+ */ + protected var unfrozen: Type = NoType + /** If set, align arguments `S1`, `S2`when taking the glb * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. * Aligning means computing `S1 =:= S2` which may change the current constraint. @@ -167,19 +172,20 @@ trait ConstraintHandling { isSubType(tp1, tp2) } - final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = { - val saved = frozenConstraint + @forceInline final def inFrozenConstraint[T](op: => T): T = { + val savedFrozen = frozenConstraint + val savedUnfrozen = unfrozen frozenConstraint = true - try isSubType(tp1, tp2) - finally frozenConstraint = saved + unfrozen = NoType + try op + finally { + frozenConstraint = savedFrozen + unfrozen = savedUnfrozen + } } - final def isSameTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = { - val saved = frozenConstraint - frozenConstraint = true - try isSameType(tp1, tp2) - finally frozenConstraint = saved - } + final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = inFrozenConstraint(isSubType(tp1, tp2)) + final def isSameTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = inFrozenConstraint(isSameType(tp1, tp2)) /** Test whether the lower bounds of all parameters in this * constraint are a solution to the constraint. @@ -355,7 +361,7 @@ trait ConstraintHandling { /** Can `param` be constrained with new bounds? */ final def canConstrain(param: TypeParamRef): Boolean = - !frozenConstraint && (constraint contains param) + (!frozenConstraint || (unfrozen `eq` param.binder)) && constraint.contains(param) /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. * `bound` is assumed to be in normalized form, as specified in `firstTry` and @@ -492,19 +498,18 @@ trait ConstraintHandling { /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ def checkPropagated(msg: => String)(result: Boolean): Boolean = { if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) { - val saved = frozenConstraint - frozenConstraint = true - for (p <- constraint.domainParams) { - def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = - assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") - for (u <- constraint.upper(p)) - check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") - for (l <- constraint.lower(p)) { - check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") - check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") + inFrozenConstraint { + for (p <- constraint.domainParams) { + def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = + assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") + for (u <- constraint.upper(p)) + check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") + for (l <- constraint.lower(p)) { + check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") + check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") + } } } - frozenConstraint = saved } result } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 61086134fb95..b52272eab71d 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -12,6 +12,7 @@ import config.Printers.{typr, constr, subtyping, gadts, noPrinter} import TypeErasure.{erasedLub, erasedGlb} import TypeApplications._ import scala.util.control.NonFatal +import typer.ProtoTypes.constrained import reporting.trace /** Provides methods to compare types. 
@@ -336,7 +337,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { recur(tp1.underlying, tp2) case tp1: WildcardType => def compareWild = tp1.optBounds match { - case TypeBounds(lo, _) => recur(lo, tp2) + case bounds: TypeBounds => recur(bounds.effectiveLo, tp2) case _ => true } compareWild @@ -363,7 +364,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } joinOK || recur(tp11, tp2) && recur(tp12, tp2) case tp1: MatchType => - recur(tp1.underlying, tp2) + val reduced = tp1.reduced + if (reduced.exists) recur(reduced, tp2) else thirdTry case _: FlexType => true case _ => @@ -371,7 +373,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } def thirdTryNamed(tp2: NamedType): Boolean = tp2.info match { - case TypeBounds(lo2, _) => + case info2: TypeBounds => def compareGADT: Boolean = { val gbounds2 = ctx.gadt.bounds(tp2.symbol) (gbounds2 != null) && @@ -379,7 +381,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { narrowGADTBounds(tp2, tp1, approx, isUpper = false)) && GADTusage(tp2.symbol) } - isSubApproxHi(tp1, lo2) || compareGADT || fourthTry + isSubApproxHi(tp1, info2.effectiveLo) || compareGADT || fourthTry case _ => val cls2 = tp2.symbol @@ -423,7 +425,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { // So if the constraint is not yet frozen, we do the same comparison again // with a frozen constraint, which means that we get a chance to do the // widening in `fourthTry` before adding to the constraint. 
- if (frozenConstraint) isSubType(tp1, bounds(tp2).lo) + if (frozenConstraint) isSubType(tp1, bounds(tp2).effectiveLo) else isSubTypeWhenFrozen(tp1, tp2) alwaysTrue || { if (canConstrain(tp2) && !approx.low) @@ -534,6 +536,9 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case _ => } either(recur(tp1, tp21), recur(tp1, tp22)) || fourthTry + case tp2: MatchType => + val reduced = tp2.reduced + if (reduced.exists) recur(tp1, reduced) else fourthTry case tp2: MethodType => def compareMethod = tp1 match { case tp1: MethodType => @@ -667,6 +672,14 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case _ => } either(recur(tp11, tp2), recur(tp12, tp2)) + case tp1: MatchType => + def compareMatch = tp2 match { + case tp2: MatchType => + isSameType(tp1.scrutinee, tp2.scrutinee) && + tp1.cases.corresponds(tp2.cases)(isSubType) + case _ => false + } + recur(tp1.underlying, tp2) || compareMatch case tp1: AnnotatedType if tp1.isRefining => isNewSubType(tp1.parent) case JavaArrayType(elem1) => @@ -798,7 +811,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { if (tyconIsTypeRef) recur(tp1, tp2.superType) else isSubApproxHi(tp1, tycon2bounds.lo.applyIfParameterized(args2)) else - fallback(tycon2bounds.lo) + fallback(tycon2bounds.effectiveLo) tycon2 match { case param2: TypeParamRef => @@ -932,6 +945,51 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } } && isSubArgs(args1.tail, args2.tail, tp1, tparams.tail) + def matchCase(scrut: Type, cas: Type, instantiate: Boolean)(implicit ctx: Context): Type = { + + def paramInstances = new TypeAccumulator[Array[Type]] { + def apply(inst: Array[Type], t: Type) = t match { + case t @ TypeParamRef(b, n) if b `eq` unfrozen => + inst(n) = instanceType(t, fromBelow = variance >= 0) + inst + case _ => + foldOver(inst, t) + } + } + + def instantiateParams(inst: Array[Type]) = new TypeMap { + def apply(t: Type) = t match { + case t @ TypeParamRef(b, n) if b `eq` 
unfrozen => inst(n) + case t: LazyRef => apply(t.ref) + case _ => mapOver(t) + } + } + + val saved = constraint + try { + inFrozenConstraint { + val cas1 = cas match { + case cas: HKTypeLambda => + unfrozen = constrained(cas) + unfrozen.resultType + case _ => + cas + } + val defn.FunctionOf(pat :: Nil, body, _, _) = cas1 + if (isSubType(scrut, pat)) + unfrozen match { + case unfrozen: HKTypeLambda if instantiate => + val instances = paramInstances(new Array(unfrozen.paramNames.length), pat) + instantiateParams(instances)(body) + case _ => + body + } + else NoType + } + } + finally constraint = saved + } + /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where * - `B` derives from one of the class symbols of `tp2`, * - the type parameters of `B` match one-by-one the variances of `tparams`, diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 6d1b4bdb4afa..7d3e1070421a 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3551,6 +3551,39 @@ object Types { if (myUnderlying == null) myUnderlying = alternatives.reduceLeft(OrType(_, _)) myUnderlying } + + private def wildApproxMap(implicit ctx: Context) = new TypeMap { + def apply(t: Type) = t match { + case t: TypeRef => + t.info match { + case TypeBounds(lo, hi) if lo `ne` hi => WildcardType + case _ => mapOver(t) + } + case t: ParamRef => WildcardType + case _ => mapOver(t) + } + } + + private var myApproxScrut: Type = null + + def approximatedScrutinee(implicit ctx: Context): Type = { + if (myApproxScrut == null) myApproxScrut = wildApproxMap.apply(scrutinee) + myApproxScrut + } + + def reduced(implicit ctx: Context): Type = { + def recur(cases: List[Type]): Type = cases match { + case Nil => NoType + case cas :: cases1 => + def tryReduce(scrut: Type, instantiate: Boolean) = + ctx.typeComparer.matchCase(scrut, cas, instantiate) + val r = tryReduce(scrutinee, true) + if 
(r.exists) r + else if (tryReduce(approximatedScrutinee, false).exists) NoType + else recur(cases1) + } + recur(cases) + } } class CachedMatchType(scrutinee: Type, cases: List[Type]) extends MatchType(scrutinee, cases) @@ -3721,6 +3754,9 @@ object Types { case _ => super.| (that) } + def effectiveLo(implicit ctx: Context) = + if (hi.isMatch) hi else lo + override def computeHash(bs: Binders) = doHash(bs, lo, hi) override def stableHash = lo.stableHash && hi.stableHash diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 39643d446423..250aa2a8e80b 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -454,9 +454,11 @@ object ProtoTypes { def constrained(tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean = false)(implicit ctx: Context): (TypeLambda, List[TypeTree]) = { val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty - assert(!(ctx.typerState.isCommittable && !addTypeVars), - s"inconsistent: no typevars were added to committable constraint ${state.constraint}") - + if (tl.isInstanceOf[PolyType]) + assert(!(ctx.typerState.isCommittable && !addTypeVars), + s"inconsistent: no typevars were added to committable constraint ${state.constraint}") + // hk type lambdas can be added to constraints without typevars during match reduction + def newTypeVars(tl: TypeLambda): List[TypeTree] = for (paramRef <- tl.paramRefs) yield { diff --git a/tests/pos/matchtype.scala b/tests/pos/matchtype.scala index 8329bb59261a..cf84ea8acbaa 100644 --- a/tests/pos/matchtype.scala +++ b/tests/pos/matchtype.scala @@ -14,6 +14,9 @@ object Test { type Len[X] = X match { case Unit => Z - case (x, xs) => S[Len[xs]] + case x *: xs => S[Len[xs]] } + + type T2 = Len[(1, 2, 3)] + erased val x: S[S[S[Z]]] = typelevel.erasedValue[T2] } \ No newline at end of file From 151a37cac9017d191c14d6f5f5d216418ba0b779 
Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 22 Aug 2018 18:37:22 +0200 Subject: [PATCH 08/49] Cache match reduce results --- .../dotty/tools/dotc/core/TypeComparer.scala | 94 ++++++++++--------- .../src/dotty/tools/dotc/core/Types.scala | 12 +-- 2 files changed, 55 insertions(+), 51 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index b52272eab71d..f515ca59fc55 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -945,51 +945,6 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } } && isSubArgs(args1.tail, args2.tail, tp1, tparams.tail) - def matchCase(scrut: Type, cas: Type, instantiate: Boolean)(implicit ctx: Context): Type = { - - def paramInstances = new TypeAccumulator[Array[Type]] { - def apply(inst: Array[Type], t: Type) = t match { - case t @ TypeParamRef(b, n) if b `eq` unfrozen => - inst(n) = instanceType(t, fromBelow = variance >= 0) - inst - case _ => - foldOver(inst, t) - } - } - - def instantiateParams(inst: Array[Type]) = new TypeMap { - def apply(t: Type) = t match { - case t @ TypeParamRef(b, n) if b `eq` unfrozen => inst(n) - case t: LazyRef => apply(t.ref) - case _ => mapOver(t) - } - } - - val saved = constraint - try { - inFrozenConstraint { - val cas1 = cas match { - case cas: HKTypeLambda => - unfrozen = constrained(cas) - unfrozen.resultType - case _ => - cas - } - val defn.FunctionOf(pat :: Nil, body, _, _) = cas1 - if (isSubType(scrut, pat)) - unfrozen match { - case unfrozen: HKTypeLambda if instantiate => - val instances = paramInstances(new Array(unfrozen.paramNames.length), pat) - instantiateParams(instances)(body) - case _ => - body - } - else NoType - } - } - finally constraint = saved - } - /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where * - `B` derives from one of the class symbols of `tp2`, * - the type 
parameters of `B` match one-by-one the variances of `tparams`, @@ -1779,6 +1734,55 @@ object TypeComparer { } } +class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { + import state.constraint + + def matchCase(scrut: Type, cas: Type, instantiate: Boolean)(implicit ctx: Context): Type = { + + def paramInstances = new TypeAccumulator[Array[Type]] { + def apply(inst: Array[Type], t: Type) = t match { + case t @ TypeParamRef(b, n) if b `eq` unfrozen => + inst(n) = instanceType(t, fromBelow = variance >= 0) + inst + case _ => + foldOver(inst, t) + } + } + + def instantiateParams(inst: Array[Type]) = new TypeMap { + def apply(t: Type) = t match { + case t @ TypeParamRef(b, n) if b `eq` unfrozen => inst(n) + case t: LazyRef => apply(t.ref) + case _ => mapOver(t) + } + } + + val saved = constraint + try { + inFrozenConstraint { + val cas1 = cas match { + case cas: HKTypeLambda => + unfrozen = constrained(cas) + unfrozen.resultType + case _ => + cas + } + val defn.FunctionOf(pat :: Nil, body, _, _) = cas1 + if (isSubType(scrut, pat)) + unfrozen match { + case unfrozen: HKTypeLambda if instantiate => + val instances = paramInstances(new Array(unfrozen.paramNames.length), pat) + instantiateParams(instances)(body) + case _ => + body + } + else NoType + } + } + finally constraint = saved + } +} + /** A type comparer that can record traces of subtype operations */ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { import TypeComparer._ diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 7d3e1070421a..6b721738cf6f 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3572,17 +3572,17 @@ object Types { } def reduced(implicit ctx: Context): Type = { - def recur(cases: List[Type]): Type = cases match { + val trackingCtx = ctx.fresh.setTypeComparerFn(new TrackingTypeComparer(_)) + val cmp = 
trackingCtx.typeComparer.asInstanceOf[TrackingTypeComparer] + def recur(cases: List[Type])(implicit ctx: Context): Type = cases match { case Nil => NoType case cas :: cases1 => - def tryReduce(scrut: Type, instantiate: Boolean) = - ctx.typeComparer.matchCase(scrut, cas, instantiate) - val r = tryReduce(scrutinee, true) + val r = cmp.matchCase(scrutinee, cas, instantiate = true) if (r.exists) r - else if (tryReduce(approximatedScrutinee, false).exists) NoType + else if (cmp.matchCase(approximatedScrutinee, cas, instantiate = false).exists) NoType else recur(cases1) } - recur(cases) + recur(cases)(trackingCtx) } } From ffac30eb2160c5d25a7914a7921a1c603972405e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 23 Aug 2018 17:18:01 +0200 Subject: [PATCH 09/49] Cache results of attempts to reduce match types --- .../src/dotty/tools/dotc/config/Config.scala | 1 + .../tools/dotc/core/ConstraintHandling.scala | 14 +++--- .../dotty/tools/dotc/core/TypeComparer.scala | 47 ++++++++++++++----- .../src/dotty/tools/dotc/core/Types.scala | 33 +++++++++++-- 4 files changed, 74 insertions(+), 21 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index a8d09ba7b6d8..4366ac45170f 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -6,6 +6,7 @@ object Config { final val cacheAsSeenFrom = true final val cacheMemberNames = true final val cacheImplicitScopes = true + final val cacheMatchReduced = true final val checkCacheMembersNamed = false diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 6d7d4d3ccb78..682eeb552507 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -38,7 +38,7 @@ trait ConstraintHandling { /** Potentially a type lambda that is still 
instantiatable, even though the constraint * is generally frozen. */ - protected var unfrozen: Type = NoType + protected var caseLambda: Type = NoType /** If set, align arguments `S1`, `S2`when taking the glb * `T1 { X = S1 } & T2 { X = S2 }` of a constraint upper bound for some type parameter. @@ -52,7 +52,7 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty - private def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean): Boolean = + protected def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean): Boolean = !constraint.contains(param) || { def occursIn(bound: Type): Boolean = { val b = bound.dealias @@ -174,13 +174,13 @@ trait ConstraintHandling { @forceInline final def inFrozenConstraint[T](op: => T): T = { val savedFrozen = frozenConstraint - val savedUnfrozen = unfrozen + val savedLambda = caseLambda frozenConstraint = true - unfrozen = NoType + caseLambda = NoType try op finally { frozenConstraint = savedFrozen - unfrozen = savedUnfrozen + caseLambda = savedLambda } } @@ -325,7 +325,7 @@ trait ConstraintHandling { } /** The current bounds of type parameter `param` */ - final def bounds(param: TypeParamRef): TypeBounds = { + def bounds(param: TypeParamRef): TypeBounds = { val e = constraint.entry(param) if (e.exists) e.bounds else { @@ -361,7 +361,7 @@ trait ConstraintHandling { /** Can `param` be constrained with new bounds? */ final def canConstrain(param: TypeParamRef): Boolean = - (!frozenConstraint || (unfrozen `eq` param.binder)) && constraint.contains(param) + (!frozenConstraint || (caseLambda `eq` param.binder)) && constraint.contains(param) /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise. 
* `bound` is assumed to be in normalized form, as specified in `firstTry` and diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index f515ca59fc55..d680b31db194 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -103,6 +103,9 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { true } + protected def gadtBounds(sym: Symbol)(implicit ctx: Context) = ctx.gadt.bounds(sym) + protected def gadtSetBounds(sym: Symbol, b: TypeBounds) = ctx.gadt.setBounds(sym, b) + // Subtype testing `<:<` def topLevelSubType(tp1: Type, tp2: Type): Boolean = { @@ -375,7 +378,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { def thirdTryNamed(tp2: NamedType): Boolean = tp2.info match { case info2: TypeBounds => def compareGADT: Boolean = { - val gbounds2 = ctx.gadt.bounds(tp2.symbol) + val gbounds2 = gadtBounds(tp2.symbol) (gbounds2 != null) && (isSubTypeWhenFrozen(tp1, gbounds2.lo) || narrowGADTBounds(tp2, tp1, approx, isUpper = false)) && @@ -601,7 +604,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { tp1.info match { case TypeBounds(_, hi1) => def compareGADT = { - val gbounds1 = ctx.gadt.bounds(tp1.symbol) + val gbounds1 = gadtBounds(tp1.symbol) (gbounds1 != null) && (isSubTypeWhenFrozen(gbounds1.hi, tp2) || narrowGADTBounds(tp1, tp2, approx, isUpper = true)) && @@ -1146,12 +1149,12 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { gadts.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.toString} ${bound.isRef(tparam)}") if (bound.isRef(tparam)) false else { - val oldBounds = ctx.gadt.bounds(tparam) + val oldBounds = gadtBounds(tparam) val newBounds = if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound) else TypeBounds(oldBounds.lo | bound, oldBounds.hi) isSubType(newBounds.lo, newBounds.hi) 
&& - { ctx.gadt.setBounds(tparam, newBounds); true } + { gadtSetBounds(tparam, newBounds); true } } } } @@ -1737,11 +1740,33 @@ object TypeComparer { class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { import state.constraint + val footprint = mutable.Set[Type]() + + override def bounds(param: TypeParamRef): TypeBounds = { + if (param.binder `ne` caseLambda) footprint += param + super.bounds(param) + } + + override def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean): Boolean = { + if (param.binder `ne` caseLambda) footprint += param + super.addOneBound(param, bound, isUpper) + } + + override def gadtBounds(sym: Symbol)(implicit ctx: Context) = { + footprint += sym.typeRef + super.gadtBounds(sym) + } + + override def gadtSetBounds(sym: Symbol, b: TypeBounds) = { + footprint += sym.typeRef + super.gadtSetBounds(sym, b) + } + def matchCase(scrut: Type, cas: Type, instantiate: Boolean)(implicit ctx: Context): Type = { def paramInstances = new TypeAccumulator[Array[Type]] { def apply(inst: Array[Type], t: Type) = t match { - case t @ TypeParamRef(b, n) if b `eq` unfrozen => + case t @ TypeParamRef(b, n) if b `eq` caseLambda => inst(n) = instanceType(t, fromBelow = variance >= 0) inst case _ => @@ -1751,7 +1776,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { def instantiateParams(inst: Array[Type]) = new TypeMap { def apply(t: Type) = t match { - case t @ TypeParamRef(b, n) if b `eq` unfrozen => inst(n) + case t @ TypeParamRef(b, n) if b `eq` caseLambda => inst(n) case t: LazyRef => apply(t.ref) case _ => mapOver(t) } @@ -1762,16 +1787,16 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { inFrozenConstraint { val cas1 = cas match { case cas: HKTypeLambda => - unfrozen = constrained(cas) - unfrozen.resultType + caseLambda = constrained(cas) + caseLambda.resultType case _ => cas } val defn.FunctionOf(pat :: Nil, body, _, _) = cas1 if (isSubType(scrut, pat)) - unfrozen 
match { - case unfrozen: HKTypeLambda if instantiate => - val instances = paramInstances(new Array(unfrozen.paramNames.length), pat) + caseLambda match { + case caseLambda: HKTypeLambda if instantiate => + val instances = paramInstances(new Array(caseLambda.paramNames.length), pat) instantiateParams(instances)(body) case _ => body diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 6b721738cf6f..360ed8a73eae 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3545,7 +3545,7 @@ object Types { def alternatives(implicit ctx: Context): List[Type] = cases.map(caseType) - private var myUnderlying: Type = null + private[this] var myUnderlying: Type = null def underlying(implicit ctx: Context): Type = { if (myUnderlying == null) myUnderlying = alternatives.reduceLeft(OrType(_, _)) @@ -3564,16 +3564,20 @@ object Types { } } - private var myApproxScrut: Type = null + private[this] var myApproxScrut: Type = null def approximatedScrutinee(implicit ctx: Context): Type = { if (myApproxScrut == null) myApproxScrut = wildApproxMap.apply(scrutinee) myApproxScrut } + private[this] var myReduced: Type = null + private[this] var reductionContext: mutable.Map[Type, TypeBounds] = null + def reduced(implicit ctx: Context): Type = { val trackingCtx = ctx.fresh.setTypeComparerFn(new TrackingTypeComparer(_)) val cmp = trackingCtx.typeComparer.asInstanceOf[TrackingTypeComparer] + def recur(cases: List[Type])(implicit ctx: Context): Type = cases match { case Nil => NoType case cas :: cases1 => @@ -3582,7 +3586,30 @@ object Types { else if (cmp.matchCase(approximatedScrutinee, cas, instantiate = false).exists) NoType else recur(cases1) } - recur(cases)(trackingCtx) + + def contextBounds(tp: Type): TypeBounds = tp match { + case tp: TypeParamRef => ctx.typerState.constraint.fullBounds(tp) + case tp: TypeRef => ctx.gadt.bounds(tp.symbol) + } + + def 
updateReductionContext() = { + reductionContext = new mutable.HashMap + for (tp <- cmp.footprint) reductionContext(tp) = contextBounds(tp) + } + + def upToDate = + cmp.footprint.forall { tp => + reductionContext.get(tp) match { + case Some(bounds) => bounds `eq` contextBounds(tp) + case None => false + } + } + + if (!Config.cacheMatchReduced || myReduced == null || !upToDate) { + myReduced = recur(cases)(trackingCtx) + updateReductionContext() + } + myReduced } } From e2d8bc35591817e27aba472af7bf1ff5ee24b02b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 24 Aug 2018 10:22:38 +0200 Subject: [PATCH 10/49] Use a special type for match aliases This is a more principled implementation of the situation that match aliases are not full type aliases because one side cannot be freely substituted for the other. Match aliases are checked in some respects like abstract type bounds (in particular, nested cycles are allowed), but are treated as aliases in others (in particular, lower and upper "bound" are always guaranteed to be the same). 
--- .../tools/dotc/core/TypeApplications.scala | 10 +- .../dotty/tools/dotc/core/TypeComparer.scala | 8 +- .../src/dotty/tools/dotc/core/TypeOps.scala | 4 +- .../src/dotty/tools/dotc/core/Types.scala | 91 ++++++++++++------- .../tools/dotc/core/tasty/TreeUnpickler.scala | 5 +- .../tools/dotc/printing/PlainPrinter.scala | 4 +- .../tools/dotc/printing/ReplPrinter.scala | 2 +- .../dotty/tools/dotc/typer/Applications.scala | 2 +- .../src/dotty/tools/dotc/typer/Checking.scala | 37 ++++---- .../dotty/tools/dotc/typer/Implicits.scala | 2 +- .../dotty/tools/dotc/typer/ProtoTypes.scala | 8 +- .../dotty/tools/dotc/typer/TypeAssigner.scala | 4 +- .../src/dotty/tools/repl/ReplDriver.scala | 2 +- 13 files changed, 104 insertions(+), 75 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index fd49239893c7..329270348206 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -337,8 +337,8 @@ class TypeApplications(val self: Type) extends AnyVal { tl => arg.paramInfos.map(_.subst(arg, tl).bounds), tl => arg.resultType.subst(arg, tl) ) - case arg @ TypeAlias(alias) => - arg.derivedTypeAlias(adaptArg(alias)) + case arg: AliasingBounds => + arg.derivedAlias(adaptArg(arg.alias)) case arg @ TypeBounds(lo, hi) => arg.derivedTypeBounds(adaptArg(lo), adaptArg(hi)) case _ => @@ -401,8 +401,8 @@ class TypeApplications(val self: Type) extends AnyVal { dealiased.derivedAndType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args)) case dealiased: OrType => dealiased.derivedOrType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args)) - case dealiased: TypeAlias => - dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args)) + case dealiased: AliasingBounds => + dealiased.derivedAlias(dealiased.alias.appliedTo(args)) case dealiased: TypeBounds => dealiased.derivedTypeBounds(dealiased.lo.appliedTo(args), 
dealiased.hi.appliedTo(args)) case dealiased: LazyRef => @@ -440,7 +440,7 @@ class TypeApplications(val self: Type) extends AnyVal { */ final def toBounds(implicit ctx: Context): TypeBounds = self match { case self: TypeBounds => self // this can happen for wildcard args - case _ => if (self.isMatch) TypeBounds.upper(self) else TypeAlias(self) + case _ => if (self.isMatch) MatchAlias(self) else TypeAlias(self) } /** Translate a type of the form From[T] to To[T], keep other types as they are. diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index d680b31db194..7343b8de48ad 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -340,7 +340,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { recur(tp1.underlying, tp2) case tp1: WildcardType => def compareWild = tp1.optBounds match { - case bounds: TypeBounds => recur(bounds.effectiveLo, tp2) + case bounds: TypeBounds => recur(bounds.lo, tp2) case _ => true } compareWild @@ -384,7 +384,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { narrowGADTBounds(tp2, tp1, approx, isUpper = false)) && GADTusage(tp2.symbol) } - isSubApproxHi(tp1, info2.effectiveLo) || compareGADT || fourthTry + isSubApproxHi(tp1, info2.lo) || compareGADT || fourthTry case _ => val cls2 = tp2.symbol @@ -428,7 +428,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { // So if the constraint is not yet frozen, we do the same comparison again // with a frozen constraint, which means that we get a chance to do the // widening in `fourthTry` before adding to the constraint. 
- if (frozenConstraint) isSubType(tp1, bounds(tp2).effectiveLo) + if (frozenConstraint) isSubType(tp1, bounds(tp2).lo) else isSubTypeWhenFrozen(tp1, tp2) alwaysTrue || { if (canConstrain(tp2) && !approx.low) @@ -814,7 +814,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { if (tyconIsTypeRef) recur(tp1, tp2.superType) else isSubApproxHi(tp1, tycon2bounds.lo.applyIfParameterized(args2)) else - fallback(tycon2bounds.effectiveLo) + fallback(tycon2bounds.lo) tycon2 match { case param2: TypeParamRef => diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 91703bb22dbf..d6cb9f528b2e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -101,8 +101,8 @@ trait TypeOps { this: Context => // TODO: Make standalone object. } case _: ThisType | _: BoundType => tp - case tp: TypeAlias => - tp.derivedTypeAlias(simplify(tp.alias, theMap)) + case tp: AliasingBounds => + tp.derivedAlias(simplify(tp.alias, theMap)) case AndType(l, r) if !ctx.mode.is(Mode.Type) => simplify(l, theMap) & simplify(r, theMap) case OrType(l, r) if !ctx.mode.is(Mode.Type) => diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 360ed8a73eae..d05ea3137a92 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -118,7 +118,7 @@ object Types { t.symbol.is(Provisional) || apply(x, t.prefix) || { t.info match { - case TypeAlias(alias) => apply(x, alias) + case info: AliasingBounds => apply(x, info.alias) case TypeBounds(lo, hi) => apply(apply(x, lo), hi) case _ => false } @@ -319,7 +319,7 @@ object Types { } /** Is this an alias TypeBounds? 
*/ - final def isAlias: Boolean = this.isInstanceOf[TypeAlias] + final def isTypeAlias: Boolean = this.isInstanceOf[TypeAlias] /** Is this a MethodType which is from Java */ def isJavaMethod: Boolean = false @@ -628,8 +628,8 @@ object Types { val rinfo = tp.refinedInfo if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) { // simplified case that runs more efficiently val jointInfo = - if (rinfo.isAlias) rinfo - else if (pinfo.isAlias) pinfo + if (rinfo.isTypeAlias) rinfo + else if (pinfo.isTypeAlias) pinfo else if (ctx.base.pendingMemberSearches.contains(name)) pinfo safe_& rinfo else pinfo recoverable_& rinfo pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo) @@ -1238,7 +1238,7 @@ object Types { */ @tailrec final def normalizedPrefix(implicit ctx: Context): Type = this match { case tp: NamedType => - if (tp.symbol.info.isAlias) tp.info.normalizedPrefix else tp.prefix + if (tp.symbol.info.isTypeAlias) tp.info.normalizedPrefix else tp.prefix case tp: ClassInfo => tp.prefix case tp: TypeProxy => @@ -3085,8 +3085,8 @@ object Types { def derivedLambdaAbstraction(paramNames: List[TypeName], paramInfos: List[TypeBounds], resType: Type)(implicit ctx: Context): Type = resType match { - case resType @ TypeAlias(alias) => - resType.derivedTypeAlias(newLikeThis(paramNames, paramInfos, alias)) + case resType: AliasingBounds => + resType.derivedAlias(newLikeThis(paramNames, paramInfos, resType.alias)) case resType @ TypeBounds(lo, hi) => resType.derivedTypeBounds( if (lo.isRef(defn.NothingClass)) lo else newLikeThis(paramNames, paramInfos, lo), @@ -3187,8 +3187,8 @@ object Types { override def fromParams[PI <: ParamInfo.Of[TypeName]](params: List[PI], resultType: Type)(implicit ctx: Context): Type = { def expand(tp: Type) = super.fromParams(params, tp) resultType match { - case rt: TypeAlias => - rt.derivedTypeAlias(expand(rt.alias)) + case rt: AliasingBounds => + rt.derivedAlias(expand(rt.alias)) case rt @ TypeBounds(lo, hi) => rt.derivedTypeBounds( 
if (lo.isRef(defn.NothingClass)) lo else expand(lo), expand(hi)) @@ -3526,6 +3526,14 @@ object Types { // ------ MatchType --------------------------------------------------------------- + /** scrutinee match { case_1 ... case_n } + * + * where + * + * case_i = [X1, ..., Xn] patternType => resultType + * + * and `X_1,...X_n` are the type variables bound in `patternType` + */ abstract case class MatchType(scrutinee: Type, cases: List[Type]) extends CachedProxyType with TermType { override def computeHash(bs: Binders) = doHash(bs, scrutinee, cases) @@ -3781,22 +3789,19 @@ object Types { case _ => super.| (that) } - def effectiveLo(implicit ctx: Context) = - if (hi.isMatch) hi else lo - override def computeHash(bs: Binders) = doHash(bs, lo, hi) override def stableHash = lo.stableHash && hi.stableHash override def equals(that: Any): Boolean = equals(that, null) override def iso(that: Any, bs: BinderPairs): Boolean = that match { - case that: TypeAlias => false + case that: AliasingBounds => false case that: TypeBounds => lo.equals(that.lo, bs) && hi.equals(that.hi, bs) case _ => false } override def eql(that: Type) = that match { - case that: TypeAlias => false + case that: AliasingBounds => false case that: TypeBounds => lo.eq(that.lo) && hi.eq(that.hi) case _ => false } @@ -3804,28 +3809,44 @@ object Types { class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) - abstract class TypeAlias(val alias: Type) extends TypeBounds(alias, alias) { + /** Common supertype of `TypeAlias` and `MatchAlias` */ + abstract class AliasingBounds(val alias: Type) extends TypeBounds(alias, alias) { - /** pre: this is a type alias */ - def derivedTypeAlias(alias: Type)(implicit ctx: Context) = - if (alias eq this.alias) this else TypeAlias(alias) + def derivedAlias(alias: Type)(implicit ctx: Context): AliasingBounds override def computeHash(bs: Binders) = doHash(bs, alias) override def stableHash = alias.stableHash override def iso(that: Any, bs: BinderPairs): Boolean = that 
match { - case that: TypeAlias => alias.equals(that.alias, bs) + case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && alias.equals(that.alias, bs) case _ => false } // equals comes from case class; no matching override is needed override def eql(that: Type): Boolean = that match { - case that: TypeAlias => alias.eq(that.alias) + case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && alias.eq(that.alias) case _ => false } } - class CachedTypeAlias(alias: Type) extends TypeAlias(alias) + /** = T + */ + class TypeAlias(alias: Type) extends AliasingBounds(alias) { + def derivedAlias(alias: Type)(implicit ctx: Context) = + if (alias eq this.alias) this else TypeAlias(alias) + } + + /** = T where `T` is a `MatchType` + * + * Match aliases are treated differently from type aliases. Their sides are mutually + * subtypes of each other but one side is not generally substitutable for the other. + * If we assumed full substitutivity, we would have to reject all recursive match + * aliases (or else take the jump and allow full recursive types). 
+ */ + class MatchAlias(alias: Type) extends AliasingBounds(alias) { + def derivedAlias(alias: Type)(implicit ctx: Context) = + if (alias eq this.alias) this else MatchAlias(alias) + } object TypeBounds { def apply(lo: Type, hi: Type)(implicit ctx: Context): TypeBounds = @@ -3836,11 +3857,15 @@ object Types { } object TypeAlias { - def apply(alias: Type)(implicit ctx: Context) = - unique(new CachedTypeAlias(alias)) + def apply(alias: Type)(implicit ctx: Context) = unique(new TypeAlias(alias)) def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias) } + object MatchAlias { + def apply(alias: Type)(implicit ctx: Context) = unique(new MatchAlias(alias)) + def unapply(tp: MatchAlias): Option[Type] = Some(tp.alias) + } + // ----- Annotated and Import types ----------------------------------------------- /** An annotated type tpe @ annot */ @@ -4042,8 +4067,8 @@ object Types { def apply(tp: Type): Type = tp match { case tp: TypeRef if tp.symbol.is(ClassTypeParam) && tp.symbol.owner == cls => tp.info match { - case TypeAlias(alias) => - mapOver(alias) + case info: AliasingBounds => + mapOver(info.alias) case TypeBounds(lo, hi) => range(atVariance(-variance)(apply(lo)), apply(hi)) case _ => @@ -4099,8 +4124,8 @@ object Types { tp.derivedRefinedType(parent, tp.refinedName, info) protected def derivedRecType(tp: RecType, parent: Type): Type = tp.rebind(parent) - protected def derivedTypeAlias(tp: TypeAlias, alias: Type): Type = - tp.derivedTypeAlias(alias) + protected def derivedAlias(tp: AliasingBounds, alias: Type): Type = + tp.derivedAlias(alias) protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type = tp.derivedTypeBounds(lo, hi) protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type = @@ -4167,8 +4192,8 @@ object Types { case tp: RefinedType => derivedRefinedType(tp, this(tp.parent), this(tp.refinedInfo)) - case tp: TypeAlias => - derivedTypeAlias(tp, atVariance(0)(this(tp.alias))) + case tp: AliasingBounds => + 
derivedAlias(tp, atVariance(0)(this(tp.alias))) case tp: TypeBounds => variance = -variance @@ -4387,7 +4412,7 @@ object Types { else info match { case Range(infoLo: TypeBounds, infoHi: TypeBounds) => assert(variance == 0) - if (!infoLo.isAlias && !infoHi.isAlias) propagate(infoLo, infoHi) + if (!infoLo.isTypeAlias && !infoHi.isTypeAlias) propagate(infoLo, infoHi) else range(defn.NothingType, tp.parent) case Range(infoLo, infoHi) => propagate(infoLo, infoHi) @@ -4403,13 +4428,13 @@ object Types { case _ => tp.rebind(parent) } - override protected def derivedTypeAlias(tp: TypeAlias, alias: Type) = + override protected def derivedAlias(tp: AliasingBounds, alias: Type) = if (alias eq tp.alias) tp else alias match { case Range(lo, hi) => if (variance > 0) TypeBounds(lo, hi) - else range(TypeAlias(lo), TypeAlias(hi)) - case _ => tp.derivedTypeAlias(alias) + else range(tp.derivedAlias(lo), tp.derivedAlias(hi)) + case _ => tp.derivedAlias(alias) } override protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type) = diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 767edd9a6a84..09d945b73df9 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -325,7 +325,10 @@ class TreeUnpickler(reader: TastyReader, case APPLIEDtype => readType().appliedTo(until(end)(readType())) case TYPEBOUNDS => - TypeBounds(readType(), readType()) + val lo = readType() + val hi = readType() + if (lo.isMatch && (lo `eq` hi)) MatchAlias(lo) + else TypeBounds(lo, hi) case ANNOTATEDtype => AnnotatedType(readType(), Annotation(readTerm())) case ANDtype => diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index c8e72be144c5..db410e859c2c 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ 
b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -204,7 +204,7 @@ class PlainPrinter(_ctx: Context) extends Printer { val bounds = if (constr.contains(tp)) constr.fullBounds(tp.origin)(ctx.addMode(Mode.Printing)) else TypeBounds.empty - if (bounds.isAlias) toText(bounds.lo) ~ (Str("^") provided ctx.settings.YprintDebug.value) + if (bounds.isTypeAlias) toText(bounds.lo) ~ (Str("^") provided ctx.settings.YprintDebug.value) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")" else toText(tp.origin) } @@ -316,7 +316,7 @@ class PlainPrinter(_ctx: Context) extends Printer { /** String representation of a definition's type following its name */ protected def toTextRHS(tp: Type): Text = controlled { homogenize(tp) match { - case tp: TypeAlias => + case tp: AliasingBounds => " = " ~ toText(tp.alias) case tp @ TypeBounds(lo, hi) => (if (lo isRef defn.NothingClass) Text() else " >: " ~ toText(lo)) ~ diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala index 84d0cae359cd..700bc8bcccb4 100644 --- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala @@ -32,7 +32,7 @@ class ReplPrinter(_ctx: Context) extends DecompilerPrinter(_ctx) { override def dclText(sym: Symbol): Text = { toText(sym) ~ { if (sym.is(Method)) toText(sym.info) - else if (sym.isType && sym.info.isInstanceOf[TypeAlias]) toText(sym.info) + else if (sym.isType && sym.info.isTypeAlias) toText(sym.info) else if (sym.isType || sym.isClass) "" else ":" ~~ toText(sym.info) } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index bce6fb03d7eb..7a1a3564bb56 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -877,7 +877,7 @@ trait Applications extends Compatibility { 
self: Typer with Dynamic => val ttree = typedType(untpd.rename(tree, tree.name.toTypeName))(nestedCtx) ttree.tpe match { - case alias: TypeRef if alias.info.isAlias && !nestedCtx.reporter.hasErrors => + case alias: TypeRef if alias.info.isTypeAlias && !nestedCtx.reporter.hasErrors => companionRef(alias) match { case companion: TermRef => return untpd.ref(companion) withPos tree.pos case _ => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 31281b413f88..fd9266fe20ae 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -165,29 +165,30 @@ object Checking { /** The last type top-level type checked when a CyclicReference occurs. */ var lastChecked: Type = NoType + private def checkPart(tp: Type, w: String) = + try apply(tp) + finally { + where = w + lastChecked = tp + } + + private def checkUpper(tp: Type, w: String) = { + val saved = nestedCycleOK + nestedCycleOK = true + try checkPart(tp, w) + finally nestedCycleOK = saved + } + /** Check info `tp` for cycles. Throw CyclicReference for illegal cycles, * break direct cycle with a LazyRef for legal, F-bounded cycles. 
*/ def checkInfo(tp: Type): Type = tp match { case tp @ TypeAlias(alias) => - try tp.derivedTypeAlias(apply(alias)) - finally { - where = "alias" - lastChecked = alias - } + tp.derivedAlias(checkPart(alias, "alias")) + case tp @ MatchAlias(alias) => + tp.derivedAlias(checkUpper(alias, "match")) case tp @ TypeBounds(lo, hi) => - val lo1 = try apply(lo) finally { - where = "lower bound" - lastChecked = lo - } - val saved = nestedCycleOK - nestedCycleOK = true - try tp.derivedTypeBounds(lo1, apply(hi)) - finally { - nestedCycleOK = saved - where = "upper bound" - lastChecked = hi - } + tp.derivedTypeBounds(checkPart(lo, "lower bound"), checkUpper(hi, "upper bound")) case _ => tp } @@ -476,7 +477,7 @@ object Checking { tp } else mapOver(tp) - if ((errors ne prevErrors) && !sym.isType && tp.info.isAlias) { + if ((errors ne prevErrors) && !sym.isType && tp.info.isTypeAlias) { // try to dealias to avoid a leak error val savedErrors = errors errors = prevErrors diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 476f1b7c079f..2e210cdb7db5 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1202,7 +1202,7 @@ class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) { foldOver(n + 1, tp) case tp: RefinedType => foldOver(n + 1, tp) - case tp: TypeRef if tp.info.isAlias => + case tp: TypeRef if tp.info.isTypeAlias => apply(n, tp.superType) case _ => foldOver(n, tp) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 250aa2a8e80b..9dcf519b157f 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -454,11 +454,11 @@ object ProtoTypes { def constrained(tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean = false)(implicit ctx: Context): (TypeLambda, 
List[TypeTree]) = { val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty - if (tl.isInstanceOf[PolyType]) + if (tl.isInstanceOf[PolyType]) assert(!(ctx.typerState.isCommittable && !addTypeVars), s"inconsistent: no typevars were added to committable constraint ${state.constraint}") // hk type lambdas can be added to constraints without typevars during match reduction - + def newTypeVars(tl: TypeLambda): List[TypeTree] = for (paramRef <- tl.paramRefs) yield { @@ -576,8 +576,8 @@ object ProtoTypes { wildApprox(tp.parent, theMap, seen), tp.refinedName, wildApprox(tp.refinedInfo, theMap, seen)) - case tp: TypeAlias => // default case, inlined for speed - tp.derivedTypeAlias(wildApprox(tp.alias, theMap, seen)) + case tp: AliasingBounds => // default case, inlined for speed + tp.derivedAlias(wildApprox(tp.alias, theMap, seen)) case tp @ TypeParamRef(poly, pnum) => def wildApproxBounds(bounds: TypeBounds) = if (seen.contains(tp)) WildcardType diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index b30e853f153c..049828e4fe47 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -99,8 +99,8 @@ trait TypeAssigner { } case tp: TypeRef if toAvoid(tp.symbol) => tp.info match { - case TypeAlias(alias) => - apply(alias) + case info: AliasingBounds => + apply(info.alias) case TypeBounds(lo, hi) => range(atVariance(-variance)(apply(lo)), apply(hi)) case info: ClassInfo => diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 514287a7ad52..063d88497b88 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -263,7 +263,7 @@ class ReplDriver(settings: Array[String], .filter(_.symbol.name.is(SimpleNameKind)) val typeAliases = - 
info.bounds.hi.typeMembers.filter(_.symbol.info.isInstanceOf[TypeAlias]) + info.bounds.hi.typeMembers.filter(_.symbol.info.isTypeAlias) ( typeAliases.map("// defined alias " + _.symbol.showUser) ++ From b261ff70db4d7b3512a36c44ea8e805c7145ef8c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 24 Aug 2018 17:01:29 +0200 Subject: [PATCH 11/49] Applications of erased functions are always pure ...by definition since no code is generated, no side effects can be performed either. --- compiler/src/dotty/tools/dotc/ast/TreeInfo.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index dc400d79ba63..ea5345df3e84 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -393,11 +393,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case Ident(_) => refPurity(tree) case Select(qual, _) => - refPurity(tree).min(exprPurity(qual)) + if (tree.symbol.is(Erased)) Pure + else refPurity(tree).min(exprPurity(qual)) case New(_) => SimplyPure case TypeApply(fn, _) => - exprPurity(fn) + if (fn.symbol.is(Erased)) Pure else exprPurity(fn) case Apply(fn, args) => def isKnownPureOp(sym: Symbol) = sym.owner.isPrimitiveValueClass || sym.owner == defn.StringClass @@ -405,8 +406,8 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => // A constant expression with pure arguments is pure. || fn.symbol.isStable) minOf(exprPurity(fn), args.map(exprPurity)) `min` Pure - else - Impure + else if (fn.symbol.is(Erased)) Pure + else Impure case Typed(expr, _) => exprPurity(expr) case Block(stats, expr) => From a144c35877b507c75175dfff393e3a7b01541f61 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 24 Aug 2018 18:29:50 +0200 Subject: [PATCH 12/49] Allow user-defined error diagnostics when rewriting So far, it's very rudimentary. 
There's a method `typelevel.error` which produces an error with a constant string argument. To make this more powerful we need a rewriting framework that interpolates strings and can report compiler trees and terms in such strings. --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 4 ++++ compiler/src/dotty/tools/dotc/typer/Inliner.scala | 12 ++++++++++++ library/src-scala3/scala/typelevel/package.scala | 4 ++++ 3 files changed, 20 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 0483f699fc3d..f9e47c588e4b 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -218,6 +218,10 @@ class Definitions { lazy val Sys_errorR = SysPackage.moduleClass.requiredMethodRef(nme.error) def Sys_error(implicit ctx: Context) = Sys_errorR.symbol + lazy val TypelevelPackageObjectRef = ctx.requiredModuleRef("scala.typelevel.package") + lazy val Typelevel_errorR = TypelevelPackageObjectRef.symbol.requiredMethodRef(nme.error) + def Typelevel_error(implicit ctx: Context) = Typelevel_errorR.symbol + /** The `scalaShadowing` package is used to safely modify classes and * objects in scala so that they can be used from dotty. 
They will * be visible as members of the `scala` package, replacing any objects diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 5f4b1d5a76b9..c9931becd35f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -425,6 +425,16 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { expansion } + def issueError() = callValueArgss match { + case (msgArg :: Nil) :: Nil => + msgArg.tpe match { + case ConstantType(Constant(msg: String)) => + ctx.error(msg, call.pos) + case _ => + } + case _ => + } + trace(i"inlining $call", inlining, show = true) { // The normalized bindings collected in `bindingsBuf` @@ -444,6 +454,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { val (finalBindings, finalExpansion) = dropUnusedDefs(bindingsBuf.toList ++ matchBindings, expansion1) val (finalMatchBindings, finalArgBindings) = finalBindings.partition(matchBindings.contains(_)) + if (inlinedMethod == defn.Typelevel_error) issueError() + // Take care that only argument bindings go into `bindings`, since positions are // different for bindings from arguments and bindings from body. tpd.Inlined(call, finalArgBindings, seq(finalMatchBindings, finalExpansion)) diff --git a/library/src-scala3/scala/typelevel/package.scala b/library/src-scala3/scala/typelevel/package.scala index 17220ab3187c..7ece41cfb8f0 100644 --- a/library/src-scala3/scala/typelevel/package.scala +++ b/library/src-scala3/scala/typelevel/package.scala @@ -1,6 +1,10 @@ package scala package object typelevel { + erased def erasedValue[T]: T = ??? + case class Typed[T](val value: T) { type Type = T } + + rewrite def error(transparent msg: String): Nothing = ??? 
} \ No newline at end of file From 5147d3222dc25b581b9848b4b3dfc72603bc5870 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Fri, 24 Aug 2018 18:42:23 +0200 Subject: [PATCH 13/49] Fix two issues when comparing match types --- .../dotty/tools/dotc/core/TypeComparer.scala | 22 ++++++++----- tests/pos/matchtype.scala | 31 ++++++++++++++++++- 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 7343b8de48ad..e2f5f92d244c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -396,10 +396,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { // Note: We would like to replace this by `if (tp1.hasHigherKind)` // but right now we cannot since some parts of the standard library rely on the // idiom that e.g. `List <: Any`. We have to bootstrap without scalac first. - val base = tp1.baseType(cls2) - if (base.exists && base.ne(tp1)) - return isSubType(base, tp2, if (tp1.isRef(cls2)) approx else approx.addLow) if (cls2 == defn.SingletonClass && tp1.isStable) return true + return tryBaseType(cls2) } else if (cls2.is(JavaDefined)) { // If `cls2` is parameterized, we are seeing a raw type, so we need to compare only the symbol @@ -599,6 +597,17 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { fourthTry } + def tryBaseType(cls2: Symbol) = { + val base = tp1.baseType(cls2) + if (base.exists && (base `ne` tp1)) + isSubType(base, tp2, if (tp1.isRef(cls2)) approx else approx.addLow) || + base.isInstanceOf[OrType] && fourthTry + // if base is a disjunction, this might have come from a tp1 type that + // expands to a match type. In this case, we should try to reduce the type + // and compare the redux. 
This is done in fourthTry + else fourthTry + } + def fourthTry: Boolean = tp1 match { case tp1: TypeRef => tp1.info match { @@ -810,7 +819,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { * tp1 <:< app2 using isSubType (this might instantiate params in tp2) */ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean = - if (tycon2bounds.lo eq tycon2bounds.hi) + if ((tycon2bounds.lo `eq` tycon2bounds.hi) && !tycon2bounds.isInstanceOf[MatchAlias]) if (tyconIsTypeRef) recur(tp1, tp2.superType) else isSubApproxHi(tp1, tycon2bounds.lo.applyIfParameterized(args2)) else @@ -827,10 +836,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case info2: TypeBounds => compareLower(info2, tyconIsTypeRef = true) case info2: ClassInfo => - val base = tp1.baseType(info2.cls) - if (base.exists && base.ne(tp1)) - isSubType(base, tp2, if (tp1.isRef(info2.cls)) approx else approx.addLow) - else fourthTry + tryBaseType(info2.cls) case _ => fourthTry } diff --git a/tests/pos/matchtype.scala b/tests/pos/matchtype.scala index cf84ea8acbaa..12ce253f1e5d 100644 --- a/tests/pos/matchtype.scala +++ b/tests/pos/matchtype.scala @@ -1,3 +1,4 @@ +import typelevel._ object Test { type T[X] = X match { case String => Int @@ -18,5 +19,33 @@ object Test { } type T2 = Len[(1, 2, 3)] - erased val x: S[S[S[Z]]] = typelevel.erasedValue[T2] + erased val x: S[S[S[Z]]] = erasedValue[T2] + + rewrite def checkSub[T1, T2] = + rewrite typelevel.erasedValue[T1] match { + case _: T2 => // OK + case _ => error("not a subtype T1/T2") + } + + rewrite def checkSame[T1, T2] = { + checkSub[T1, T2] + checkSub[T2, T1] + } + + checkSame[T2, S[S[S[Z]]]] + + type Head[X <: Tuple] = X match { + case (x1, _) => x1 + } + + checkSame[Head[(Int, String)], Int] + + type Concat[X <: Tuple, Y <: Tuple] = X match { + case Unit => Y + case x1 *: xs1 => x1 *: Concat[xs1, Y] + } + + checkSame[Concat[Unit, (String, Int)], (String, Int)] + checkSame[Concat[(Boolean, Boolean), 
(String, Int)], Boolean *: Boolean *: (String, Int)] + checkSub[(Boolean, Boolean, String, Int), Concat[(Boolean, Boolean), String *: Int *: Unit]] } \ No newline at end of file From fb5c5548cf76ab1b7b5a2915105829ee7e290bf1 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 27 Aug 2018 13:04:24 +0200 Subject: [PATCH 14/49] MatchType reorg - add upper bound to MatchType - have special MatchTypeTree to represent MatchTypes --- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 2 +- compiler/src/dotty/tools/dotc/ast/Trees.scala | 15 ++++++ compiler/src/dotty/tools/dotc/ast/untpd.scala | 1 + .../src/dotty/tools/dotc/core/Types.scala | 49 +++++++++---------- .../tools/dotc/core/tasty/TastyFormat.scala | 15 ++++-- .../tools/dotc/core/tasty/TreePickler.scala | 21 ++++++++ .../tools/dotc/core/tasty/TreeUnpickler.scala | 12 +++++ .../dotty/tools/dotc/parsing/Parsers.scala | 28 +++++++---- .../tools/dotc/printing/PlainPrinter.scala | 6 ++- .../tools/dotc/printing/RefinedPrinter.scala | 5 ++ .../src/dotty/tools/dotc/typer/Checking.scala | 2 +- .../dotty/tools/dotc/typer/TypeAssigner.scala | 8 ++- .../src/dotty/tools/dotc/typer/Typer.scala | 23 +++++---- docs/docs/internals/syntax.md | 7 +-- 14 files changed, 137 insertions(+), 57 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index ea5345df3e84..1f4ddc45da1a 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -306,7 +306,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] case mdef: TypeDef => def isBounds(rhs: Tree): Boolean = rhs match { case _: TypeBoundsTree => true - case _: Match => true // Typedefs with Match rhs classify as abstract + case _: MatchTypeTree => true // Typedefs with Match rhs classify as abstract case LambdaTypeTree(_, body) => isBounds(body) case _ => false } diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala 
b/compiler/src/dotty/tools/dotc/ast/Trees.scala index e7fea191eeb7..e512e8520359 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -661,6 +661,12 @@ object Trees { type ThisTree[-T >: Untyped] = LambdaTypeTree[T] } + /** [bound] selector match { cases } */ + case class MatchTypeTree[-T >: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]]) + extends TypTree[T] { + type ThisTree[-T >: Untyped] = MatchTypeTree[T] + } + /** => T */ case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T]) extends TypTree[T] { @@ -916,6 +922,7 @@ object Trees { type RefinedTypeTree = Trees.RefinedTypeTree[T] type AppliedTypeTree = Trees.AppliedTypeTree[T] type LambdaTypeTree = Trees.LambdaTypeTree[T] + type MatchTypeTree = Trees.MatchTypeTree[T] type ByNameTypeTree = Trees.ByNameTypeTree[T] type TypeBoundsTree = Trees.TypeBoundsTree[T] type Bind = Trees.Bind[T] @@ -1099,6 +1106,10 @@ object Trees { case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)) } + def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef]): MatchTypeTree = tree match { + case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree + case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)) + } def ByNameTypeTree(tree: Tree)(result: Tree): ByNameTypeTree = tree match { case tree: ByNameTypeTree if result eq tree.result => tree case _ => finalize(tree, untpd.ByNameTypeTree(result)) @@ -1255,6 +1266,8 @@ object Trees { case LambdaTypeTree(tparams, body) => implicit val ctx = localCtx cpy.LambdaTypeTree(tree)(transformSub(tparams), transform(body)) + case MatchTypeTree(bound, selector, cases) => + cpy.MatchTypeTree(tree)(transform(bound), transform(selector), transformSub(cases)) case ByNameTypeTree(result) => 
cpy.ByNameTypeTree(tree)(transform(result)) case TypeBoundsTree(lo, hi) => @@ -1389,6 +1402,8 @@ object Trees { case LambdaTypeTree(tparams, body) => implicit val ctx = localCtx this(this(x, tparams), body) + case MatchTypeTree(bound, selector, cases) => + this(this(this(x, bound), selector), cases) case ByNameTypeTree(result) => this(x, result) case TypeBoundsTree(lo, hi) => diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index b2278088a6c8..e0821f0325e0 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -293,6 +293,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def RefinedTypeTree(tpt: Tree, refinements: List[Tree]): RefinedTypeTree = new RefinedTypeTree(tpt, refinements) def AppliedTypeTree(tpt: Tree, args: List[Tree]): AppliedTypeTree = new AppliedTypeTree(tpt, args) def LambdaTypeTree(tparams: List[TypeDef], body: Tree): LambdaTypeTree = new LambdaTypeTree(tparams, body) + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef]): MatchTypeTree = new MatchTypeTree(bound, selector, cases) def ByNameTypeTree(result: Tree): ByNameTypeTree = new ByNameTypeTree(result) def TypeBoundsTree(lo: Tree, hi: Tree): TypeBoundsTree = new TypeBoundsTree(lo, hi) def Bind(name: Name, body: Tree): Bind = new Bind(name, body) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index d05ea3137a92..9a210d60a5d6 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3534,17 +3534,11 @@ object Types { * * and `X_1,...X_n` are the type variables bound in `patternType` */ - abstract case class MatchType(scrutinee: Type, cases: List[Type]) extends CachedProxyType with TermType { - override def computeHash(bs: Binders) = doHash(bs, scrutinee, cases) + abstract case class MatchType(bound: Type, scrutinee: Type, 
cases: List[Type]) extends CachedProxyType with TermType { - override def eql(that: Type) = that match { - case that: MatchType => scrutinee.eq(that.scrutinee) && cases.eqElements(that.cases) - case _ => false - } - - def derivedMatchType(scrutinee: Type, cases: List[Type])(implicit ctx: Context) = - if (scrutinee.eq(this.scrutinee) && cases.eqElements(this.cases)) this - else MatchType(scrutinee, cases) + def derivedMatchType(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = + if (bound.eq(this.bound) && scrutinee.eq(this.scrutinee) && cases.eqElements(this.cases)) this + else MatchType(bound, scrutinee, cases) def caseType(tp: Type)(implicit ctx: Context): Type = tp match { case tp: HKTypeLambda => caseType(tp.resType) @@ -3552,13 +3546,7 @@ object Types { } def alternatives(implicit ctx: Context): List[Type] = cases.map(caseType) - - private[this] var myUnderlying: Type = null - - def underlying(implicit ctx: Context): Type = { - if (myUnderlying == null) myUnderlying = alternatives.reduceLeft(OrType(_, _)) - myUnderlying - } + def underlying(implicit ctx: Context): Type = bound private def wildApproxMap(implicit ctx: Context) = new TypeMap { def apply(t: Type) = t match { @@ -3596,7 +3584,10 @@ object Types { } def contextBounds(tp: Type): TypeBounds = tp match { - case tp: TypeParamRef => ctx.typerState.constraint.fullBounds(tp) + case tp: TypeParamRef => + if (ctx.typerState.constraint.entry(tp).exists) + ctx.typerState.constraint.fullBounds(tp) + else TypeBounds.empty case tp: TypeRef => ctx.gadt.bounds(tp.symbol) } @@ -3619,13 +3610,21 @@ object Types { } myReduced } + + override def computeHash(bs: Binders) = doHash(bs, scrutinee, bound :: cases) + + override def eql(that: Type) = that match { + case that: MatchType => + bound.eq(that.bound) && scrutinee.eq(that.scrutinee) && cases.eqElements(that.cases) + case _ => false + } } - class CachedMatchType(scrutinee: Type, cases: List[Type]) extends MatchType(scrutinee, cases) + class 
CachedMatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends MatchType(bound, scrutinee, cases) object MatchType { - def apply(scrutinee: Type, cases: List[Type])(implicit ctx: Context) = - unique(new CachedMatchType(scrutinee, cases)) + def apply(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = + unique(new CachedMatchType(bound, scrutinee, cases)) } // ------ ClassInfo, Type Bounds -------------------------------------------------- @@ -4136,8 +4135,8 @@ object Types { tp.derivedAndType(tp1, tp2) protected def derivedOrType(tp: OrType, tp1: Type, tp2: Type): Type = tp.derivedOrType(tp1, tp2) - protected def derivedMatchType(tp: MatchType, scrutinee: Type, cases: List[Type]): Type = - tp.derivedMatchType(scrutinee, cases) + protected def derivedMatchType(tp: MatchType, bound: Type, scrutinee: Type, cases: List[Type]): Type = + tp.derivedMatchType(bound, scrutinee, cases) protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = tp.derivedAnnotatedType(underlying, annot) protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type = @@ -4236,7 +4235,7 @@ object Types { derivedOrType(tp, this(tp.tp1), this(tp.tp2)) case tp: MatchType => - derivedMatchType(tp, this(tp.scrutinee), tp.cases.mapConserve(this)) + derivedMatchType(tp, this(tp.bound), this(tp.scrutinee), tp.cases.mapConserve(this)) case tp: SkolemType => tp @@ -4613,7 +4612,7 @@ object Types { this(this(x, tp.tp1), tp.tp2) case tp: MatchType => - foldOver(this(x, tp.scrutinee), tp.cases) + foldOver(this(this(x, tp.bound), tp.scrutinee), tp.cases) case AnnotatedType(underlying, annot) => this(applyToAnnot(x, annot), underlying) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index e5e04ccc1acf..7de5d2f76aa4 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ 
b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -101,11 +101,12 @@ Standard-Section: "ASTs" TopLevelStat* SINGLETONtpt ref_Term REFINEDtpt Length underlying_Term refinement_Stat* APPLIEDtpt Length tycon_Term arg_Term* - POLYtpt Length TypeParam* body_Term + LAMBDAtpt Length TypeParam* body_Term TYPEBOUNDStpt Length low_Term high_Term? ANNOTATEDtpt Length underlying_Term fullAnnotation_Term ANDtpt Length left_Term right_Term ORtpt Length left_Term right_Term + MATCHtpt Length bound_Term? sel_Term CaseDef* BYNAMEtpt underlying_Term EMPTYTREE SHAREDterm term_ASTRef @@ -157,6 +158,7 @@ Standard-Section: "ASTs" TopLevelStat* ANNOTATEDtype Length underlying_Type fullAnnotation_Term ANDtype Length left_Type right_Type ORtype Length left_Type right_Type + MATCHtype Length bound_Type sel_Type case_Type* BIND Length boundName_NameRef bounds_Type // for type-variables defined in a type pattern BYNAMEtype underlying_Type @@ -431,6 +433,9 @@ object TastyFormat { final val ERASEDMETHODtype = 178 final val ERASEDIMPLICITMETHODtype = 179 + final val MATCHtype = 180 + final val MATCHtpt = 181 + final val UNTYPEDSPLICE = 199 // Tags for untyped trees only: @@ -459,7 +464,7 @@ object TastyFormat { firstNatTreeTag <= tag && tag <= SYMBOLconst || firstASTTreeTag <= tag && tag <= SINGLETONtpt || firstNatASTTreeTag <= tag && tag <= NAMEDARG || - firstLengthTreeTag <= tag && tag <= TYPEREFin || + firstLengthTreeTag <= tag && tag <= MATCHtpt || tag == HOLE def isParamTag(tag: Int) = tag == PARAM || tag == TYPEPARAM @@ -513,8 +518,8 @@ object TastyFormat { | ANDtpt | ORtpt | BYNAMEtpt - | BIND - | MATCH => true + | MATCHtpt + | BIND => true case _ => false } @@ -649,6 +654,8 @@ object TastyFormat { case ERASEDIMPLICITMETHODtype => "ERASEDIMPLICITMETHODtype" case TYPELAMBDAtype => "TYPELAMBDAtype" case LAMBDAtpt => "LAMBDAtpt" + case MATCHtype => "MATCHtype" + case MATCHtpt => "MATCHtpt" case PARAMtype => "PARAMtype" case ANNOTATION => "ANNOTATION" case PRIVATEqualified => 
"PRIVATEqualified" diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 3df4dca3f33d..ed4ae37592c7 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -248,6 +248,13 @@ class TreePickler(pickler: TastyPickler) { pickleType(tpe.underlying) case tpe: HKTypeLambda => pickleMethodic(TYPELAMBDAtype, tpe) + case tpe: MatchType => + writeByte(MATCHtype) + withLength { + pickleType(tpe.bound) + pickleType(tpe.scrutinee) + tpe.cases.foreach(pickleType(_)) + } case tpe: PolyType if richTypes => pickleMethodic(POLYtype, tpe) case tpe: MethodType if richTypes => @@ -531,6 +538,13 @@ class TreePickler(pickler: TastyPickler) { case OrTypeTree(tp1, tp2) => writeByte(ORtpt) withLength { pickleTree(tp1); pickleTree(tp2) } + case MatchTypeTree(bound, selector, cases) => + writeByte(MATCHtpt) + withLength { + if (!bound.isEmpty) pickleTree(bound) + pickleTree(selector) + cases.foreach(pickleTree) + } case ByNameTypeTree(tp) => writeByte(BYNAMEtpt) pickleTree(tp) @@ -838,6 +852,13 @@ class TreePickler(pickler: TastyPickler) { case Annotated(tree, annot) => writeByte(ANNOTATEDtpt) withLength { pickleTpt(tree); pickleTerm(annot) } + case MatchTypeTree(bound, selector, cases) => + writeByte(MATCHtpt) + withLength { + if (!bound.isEmpty) pickleTpt(bound) + pickleTpt(selector) + cases.foreach(pickleUntyped) + } case LambdaTypeTree(tparams, body) => writeByte(LAMBDAtpt) withLength { pickleParams(tparams); pickleTpt(body) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 09d945b73df9..786fae8c1bd5 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -337,6 +337,8 @@ class TreeUnpickler(reader: TastyReader, OrType(readType(), readType()) 
case SUPERtype => SuperType(readType(), readType()) + case MATCHtype => + MatchType(readType(), readType(), until(end)(readType())) case POLYtype => readMethodic(PolyType, _.toTypeName) case METHODtype => @@ -1130,6 +1132,11 @@ class TreeUnpickler(reader: TastyReader, val tparams = readParams[TypeDef](TYPEPARAM) val body = readTpt() LambdaTypeTree(tparams, body) + case MATCHtpt => + val fst = readTpt() + val (bound, scrut) = + if (nextUnsharedTag == CASEDEF) (EmptyTree, fst) else (fst, readTpt()) + MatchTypeTree(bound, scrut, readCases(end)) case TYPEBOUNDStpt => val lo = readTpt() val hi = if (currentAddr == end) lo else readTpt() @@ -1372,6 +1379,11 @@ class TreeUnpickler(reader: TastyReader, val tparams = readParams[TypeDef](TYPEPARAM) val body = readUntyped() untpd.LambdaTypeTree(tparams, body) + case MATCHtpt => + val fst = readUntyped() + val (bound, scrut) = + if (nextUnsharedTag == CASEDEF) (EmptyTree, fst) else (fst, readUntyped()) + MatchTypeTree(bound, scrut, readCases(end)) case TYPEBOUNDStpt => val lo = readUntyped() val hi = ifBefore(end)(readUntyped(), lo) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 5d86392766b4..b9478b373a9c 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -819,7 +819,7 @@ object Parsers { in.token match { case ARROW => functionRest(t :: Nil) - case MATCH => matchType(t) + case MATCH => matchType(EmptyTree, t) case FORSOME => syntaxError(ExistentialTypesNoLongerSupported()); t case _ => if (imods.is(Implicit) && !t.isInstanceOf[FunctionWithMods]) @@ -1318,9 +1318,9 @@ object Parsers { /** `match' { TypeCaseClauses } */ - def matchType(t: Tree) = - atPos(t.pos.start, in.skipToken()) { - inBraces(Match(t, caseClauses(typeCaseClause))) + def matchType(bound: Tree, t: Tree) = + atPos((if (bound.isEmpty) t else bound).pos.start, accept(MATCH)) { + inBraces(MatchTypeTree(bound, t, 
caseClauses(typeCaseClause))) } /** FunParams ::= Bindings @@ -2281,20 +2281,30 @@ object Parsers { Block(stats, Literal(Constant(()))) } - /** TypeDef ::= type id [TypeParamClause] `=' Type - * TypeDcl ::= type id [TypeParamClause] TypeBounds + /** TypeDcl ::= id [TypeParamClause] (TypeBounds | ‘=’ Type) + * | id [TypeParamClause] <: Type = MatchType */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atPos(start, nameStart) { val name = ident().toTypeName val tparams = typeParamClauseOpt(ParamOwner.Type) + def makeTypeDef(rhs: Tree): Tree = + TypeDef(name, lambdaAbstract(tparams, rhs)).withMods(mods).setComment(in.getDocComment(start)) in.token match { case EQUALS => in.nextToken() - TypeDef(name, lambdaAbstract(tparams, toplevelTyp())).withMods(mods).setComment(in.getDocComment(start)) - case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF => - TypeDef(name, lambdaAbstract(tparams, typeBounds())).withMods(mods).setComment(in.getDocComment(start)) + makeTypeDef(toplevelTyp()) + case SUBTYPE => + in.nextToken() + val bound = toplevelTyp() + if (in.token == EQUALS) { + in.nextToken() + makeTypeDef(matchType(bound, infixType())) + } + else makeTypeDef(TypeBoundsTree(EmptyTree, bound)) + case SUPERTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF => + makeTypeDef(typeBounds()) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) EmptyTree diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index db410e859c2c..19ed444e5e7a 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -164,11 +164,13 @@ class PlainPrinter(_ctx: Context) extends Printer { changePrec(AndTypePrec) { toText(tp1) ~ " & " ~ atPrec(AndTypePrec + 1) { toText(tp2) } } case OrType(tp1, tp2) => changePrec(OrTypePrec) { toText(tp1) ~ " | " ~ atPrec(OrTypePrec + 1) { 
toText(tp2) } } - case MatchType(scrutinee, cases) => + case MatchType(bound, scrutinee, cases) => changePrec(GlobalPrec) { def caseText(tp: Type): Text = "case " ~ toText(tp) def casesText = Text(cases.map(caseText), "\n") - atPrec(InfixPrec) { toText(scrutinee) } ~ " match {" ~ casesText ~ "}" + atPrec(InfixPrec) { toText(scrutinee) } ~ + keywordStr(" match ") ~ "{" ~ casesText ~ "}" ~ + (" <: " ~ toText(bound) provided !bound.isRef(defn.AnyClass)) }.close case tp: ErrorType => s"" diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 5f42fa346f27..ff146ed9ded2 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -412,6 +412,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { tparamsText(tparams) ~ " -> " ~ toText(body) } + case MatchTypeTree(bound, sel, cases) => + changePrec(GlobalPrec) { + toText(sel) ~ keywordStr(" match ") ~ blockText(cases) ~ + (" <: " ~ toText(bound) provided !bound.isEmpty) + } case ByNameTypeTree(tpt) => "=> " ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi) => diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index fd9266fe20ae..de7b21f30366 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -210,7 +210,7 @@ object Checking { this(tp.info) mapOver(tp) case tp @ AppliedType(tycon, args) => - tp.derivedAppliedType(this(tycon), args.map(this(_, nestedCycleOK, nestedCycleOK))) + tp.derivedAppliedType(this(tycon), args.mapConserve(this(_, nestedCycleOK, nestedCycleOK))) case tp @ RefinedType(parent, name, rinfo) => tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK)) case tp: RecType => diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala 
b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 049828e4fe47..9bad43a3d740 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -478,8 +478,7 @@ trait TypeAssigner { } def assignType(tree: untpd.Match, scrutinee: Tree, cases: List[CaseDef])(implicit ctx: Context) = - if (scrutinee.isType) tree.withType(MatchType(scrutinee.tpe, cases.tpes)) - else tree.withType(ctx.typeComparer.lub(cases.tpes)) + tree.withType(ctx.typeComparer.lub(cases.tpes)) def assignType(tree: untpd.Labeled)(implicit ctx: Context) = tree.withType(tree.bind.symbol.info) @@ -535,6 +534,11 @@ trait TypeAssigner { def assignType(tree: untpd.LambdaTypeTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) = tree.withType(HKTypeLambda.fromParams(tparamDefs.map(_.symbol.asType), body.tpe)) + def assignType(tree: untpd.MatchTypeTree, bound: Tree, scrutinee: Tree, cases: List[CaseDef])(implicit ctx: Context) = { + val boundType = if (bound.isEmpty) defn.AnyType else bound.tpe + tree.withType(MatchType(boundType, scrutinee.tpe, cases.tpes)) + } + def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) = tree.withType(ExprType(result.tpe)) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 956aa0a6143a..78489a0bec04 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -982,15 +982,9 @@ class Typer extends Namer typedMatchFinish(tree, sel1, sel1.tpe, pt) case _ => if (tree.isInstanceOf[untpd.RewriteMatch]) checkInRewriteContext("rewrite match", tree.pos) - val sel1 = typed(tree.selector) - if (ctx.mode.is(Mode.Type)) { - val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1.tpe, pt)) - assignType(cpy.Match(tree)(sel1, cases1), sel1, cases1) - } - else { - val selType = fullyDefinedType(sel1.tpe, "pattern selector", tree.pos).widen - 
typedMatchFinish(tree, sel1, selType, pt) - } + val sel1 = typedExpr(tree.selector) + val selType = fullyDefinedType(sel1.tpe, "pattern selector", tree.pos).widen + typedMatchFinish(tree, sel1, selType, pt) } } @@ -1088,7 +1082,7 @@ class Typer extends Namer def caseRest(implicit ctx: Context) = { val pat1 = checkSimpleKinded(typedType(cdef.pat)(ctx.addMode(Mode.Pattern))) if (!ctx.isAfterTyper) - constrainPatternType(pat1.tpe, pt)(ctx.addMode(Mode.GADTflexible)) + constrainPatternType(pat1.tpe, selType)(ctx.addMode(Mode.GADTflexible)) val pat2 = indexPattern(cdef).transform(pat1) val body1 = typedType(cdef.body, pt) assignType(cpy.CaseDef(cdef)(pat2, EmptyTree, body1), pat2, body1) @@ -1322,6 +1316,14 @@ class Typer extends Namer assignType(cpy.LambdaTypeTree(tree)(tparams1, body1), tparams1, body1) } + def typedMatchTypeTree(tree: untpd.MatchTypeTree, pt: Type)(implicit ctx: Context): Tree = { + val bound1 = typed(tree.bound) + val sel1 = typed(tree.selector) + val pt1 = if (bound1.isEmpty) pt else bound1.tpe + val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1.tpe, pt1)) + assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) + } + def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") { val result1 = typed(tree.result) assignType(cpy.ByNameTypeTree(tree)(result1), result1) @@ -1898,6 +1900,7 @@ class Typer extends Namer case tree: untpd.RefinedTypeTree => typedRefinedTypeTree(tree) case tree: untpd.AppliedTypeTree => typedAppliedTypeTree(tree) case tree: untpd.LambdaTypeTree => typedLambdaTypeTree(tree)(ctx.localContext(tree, NoSymbol).setNewScope) + case tree: untpd.MatchTypeTree => typedMatchTypeTree(tree, pt) case tree: untpd.ByNameTypeTree => typedByNameTypeTree(tree) case tree: untpd.TypeBoundsTree => typedTypeBoundsTree(tree, pt) case tree: untpd.Alternative => typedAlternative(tree, pt) diff --git a/docs/docs/internals/syntax.md 
b/docs/docs/internals/syntax.md index ecf73ff2bd29..83438de57693 100644 --- a/docs/docs/internals/syntax.md +++ b/docs/docs/internals/syntax.md @@ -119,13 +119,14 @@ ClassQualifier ::= ‘[’ id ‘]’ ```ebnf Type ::= [FunArgMods] FunArgTypes ‘=>’ Type Function(ts, t) | HkTypeParamClause ‘=>’ Type TypeLambda(ps, t) - | InfixType `match` TypeCaseClauses + | MatchType | InfixType FunArgMods ::= { ‘implicit’ | ‘erased’ } FunArgTypes ::= InfixType | ‘(’ [ FunArgType {‘,’ FunArgType } ] ‘)’ | ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ TypedFunParam ::= id ‘:’ Type +MatchType ::= InfixType `match` TypeCaseClauses InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= WithType {[nl] Refinement} RefinedTypeTree(t, ds) WithType ::= AnnotType {‘with’ AnnotType} (deprecated) @@ -319,8 +320,8 @@ ValDcl ::= ids ‘:’ Type VarDcl ::= ids ‘:’ Type PatDef(_, ids, tpe, EmptyTree) DefDcl ::= DefSig [‘:’ Type] DefDef(_, name, tparams, vparamss, tpe, EmptyTree) DefSig ::= id [DefTypeParamClause] DefParamClauses -TypeDcl ::= id [TypTypeParamClause] [‘=’ Type] TypeDefTree(_, name, tparams, tpt) - | id [HkTypeParamClause] TypeBounds TypeDefTree(_, name, tparams, bounds) +TypeDcl ::= id [TypeParamClause] (TypeBounds | ‘=’ Type) TypeDefTree(_, name, tparams, bounds) + | id [TypeParamClause] <: Type = MatchType Def ::= ‘val’ PatDef | ‘var’ VarDef From 400495fa6b3292bccad810fea40f2ed8555b87c8 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 27 Aug 2018 14:20:24 +0200 Subject: [PATCH 15/49] Typelevel natural numbers Implemented through a "successor" operation `S`, which is interpreted when applied to constant numbers. 
--- .../dotty/tools/dotc/core/Definitions.scala | 4 ++ .../src/dotty/tools/dotc/core/StdNames.scala | 1 + .../tools/dotc/core/TypeApplications.scala | 12 +++- .../dotty/tools/dotc/core/TypeComparer.scala | 60 +++++++++++++++++-- .../src-scala3/scala/typelevel/package.scala | 2 + tests/pos/matchtype.scala | 42 +++++++++---- 6 files changed, 102 insertions(+), 19 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index f9e47c588e4b..44b62329fa09 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -219,6 +219,7 @@ class Definitions { def Sys_error(implicit ctx: Context) = Sys_errorR.symbol lazy val TypelevelPackageObjectRef = ctx.requiredModuleRef("scala.typelevel.package") + lazy val TypelevelPackageObject = TypelevelPackageObjectRef.symbol.moduleClass lazy val Typelevel_errorR = TypelevelPackageObjectRef.symbol.requiredMethodRef(nme.error) def Typelevel_error(implicit ctx: Context) = Typelevel_errorR.symbol @@ -888,6 +889,9 @@ class Definitions { } } + final def isTypelevel_S(sym: Symbol)(implicit ctx: Context) = + sym.name == tpnme.S && sym.owner == TypelevelPackageObject + // ----- Symbol sets --------------------------------------------------- lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0) diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 1a58805f389c..03c3f49ff94a 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -204,6 +204,7 @@ object StdNames { final val Object: N = "Object" final val PartialFunction: N = "PartialFunction" final val PrefixType: N = "PrefixType" + final val S: N = "S" final val Serializable: N = "Serializable" final val Singleton: N = "Singleton" final val Throwable: N = "Throwable" 
diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 329270348206..d5530ad873ff 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -12,6 +12,7 @@ import util.common._ import Names._ import NameOps._ import NameKinds._ +import Constants.Constant import Flags._ import StdNames.tpnme import util.Positions.Position @@ -409,8 +410,15 @@ class TypeApplications(val self: Type) extends AnyVal { LazyRef(c => dealiased.ref(c).appliedTo(args)) case dealiased: WildcardType => WildcardType(dealiased.optBounds.appliedTo(args).bounds) - case dealiased: TypeRef if dealiased.symbol == defn.NothingClass => - dealiased + case dealiased: TypeRef => + val sym = dealiased.symbol + if (sym == defn.NothingClass) return dealiased + if (defn.isTypelevel_S(sym) && args.length == 1) + args.head.safeDealias match { + case ConstantType(Constant(n: Int)) => return ConstantType(Constant(n + 1)) + case none => + } + AppliedType(self, args) case dealiased => AppliedType(self, args) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index e2f5f92d244c..2db3f33efda1 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -11,6 +11,7 @@ import config.Config import config.Printers.{typr, constr, subtyping, gadts, noPrinter} import TypeErasure.{erasedLub, erasedGlb} import TypeApplications._ +import Constants.Constant import scala.util.control.NonFatal import typer.ProtoTypes.constrained import reporting.trace @@ -288,6 +289,15 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case ConstantType(v1) => v1.value == v2.value case _ => secondTry } + case tp2: AnyConstantType => + if (tp2.tpe.exists) recur(tp1, tp2.tpe) + else tp1 match { + case tp1: ConstantType => + tp2.tpe 
= tp1 + true + case _ => + secondTry + } case _: FlexType => true case _ => @@ -831,7 +841,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { canConstrain(param2) && canInstantiate(param2) || compareLower(bounds(param2), tyconIsTypeRef = false) case tycon2: TypeRef => - isMatchingApply(tp1) || { + isMatchingApply(tp1) || + defn.isTypelevel_S(tycon2.symbol) && compareS(tp2, tp1, fromBelow = true) || { tycon2.info match { case info2: TypeBounds => compareLower(info2, tyconIsTypeRef = true) @@ -865,14 +876,39 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } canConstrain(param1) && canInstantiate || isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2, approx.addLow) - case tycon1: TypeRef if tycon1.symbol.isClass => - false + case tycon1: TypeRef => + val sym = tycon1.symbol + !sym.isClass && ( + defn.isTypelevel_S(sym) && compareS(tp1, tp2, fromBelow = false) || + recur(tp1.superType, tp2)) case tycon1: TypeProxy => recur(tp1.superType, tp2) case _ => false } + /** Compare `tp` of form `S[arg]` with `other`, via ">:>` if fromBelowis true, "<:<" otherwise. + * If `arg` is a Nat constant `n`, proceed with comparing `n + 1` and `other`. + * Otherwise, if `other` is a Nat constant `n`, proceed with comparing `arg` and `n - 1`. + */ + def compareS(tp: AppliedType, other: Type, fromBelow: Boolean): Boolean = tp.args match { + case arg :: Nil => + natValue(arg) match { + case Some(n) => + val succ = ConstantType(Constant(n + 1)) + if (fromBelow) recur(other, succ) else recur(succ, other) + case none => + natValue(other) match { + case Some(n) if n > 0 => + val pred = ConstantType(Constant(n - 1)) + if (fromBelow) recur(pred, arg) else recur(arg, pred) + case none => + false + } + } + case _ => false + } + /** Like tp1 <:< tp2, but returns false immediately if we know that * the case was covered previously during subtyping. 
*/ @@ -914,6 +950,17 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } } + /** Optionally, the `n` such that `tp <:< ConstantType(Constant(n: Int))` */ + def natValue(tp: Type): Option[Int] = { + val ct = new AnyConstantType + if (isSubTypeWhenFrozen(tp, ct)) + ct.tpe match { + case ConstantType(Constant(n: Int)) if n >= 0 => Some(n) + case _ => None + } + else None + } + /** Subtype test for corresponding arguments in `args1`, `args2` according to * variances in type parameters `tparams`. */ @@ -1713,6 +1760,11 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { object TypeComparer { + /** Class for unification variables used in `natValue`. */ + private class AnyConstantType extends UncachedGroundType with ValueType { + var tpe: Type = NoType + } + private[core] def show(res: Any)(implicit ctx: Context) = res match { case res: printing.Showable if !ctx.settings.YexplainLowlevel.value => res.show case _ => String.valueOf(res) @@ -1773,7 +1825,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { def paramInstances = new TypeAccumulator[Array[Type]] { def apply(inst: Array[Type], t: Type) = t match { case t @ TypeParamRef(b, n) if b `eq` caseLambda => - inst(n) = instanceType(t, fromBelow = variance >= 0) + inst(n) = approximation(t, fromBelow = variance >= 0).simplified inst case _ => foldOver(inst, t) diff --git a/library/src-scala3/scala/typelevel/package.scala b/library/src-scala3/scala/typelevel/package.scala index 7ece41cfb8f0..91fe9913f0c3 100644 --- a/library/src-scala3/scala/typelevel/package.scala +++ b/library/src-scala3/scala/typelevel/package.scala @@ -7,4 +7,6 @@ package object typelevel { case class Typed[T](val value: T) { type Type = T } rewrite def error(transparent msg: String): Nothing = ??? 
+ + type S[X <: Int] <: Int } \ No newline at end of file diff --git a/tests/pos/matchtype.scala b/tests/pos/matchtype.scala index 12ce253f1e5d..fbf1ba877f5f 100644 --- a/tests/pos/matchtype.scala +++ b/tests/pos/matchtype.scala @@ -5,21 +5,17 @@ object Test { case Int => String } - trait Nat { - def toInt: Int = ??? - } - - case object Z extends Nat - case class S[N <: Nat] extends Nat - type Z = Z.type - - type Len[X] = X match { - case Unit => Z + type Len[X] <: Int = X match { + case Unit => 0 case x *: xs => S[Len[xs]] } type T2 = Len[(1, 2, 3)] - erased val x: S[S[S[Z]]] = erasedValue[T2] + erased val x: 3 = erasedValue[T2] + + type T1 = S[0] + + erased val x2: 1 = erasedValue[T1] rewrite def checkSub[T1, T2] = rewrite typelevel.erasedValue[T1] match { @@ -32,7 +28,7 @@ object Test { checkSub[T2, T1] } - checkSame[T2, S[S[S[Z]]]] + checkSame[T2, S[S[S[0]]]] type Head[X <: Tuple] = X match { case (x1, _) => x1 @@ -40,11 +36,31 @@ object Test { checkSame[Head[(Int, String)], Int] - type Concat[X <: Tuple, Y <: Tuple] = X match { + type Concat[X <: Tuple, Y <: Tuple] <: Tuple = X match { case Unit => Y case x1 *: xs1 => x1 *: Concat[xs1, Y] } + type Elem[X <: Tuple, N] = X match { + case x *: xs => + N match { + case 0 => x + case S[n1] => Elem[xs, n1] + } + } + + type Elem1[X <: Tuple, N] = (X, N) match { + case (x *: xs, 0) => x + case (x *: xs, S[n1]) => Elem1[xs, n1] + } + + erased val x3: String = erasedValue[Elem[(String, Int), 0]] + erased val x4: Int = erasedValue[Elem1[(String, Int), 1]] + + checkSame[Elem[(String, Int, Boolean), 0], String] + checkSame[Elem1[(String, Int, Boolean), 1], Int] + checkSame[Elem[(String, Int, Boolean), 2], Boolean] + checkSame[Concat[Unit, (String, Int)], (String, Int)] checkSame[Concat[(Boolean, Boolean), (String, Int)], Boolean *: Boolean *: (String, Int)] checkSub[(Boolean, Boolean, String, Int), Concat[(Boolean, Boolean), String *: Int *: Unit]] From 3d3d595c85e6b992be788a9c9abe5993df9cb0b9 Mon Sep 17 00:00:00 2001 
From: Martin Odersky Date: Mon, 27 Aug 2018 15:17:35 +0200 Subject: [PATCH 16/49] Reduce matches on creation --- compiler/src/dotty/tools/dotc/core/Types.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 9a210d60a5d6..c584ddc2abaf 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3623,8 +3623,10 @@ object Types { class CachedMatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends MatchType(bound, scrutinee, cases) object MatchType { - def apply(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = - unique(new CachedMatchType(bound, scrutinee, cases)) + def apply(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = { + val mt = unique(new CachedMatchType(bound, scrutinee, cases)) + mt.reduced.orElse(mt) + } } // ------ ClassInfo, Type Bounds -------------------------------------------------- From 38312e0040579da86620fec799b079348394019e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 13:39:23 +0200 Subject: [PATCH 17/49] Handle MatchTypeTrees in ExtractAPI Also, fix creators for typed/untyped MatchTypeTrees --- compiler/src/dotty/tools/dotc/ast/tpd.scala | 3 +++ compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 2 +- compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala | 5 ++++- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index f5b70728418d..e83030bfb593 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -165,6 +165,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def LambdaTypeTree(tparams: List[TypeDef], body: Tree)(implicit ctx: Context): LambdaTypeTree = ta.assignType(untpd.LambdaTypeTree(tparams, body), 
tparams, body) + def MatchTypeTree(bound: Tree, selector: Tree, cases: List[CaseDef])(implicit ctx: Context): MatchTypeTree = + ta.assignType(untpd.MatchTypeTree(bound, selector, cases), bound, selector, cases) + def TypeBoundsTree(lo: Tree, hi: Tree)(implicit ctx: Context): TypeBoundsTree = ta.assignType(untpd.TypeBoundsTree(lo, hi), lo, hi) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 786fae8c1bd5..90f54ca12ef7 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -1383,7 +1383,7 @@ class TreeUnpickler(reader: TastyReader, val fst = readUntyped() val (bound, scrut) = if (nextUnsharedTag == CASEDEF) (EmptyTree, fst) else (fst, readUntyped()) - MatchTypeTree(bound, scrut, readCases(end)) + untpd.MatchTypeTree(bound, scrut, readCases(end)) case TYPEBOUNDStpt => val lo = readUntyped() val hi = ifBefore(end)(readUntyped(), lo) diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index ce3ce629a70a..d1fbf9f2e39a 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -166,7 +166,7 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder api.Annotation.of(api.Constant.of(Constants.emptyType, name), Array()) val orMarker = marker("Or") val byNameMarker = marker("ByName") - + val matchMarker = marker("Match") /** Extract the API representation of a source file */ def apiSource(tree: Tree): Seq[api.ClassLike] = { @@ -507,6 +507,9 @@ private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder withMarker(s, orMarker) case ExprType(resultType) => withMarker(apiType(resultType), byNameMarker) + case MatchType(bound, scrut, cases) => + val s = combineApiTypes(apiType(bound) :: apiType(scrut) :: cases.map(apiType): 
_*) + withMarker(s, matchMarker) case ConstantType(constant) => api.Constant.of(apiType(constant.tpe), constant.stringValue) case AnnotatedType(tpe, annot) => From a9f9ced0a04385d2d9312e83aaf3050c5e37bb0c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 14:32:55 +0200 Subject: [PATCH 18/49] Refine matchtype reduction caching --- .../src/dotty/tools/dotc/core/Types.scala | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index c584ddc2abaf..d66a0a315c54 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3583,28 +3583,36 @@ object Types { else recur(cases1) } + def isRelevant(tp: Type) = tp match { + case tp: TypeParamRef => ctx.typerState.constraint.entry(tp).exists + case tp: TypeRef => ctx.gadt.bounds.contains(tp.symbol) + } + def contextBounds(tp: Type): TypeBounds = tp match { - case tp: TypeParamRef => - if (ctx.typerState.constraint.entry(tp).exists) - ctx.typerState.constraint.fullBounds(tp) - else TypeBounds.empty + case tp: TypeParamRef => ctx.typerState.constraint.fullBounds(tp) case tp: TypeRef => ctx.gadt.bounds(tp.symbol) } def updateReductionContext() = { reductionContext = new mutable.HashMap - for (tp <- cmp.footprint) reductionContext(tp) = contextBounds(tp) + for (tp <- cmp.footprint if isRelevant(tp)) + reductionContext(tp) = contextBounds(tp) } def upToDate = cmp.footprint.forall { tp => - reductionContext.get(tp) match { - case Some(bounds) => bounds `eq` contextBounds(tp) - case None => false + !isRelevant(tp) || { + reductionContext.get(tp) match { + case Some(bounds) => bounds `eq` contextBounds(tp) + case None => false + } } } + record("MatchType.reduce called") if (!Config.cacheMatchReduced || myReduced == null || !upToDate) { + record("MatchType.reduce computed") + if (myReduced != null) record("MatchType.reduce cache miss") 
myReduced = recur(cases)(trackingCtx) updateReductionContext() } From 2408b548e97c5567d9b500bba7856ca06b34cc95 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 15:19:58 +0200 Subject: [PATCH 19/49] Coarser variance checking for match types Only check the bound, instead of all branches, in the definition of a match type. --- compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index a5d3a1878d3b..869ed3e87d56 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -95,6 +95,8 @@ class VarianceChecker()(implicit ctx: Context) { this(status, tp.resultType) // params will be checked in their TypeDef or ValDef nodes. case AnnotatedType(_, annot) if annot.symbol == defn.UncheckedVarianceAnnot => status + case tp: MatchType => + apply(status, tp.bound) case tp: ClassInfo => foldOver(status, tp.classParents) case _ => From 7a51259ab881f093a4dd6f254ba274a3bdd33893 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 15:32:10 +0200 Subject: [PATCH 20/49] Add constValue function A function that produces the constant value represented by a type. 
--- compiler/src/dotty/tools/dotc/core/Definitions.scala | 2 ++ .../src/dotty/tools/dotc/core/TypeComparer.scala | 12 +++++++++--- compiler/src/dotty/tools/dotc/typer/Inliner.scala | 10 +++++++++- library/src-scala3/scala/typelevel/package.scala | 2 ++ 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 44b62329fa09..983b9f7808c7 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -222,6 +222,8 @@ class Definitions { lazy val TypelevelPackageObject = TypelevelPackageObjectRef.symbol.moduleClass lazy val Typelevel_errorR = TypelevelPackageObjectRef.symbol.requiredMethodRef(nme.error) def Typelevel_error(implicit ctx: Context) = Typelevel_errorR.symbol + lazy val Typelevel_constValueR = TypelevelPackageObjectRef.symbol.requiredMethodRef("constValue") + def Typelevel_constValue(implicit ctx: Context) = Typelevel_constValueR.symbol /** The `scalaShadowing` package is used to safely modify classes and * objects in scala so that they can be used from dotty. They will diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 2db3f33efda1..8f46cf6bcb4c 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -887,7 +887,7 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { false } - /** Compare `tp` of form `S[arg]` with `other`, via ">:>` if fromBelowis true, "<:<" otherwise. + /** Compare `tp` of form `S[arg]` with `other`, via ">:>` if fromBelow is true, "<:<" otherwise. * If `arg` is a Nat constant `n`, proceed with comparing `n + 1` and `other`. * Otherwise, if `other` is a Nat constant `n`, proceed with comparing `arg` and `n - 1`. 
*/ @@ -951,11 +951,17 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { } /** Optionally, the `n` such that `tp <:< ConstantType(Constant(n: Int))` */ - def natValue(tp: Type): Option[Int] = { + def natValue(tp: Type): Option[Int] = constValue(tp) match { + case Some(Constant(n: Int)) if n >= 0 => Some(n) + case _ => None + } + + /** Optionally, the constant `c` such that `tp <:< ConstantType(c)` */ + def constValue(tp: Type): Option[Constant] = { val ct = new AnyConstantType if (isSubTypeWhenFrozen(tp, ct)) ct.tpe match { - case ConstantType(Constant(n: Int)) if n >= 0 => Some(n) + case ConstantType(c) => Some(c) case _ => None } else None diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index c9931becd35f..5d3322262aa4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -350,7 +350,15 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } /** The Inlined node representing the inlined call */ - def inlined(pt: Type) = { + def inlined(pt: Type): Tree = { + + if (inlinedMethod == defn.Typelevel_constValue && callTypeArgs.length == 1) { + ctx.typeComparer.constValue(callTypeArgs.head.tpe) match { + case Some(c) => return tpd.Literal(c).withPos(call.pos) + case _ => ctx.error(i"not a constant type: ${callTypeArgs.head}; cannot take constValue") + } + } + // Compute bindings for all parameters, appending them to bindingsBuf computeParamBindings(inlinedMethod.info, callTypeArgs, callValueArgss) diff --git a/library/src-scala3/scala/typelevel/package.scala b/library/src-scala3/scala/typelevel/package.scala index 91fe9913f0c3..ef7f57dab484 100644 --- a/library/src-scala3/scala/typelevel/package.scala +++ b/library/src-scala3/scala/typelevel/package.scala @@ -8,5 +8,7 @@ package object typelevel { rewrite def error(transparent msg: String): Nothing = ??? + rewrite def constValue[T]: T = ??? 
+ type S[X <: Int] <: Int } \ No newline at end of file From dc669a63350af612d1c83bc906904eceab1e2e97 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 15:43:42 +0200 Subject: [PATCH 21/49] Add NonEmptyTuple abstract class ... and move `_(_)`, `_.head`, `_.tail` to it. That way, we no not need to specify a specific (probably wildcarded) pair type when invoking these operations. I first tried to move the three operations simply to Tuple. But since `Unit` is a `Tuple` as well, this gives us funny error messages for `apply`. For instance this one in errMsgTests: ``` object Scope{ def foo(a: Int) = () foo(1)("2") } ``` If `Tuple` had an `apply` method, this would give `expected: Int, found : String` --- library/src-scala3/scala/Tuple.scala | 6 ++++-- tests/pos/matchtype.scala | 6 ++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index f058ec07d43a..bab1ee1debc7 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -166,8 +166,7 @@ object Tuple { } } -@showAsInfix -sealed class *:[+H, +T <: Tuple] extends Tuple { +abstract sealed class NonEmptyTuple extends Tuple { import Tuple._ rewrite def head: Any = { @@ -253,6 +252,9 @@ sealed class *:[+H, +T <: Tuple] extends Tuple { } } +@showAsInfix +sealed class *:[+H, +T <: Tuple] extends NonEmptyTuple + object *: { rewrite def unapply[H, T <: Tuple](x: H *: T) = (x.head, x.tail) } diff --git a/tests/pos/matchtype.scala b/tests/pos/matchtype.scala index fbf1ba877f5f..d3aa07a2561f 100644 --- a/tests/pos/matchtype.scala +++ b/tests/pos/matchtype.scala @@ -64,4 +64,10 @@ object Test { checkSame[Concat[Unit, (String, Int)], (String, Int)] checkSame[Concat[(Boolean, Boolean), (String, Int)], Boolean *: Boolean *: (String, Int)] checkSub[(Boolean, Boolean, String, Int), Concat[(Boolean, Boolean), String *: Int *: Unit]] + + rewrite def index[Xs <: NonEmptyTuple](xs: Xs, n: Int): 
Elem[Xs, n.type] = xs(n).asInstanceOf + + val test = (1, "hi", true, 2.0) + index(test, 0): Int + index(test, 1): String } \ No newline at end of file From d924ef17d24fddd0cdb91786af738fe69e4dbad1 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 16:07:32 +0200 Subject: [PATCH 22/49] More precise derivesFrom for MatchTypes Need to take possible reductions into account. --- compiler/src/dotty/tools/dotc/core/Types.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index d66a0a315c54..3e5a04e3aee7 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -208,6 +208,10 @@ object Types { case tp: TypeRef => val sym = tp.symbol if (sym.isClass) sym.derivesFrom(cls) else loop(tp.superType): @tailrec + case tp: AppliedType => + tp.superType.derivesFrom(cls) + case tp: MatchType => + tp.bound.derivesFrom(cls) || tp.reduced.derivesFrom(cls) case tp: TypeProxy => loop(tp.underlying): @tailrec case tp: AndType => From 0d4a118f24d1422217785ee1f621d71a86b6b84b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 16:17:37 +0200 Subject: [PATCH 23/49] Base Tuple computations on types Avoids rewrites of possibly very large terms --- library/src-scala3/scala/Tuple.scala | 90 +++++++++++++++++++--------- tests/run/tuples2.scala | 40 +++++++++++++ 2 files changed, 101 insertions(+), 29 deletions(-) create mode 100644 tests/run/tuples2.scala diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index bab1ee1debc7..e5ce81a3413b 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -4,7 +4,8 @@ import typelevel._ sealed trait Tuple extends Any { import Tuple._ - rewrite def toArray: Array[Object] = rewrite _size(this) match { + + rewrite def toArray: Array[Object] = rewrite constValue[BoundedSize[this.type]] match { 
case 0 => $emptyArray case 1 => @@ -47,39 +48,39 @@ sealed trait Tuple extends Any { } rewrite def ++(that: Tuple): Tuple = { - erased val resTpe = Typed(_concat(this, that)) - rewrite _size(this) match { + type Result = Concat[this.type, that.type] + rewrite constValue[BoundedSize[this.type]] match { case 0 => that case 1 => - if (_size(that) == 0) this - else (asInstanceOf[Tuple1[_]]._1 *: that).asInstanceOf[resTpe.Type] + if (constValue[BoundedSize[that.type]] == 0) this + else (asInstanceOf[Tuple1[_]]._1 *: that).asInstanceOf[Result] case 2 => val t = asInstanceOf[Tuple2[_, _]] - rewrite _size(that) match { + rewrite constValue[BoundedSize[that.type]] match { case 0 => this case 1 => val u = that.asInstanceOf[Tuple1[_]] - Tuple3(t._1, t._2, u._1).asInstanceOf[resTpe.Type] + Tuple3(t._1, t._2, u._1).asInstanceOf[Result] case 2 => val u = that.asInstanceOf[Tuple2[_, _]] - Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[resTpe.Type] + Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[Result] case _ => - genericConcat[resTpe.Type](this, that) + genericConcat[Result](this, that) } case 3 => val t = asInstanceOf[Tuple3[_, _, _]] - rewrite _size(that) match { + rewrite constValue[BoundedSize[that.type]] match { case 0 => this case 1 => val u = that.asInstanceOf[Tuple1[_]] - Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[resTpe.Type] + Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[Result] case _ => - genericConcat[resTpe.Type](this, that) + genericConcat[Result](this, that) } case _ => - if (_size(that) == 0) this - else genericConcat[resTpe.Type](this, that) + if (constValue[BoundedSize[that.type]] == 0) this + else genericConcat[Result](this, that) } } @@ -89,6 +90,37 @@ sealed trait Tuple extends Any { object Tuple { transparent val $MaxSpecialized = 22 + transparent private val XXL = $MaxSpecialized + 1 + + type Concat[X <: Tuple, Y <: Tuple] <: Tuple = X match { + case Unit => Y + case x1 *: xs1 => x1 *: Concat[xs1, Y] + } + + type Elem[X <: Tuple, N] = (X, N) match { + case (x 
*: xs, 0) => x + case (x *: xs, S[n1]) => Elem[xs, n1] + } + + type Size[X] <: Int = X match { + case Unit => 0 + case x *: xs => S[Size[xs]] + } + + private type XXL = S[$MaxSpecialized.type] + + private type BoundedS[N <: Int] = N match { + case XXL => XXL + case _ => S[N] + } + + private[scala] type BoundedSize[X] <: Int = X match { + case Unit => 0 + case x *: xs => BoundedSize[xs] match { + case XXL => XXL + case _ => S[BoundedSize[xs]] + } + } val $emptyArray = Array[Object]() @@ -138,7 +170,7 @@ object Tuple { } rewrite def fromArray[T <: Tuple](xs: Array[Object]): T = - rewrite _size(erasedValue[T]) match { + rewrite constValue[BoundedSize[T]] match { case 0 => ().asInstanceOf[T] case 1 => Tuple1(xs(0)).asInstanceOf[T] case 2 => Tuple2(xs(0), xs(1)).asInstanceOf[T] @@ -216,38 +248,38 @@ abstract sealed class NonEmptyTuple extends Tuple { } rewrite def apply(n: Int): Any = { - erased val resTpe = Typed(_index(this, n)) - rewrite _size(this) match { + type Result = Elem[this.type, n.type] + rewrite constValue[BoundedSize[this.type]] match { case 1 => val t = asInstanceOf[Tuple1[_]] rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] + case 0 => t._1.asInstanceOf[Result] } case 2 => val t = asInstanceOf[Tuple2[_, _]] rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] + case 0 => t._1.asInstanceOf[Result] + case 1 => t._2.asInstanceOf[Result] } case 3 => val t = asInstanceOf[Tuple3[_, _, _]] rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] - case 2 => t._3.asInstanceOf[resTpe.Type] + case 0 => t._1.asInstanceOf[Result] + case 1 => t._2.asInstanceOf[Result] + case 2 => t._3.asInstanceOf[Result] } case 4 => val t = asInstanceOf[Tuple4[_, _, _, _]] rewrite n match { - case 0 => t._1.asInstanceOf[resTpe.Type] - case 1 => t._2.asInstanceOf[resTpe.Type] - case 2 => t._3.asInstanceOf[resTpe.Type] - case 3 => t._4.asInstanceOf[resTpe.Type] + case 0 => 
t._1.asInstanceOf[Result] + case 1 => t._2.asInstanceOf[Result] + case 2 => t._3.asInstanceOf[Result] + case 3 => t._4.asInstanceOf[Result] } case s if s > 4 && s <= $MaxSpecialized && n >= 0 && n < s => - asInstanceOf[Product].productElement(n).asInstanceOf[resTpe.Type] + asInstanceOf[Product].productElement(n).asInstanceOf[Result] case s if s > $MaxSpecialized && n >= 0 && n < s => - asInstanceOf[TupleXXL].elems(n).asInstanceOf[resTpe.Type] + asInstanceOf[TupleXXL].elems(n).asInstanceOf[Result] } } } diff --git a/tests/run/tuples2.scala b/tests/run/tuples2.scala new file mode 100644 index 000000000000..6a4d44cb5b15 --- /dev/null +++ b/tests/run/tuples2.scala @@ -0,0 +1,40 @@ +object Test extends App { + val xs0 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) + assert(xs0(15) == 16) + // 2.733s + + val xs1 = xs0 ++ xs0 + assert(xs1(31) == 16) + // 3.089s + + val xs2 = xs1 ++ xs1 + assert(xs2(63) == 16) + // 3.329s + + val xs3 = xs2 ++ xs2 + assert(xs3(127) == 16) + // 3.416s + + val xs4 = xs3 ++ xs3 + assert(xs4(255) == 16) + // 3.765s + + val xs5a = xs3 ++ xs4 + assert(xs5a(383) == 16) + // 3.804s + +/* The following operations exhaust the standard 2MB stack, but succeed with -Xs10m: + + val xs5 = xs4 ++ xs4 + assert(xs5(511) == 16) + // 3.866s + + val xs6 = xs5 ++ xs5 + assert(xs6(1023) == 16) + // 4.115s + + val xs7 = xs6 ++ xs6 + assert(xs7(2047) == 16) + // 4.846s +*/ +} From 189973bd7d91e19c0a16f83f21e960a9db0ca773 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 16:36:29 +0200 Subject: [PATCH 24/49] Make MatchTypes value types This allows to base more Tuple operations on types instead of rewrite terms --- compiler/src/dotty/tools/dotc/core/Types.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 3e5a04e3aee7..44f72c412032 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ 
b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3538,7 +3538,7 @@ object Types { * * and `X_1,...X_n` are the type variables bound in `patternType` */ - abstract case class MatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends CachedProxyType with TermType { + abstract case class MatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends CachedProxyType with ValueType { def derivedMatchType(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = if (bound.eq(this.bound) && scrutinee.eq(this.scrutinee) && cases.eqElements(this.cases)) this From 830768582bb73bfde2356f47b0ac50363fecd277 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 17:08:03 +0200 Subject: [PATCH 25/49] Fix inlining of parameters of singleton type --- compiler/src/dotty/tools/dotc/typer/Inliner.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 5d3322262aa4..b65f83f42a18 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -401,7 +401,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } case tree: Ident => paramProxy.get(tree.tpe) match { - case Some(t) if tree.isTerm && t.isSingleton => singleton(t).withPos(tree.pos) + case Some(t) if tree.isTerm && t.isSingleton => singleton(t.dealias).withPos(tree.pos) case Some(t) if tree.isType => TypeTree(t).withPos(tree.pos) case _ => tree } From 8219a838738b7bfd8d7a50256be95faf4704a049 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 18:15:20 +0200 Subject: [PATCH 26/49] Test tuples2 needs to run with -Yno-deep-subtypes --- tests/{run => pos-deep-subtype}/tuples2.scala | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{run => pos-deep-subtype}/tuples2.scala (100%) diff --git a/tests/run/tuples2.scala 
b/tests/pos-deep-subtype/tuples2.scala similarity index 100% rename from tests/run/tuples2.scala rename to tests/pos-deep-subtype/tuples2.scala From 56759c8a5af698538e50e36449428f06b015a4f6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 20:12:10 +0200 Subject: [PATCH 27/49] Add constValueOpt method useful to allow libraries to fail gracefully if something is not a constant (not necessarily a parameter, it could be a quantity computed by the rewrite method). --- .../dotty/tools/dotc/core/Definitions.scala | 2 ++ .../src/dotty/tools/dotc/typer/Inliner.scala | 23 +++++++++++++++---- .../src-scala3/scala/typelevel/package.scala | 2 ++ 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 983b9f7808c7..e45feddb35d9 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -224,6 +224,8 @@ class Definitions { def Typelevel_error(implicit ctx: Context) = Typelevel_errorR.symbol lazy val Typelevel_constValueR = TypelevelPackageObjectRef.symbol.requiredMethodRef("constValue") def Typelevel_constValue(implicit ctx: Context) = Typelevel_constValueR.symbol + lazy val Typelevel_constValueOptR = TypelevelPackageObjectRef.symbol.requiredMethodRef("constValueOpt") + def Typelevel_constValueOpt(implicit ctx: Context) = Typelevel_constValueOptR.symbol /** The `scalaShadowing` package is used to safely modify classes and * objects in scala so that they can be used from dotty. 
They will diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index b65f83f42a18..2eb6edc6cf5e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -349,15 +349,28 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { else result } + def tryConstValue: Tree = + ctx.typeComparer.constValue(callTypeArgs.head.tpe) match { + case Some(c) => Literal(c).withPos(call.pos) + case _ => EmptyTree + } + /** The Inlined node representing the inlined call */ def inlined(pt: Type): Tree = { - if (inlinedMethod == defn.Typelevel_constValue && callTypeArgs.length == 1) { - ctx.typeComparer.constValue(callTypeArgs.head.tpe) match { - case Some(c) => return tpd.Literal(c).withPos(call.pos) - case _ => ctx.error(i"not a constant type: ${callTypeArgs.head}; cannot take constValue") + if (callTypeArgs.length == 1) + if (inlinedMethod == defn.Typelevel_constValue) { + val constVal = tryConstValue + if (!constVal.isEmpty) return constVal + ctx.error(i"not a constant type: ${callTypeArgs.head}; cannot take constValue") + } + else if (inlinedMethod == defn.Typelevel_constValueOpt) { + val constVal = tryConstValue + return ( + if (constVal.isEmpty) ref(defn.NoneModuleRef) + else New(defn.SomeClass.typeRef.appliedTo(constVal.tpe), constVal :: Nil) + ) } - } // Compute bindings for all parameters, appending them to bindingsBuf computeParamBindings(inlinedMethod.info, callTypeArgs, callValueArgss) diff --git a/library/src-scala3/scala/typelevel/package.scala b/library/src-scala3/scala/typelevel/package.scala index ef7f57dab484..9df026dab4c0 100644 --- a/library/src-scala3/scala/typelevel/package.scala +++ b/library/src-scala3/scala/typelevel/package.scala @@ -8,6 +8,8 @@ package object typelevel { rewrite def error(transparent msg: String): Nothing = ??? + rewrite def constValueOpt[T]: Option[T] = ??? + rewrite def constValue[T]: T = ??? 
type S[X <: Int] <: Int From 45fbf3ef494544ca9c9cc5c2d433632f10115e50 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 20:14:25 +0200 Subject: [PATCH 28/49] Fix unpickling of match type aliases --- compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 90f54ca12ef7..a453fc5184e1 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -800,7 +800,7 @@ class TreeUnpickler(reader: TastyReader, } sym.info = rhs.tpe match { case _: TypeBounds | _: ClassInfo => checkNonCyclic(sym, rhs.tpe, reportErrors = false) - case _ => TypeAlias(rhs.tpe) + case _ => rhs.tpe.toBounds } sym.resetFlag(Provisional) TypeDef(rhs) From e41bf9aa5332a38b205cc7ab9482229f97dee63e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 21:35:06 +0200 Subject: [PATCH 29/49] Survive bottom values in pattern matches See test case. This caused two spurious overloding errors, one when looking for equality evidence and then again in the emit method of the pattern matcher. A spurious error was also eliminated in neg/ensureReported.scala. 
--- compiler/src/dotty/tools/dotc/ast/tpd.scala | 5 ++++- compiler/src/dotty/tools/dotc/typer/Typer.scala | 3 ++- tests/neg/NoneMatch.scala | 7 +++++++ tests/neg/ensureReported.scala | 2 +- 4 files changed, 14 insertions(+), 3 deletions(-) create mode 100644 tests/neg/NoneMatch.scala diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index e83030bfb593..e22537d8e88a 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -824,7 +824,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** `tree == that` */ def equal(that: Tree)(implicit ctx: Context) = - applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) + if (that.tpe.widen.isRef(defn.NothingClass)) + Literal(Constant(false)) + else + applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ def isInstance(tp: Type)(implicit ctx: Context): Tree = tp.dealias match { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 78489a0bec04..234624edea09 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -2650,7 +2650,8 @@ class Typer extends Namer tree match { case _: RefTree | _: Literal if !isVarPattern(tree) && - !(tree.tpe <:< pt) (ctx.addMode(Mode.GADTflexible)) => + !(pt <:< tree.tpe) && + !(tree.tpe <:< pt)(ctx.addMode(Mode.GADTflexible)) => val cmp = untpd.Apply( untpd.Select(untpd.TypedSplice(tree), nme.EQ), diff --git a/tests/neg/NoneMatch.scala b/tests/neg/NoneMatch.scala new file mode 100644 index 000000000000..7a17dd74103c --- /dev/null +++ b/tests/neg/NoneMatch.scala @@ -0,0 +1,7 @@ +object Test { + + None match { + case Some(0) => ??? 
// error: unreachable + } + +} diff --git a/tests/neg/ensureReported.scala b/tests/neg/ensureReported.scala index b40f8837511c..38e1e1307fd4 100644 --- a/tests/neg/ensureReported.scala +++ b/tests/neg/ensureReported.scala @@ -1,6 +1,6 @@ object AnonymousF { val f = { - case l @ List(1) => // error: missing parameter type // error: Ambiguous overload + case l @ List(1) => // error: missing parameter type Some(l) } } From 2d83885f0e226e328685b1d56c162451d6b96370 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 23:00:20 +0200 Subject: [PATCH 30/49] Report rewrite errors at outermost rewrite call --- compiler/src/dotty/tools/dotc/typer/Inliner.scala | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 2eb6edc6cf5e..5b70f2a5265e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -362,7 +362,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { if (inlinedMethod == defn.Typelevel_constValue) { val constVal = tryConstValue if (!constVal.isEmpty) return constVal - ctx.error(i"not a constant type: ${callTypeArgs.head}; cannot take constValue") + ctx.error(i"not a constant type: ${callTypeArgs.head}; cannot take constValue", call.pos) } else if (inlinedMethod == defn.Typelevel_constValueOpt) { val constVal = tryConstValue @@ -450,7 +450,14 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { case (msgArg :: Nil) :: Nil => msgArg.tpe match { case ConstantType(Constant(msg: String)) => - ctx.error(msg, call.pos) + // Usually `error` is called from within a rewrite method. In this + // case we need to report the error at the point of the outermost enclosing inline + // call. 
This way, a defensively written rewrite methid can always + // report bad inputs at the point of call instead of revealing its internals. + val callToReport = if (enclosingInlineds.nonEmpty) enclosingInlineds.last else call + val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(_).nonEmpty).next + def issueInCtx(implicit ctx: Context) = ctx.error(msg, callToReport.pos) + issueInCtx(ctxToReport) case _ => } case _ => From b53bf1edccf9c40f599030039509611ac6998a23 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 23:00:42 +0200 Subject: [PATCH 31/49] Harden Tuple operations against wrong inputs --- library/src-scala3/scala/Tuple.scala | 197 ++++++++++++++------------- tests/neg/tuple-nonconstant.scala | 4 + tests/neg/tuple-nonconstant2.scala | 3 + tests/neg/tuple-nonconstant3.scala | 3 + tests/neg/tuple-oob1.scala | 3 + tests/neg/tuple-oob2.scala | 3 + 6 files changed, 115 insertions(+), 98 deletions(-) create mode 100644 tests/neg/tuple-nonconstant.scala create mode 100644 tests/neg/tuple-nonconstant2.scala create mode 100644 tests/neg/tuple-nonconstant3.scala create mode 100644 tests/neg/tuple-oob1.scala create mode 100644 tests/neg/tuple-oob2.scala diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index e5ce81a3413b..5fb9bc6b8930 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -5,60 +5,64 @@ import typelevel._ sealed trait Tuple extends Any { import Tuple._ - rewrite def toArray: Array[Object] = rewrite constValue[BoundedSize[this.type]] match { - case 0 => + rewrite def toArray: Array[Object] = rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(0) => $emptyArray - case 1 => + case Some(1) => val t = asInstanceOf[Tuple1[Object]] Array(t._1) - case 2 => + case Some(2) => val t = asInstanceOf[Tuple2[Object, Object]] Array(t._1, t._2) - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[Object, Object, Object]] Array(t._1, t._2, 
t._3) - case 4 => + case Some(4) => val t = asInstanceOf[Tuple4[Object, Object, Object, Object]] Array(t._1, t._2, t._3, t._4) - case n if n <= $MaxSpecialized => + case Some(n) if n <= $MaxSpecialized => $toArray(this, n) - case n => + case Some(n) => asInstanceOf[TupleXXL].elems + case None => + error(".toArray cannot be applied to tuple of unknown size") } - rewrite def *: [H] (x: H): Tuple = { - erased val resTpe = Typed(_pair(x, this)) - rewrite _size(this) match { - case 0 => - Tuple1(x).asInstanceOf[resTpe.Type] - case 1 => - Tuple2(x, asInstanceOf[Tuple1[_]]._1).asInstanceOf[resTpe.Type] - case 2 => + rewrite def *: [H] (x: H): H *: this.type = { + type Result = H *: this.type + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(0) => + Tuple1(x).asInstanceOf[Result] + case Some(1) => + Tuple2(x, asInstanceOf[Tuple1[_]]._1).asInstanceOf[Result] + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] - Tuple3(x, t._1, t._2).asInstanceOf[resTpe.Type] - case 3 => + Tuple3(x, t._1, t._2).asInstanceOf[Result] + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] - Tuple4(x, t._1, t._2, t._3).asInstanceOf[resTpe.Type] - case 4 => + Tuple4(x, t._1, t._2, t._3).asInstanceOf[Result] + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] - Tuple5(x, t._1, t._2, t._3, t._4).asInstanceOf[resTpe.Type] - case n => - fromArray[resTpe.Type]($consArray(x, toArray)) + Tuple5(x, t._1, t._2, t._3, t._4).asInstanceOf[Result] + case Some(n) => + fromArray[Result]($consArray(x, toArray)) + case _ => + error("*: cannot be applied to tuple of unknown size") } } - rewrite def ++(that: Tuple): Tuple = { + rewrite def ++(that: Tuple): Concat[this.type, that.type] = { type Result = Concat[this.type, that.type] - rewrite constValue[BoundedSize[this.type]] match { - case 0 => - that - case 1 => - if (constValue[BoundedSize[that.type]] == 0) this + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(0) => + that.asInstanceOf[Result] + case Some(1) => + if 
(constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] else (asInstanceOf[Tuple1[_]]._1 *: that).asInstanceOf[Result] - case 2 => + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] rewrite constValue[BoundedSize[that.type]] match { - case 0 => this + case 0 => this.asInstanceOf[Result] case 1 => val u = that.asInstanceOf[Tuple1[_]] Tuple3(t._1, t._2, u._1).asInstanceOf[Result] @@ -66,21 +70,23 @@ sealed trait Tuple extends Any { val u = that.asInstanceOf[Tuple2[_, _]] Tuple4(t._1, t._2, u._1, u._2).asInstanceOf[Result] case _ => - genericConcat[Result](this, that) + genericConcat[Result](this, that).asInstanceOf[Result] } - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] rewrite constValue[BoundedSize[that.type]] match { - case 0 => this + case 0 => this.asInstanceOf[Result] case 1 => val u = that.asInstanceOf[Tuple1[_]] Tuple4(t._1, t._2, t._3, u._1).asInstanceOf[Result] case _ => - genericConcat[Result](this, that) + genericConcat[Result](this, that).asInstanceOf[Result] } - case _ => - if (constValue[BoundedSize[that.type]] == 0) this - else genericConcat[Result](this, that) + case Some(_) => + if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] + else genericConcat[Result](this, that).asInstanceOf[Result] + case None => + error("++ cannot be applied to tuple of unknown size") } } @@ -92,6 +98,14 @@ object Tuple { transparent val $MaxSpecialized = 22 transparent private val XXL = $MaxSpecialized + 1 + type Head[X <: NonEmptyTuple] = X match { + case x *: _ => x + } + + type Tail[X <: NonEmptyTuple] <: Tuple = X match { + case _ *: xs => xs + } + type Concat[X <: Tuple, Y <: Tuple] <: Tuple = X match { case Unit => Y case x1 *: xs1 => x1 *: Concat[xs1, Y] @@ -142,33 +156,6 @@ object Tuple { elems1 } - private[scala] rewrite def _pair[H, T <: Tuple] (x: H, xs: T): Tuple = - erasedValue[H *: T] - - private[scala] rewrite def _size(xs: Tuple): Int = - rewrite xs match { - case _: Unit => 0 - case _: (_ *: xs1) => 
_size(erasedValue[xs1]) + 1 - } - - private[scala] rewrite def _head(xs: Tuple): Any = rewrite xs match { - case _: (x *: _) => erasedValue[x] - } - - private[scala] rewrite def _tail(xs: Tuple): Tuple = rewrite xs match { - case _: (_ *: xs1) => erasedValue[xs1] - } - - private[scala] rewrite def _index(xs: Tuple, n: Int): Any = rewrite xs match { - case _: (x *: _) if n == 0 => erasedValue[x] - case _: (_ *: xs1) if n > 0 => _index(erasedValue[xs1], n - 1) - } - - private[scala] rewrite def _concat(xs: Tuple, ys: Tuple): Tuple = rewrite xs match { - case _: Unit => ys - case _: (x1 *: xs1) => _pair(erasedValue[x1], _concat(erasedValue[xs1], ys)) - } - rewrite def fromArray[T <: Tuple](xs: Array[Object]): T = rewrite constValue[BoundedSize[T]] match { case 0 => ().asInstanceOf[T] @@ -201,85 +188,99 @@ object Tuple { abstract sealed class NonEmptyTuple extends Tuple { import Tuple._ - rewrite def head: Any = { - erased val resTpe = Typed(_head(this)) - val resVal = rewrite _size(this) match { - case 1 => + rewrite def head: Head[this.type] = { + type Result = Head[this.type] + val resVal = rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(1) => val t = asInstanceOf[Tuple1[_]] t._1 - case 2 => + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] t._1 - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] t._1 - case 4 => + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] t._1 - case n if n > 4 && n <= $MaxSpecialized => + case Some(n) if n > 4 && n <= $MaxSpecialized => asInstanceOf[Product].productElement(0) - case n if n > $MaxSpecialized => + case Some(n) if n > $MaxSpecialized => val t = asInstanceOf[TupleXXL] t.elems(0) + case None => + error(".head cannot be applied to tuple of unknown size") } - resVal.asInstanceOf[resTpe.Type] + resVal.asInstanceOf[Result] } - rewrite def tail: Tuple = { - erased val resTpe = Typed(_tail(this)) - rewrite _size(this) match { - case 1 => - () - case 2 => + rewrite def tail: Tail[this.type] 
= { + type Result = Tail[this.type] + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(1) => + ().asInstanceOf[Result] + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] - Tuple1(t._2).asInstanceOf[resTpe.Type] - case 3 => + Tuple1(t._2).asInstanceOf[Result] + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] - Tuple2(t._2, t._3).asInstanceOf[resTpe.Type] - case 4 => + Tuple2(t._2, t._3).asInstanceOf[Result] + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] - Tuple3(t._2, t._3, t._4).asInstanceOf[resTpe.Type] - case 5 => + Tuple3(t._2, t._3, t._4).asInstanceOf[Result] + case Some(5) => val t = asInstanceOf[Tuple5[_, _, _, _, _]] - Tuple4(t._2, t._3, t._4, t._5).asInstanceOf[resTpe.Type] - case n if n > 5 => - fromArray[resTpe.Type](toArray.tail) + Tuple4(t._2, t._3, t._4, t._5).asInstanceOf[Result] + case Some(n) if n > 5 => + fromArray[Result](toArray.tail) + case None => + error(".tail cannot be applied to tuple of unknown size") } } - rewrite def apply(n: Int): Any = { + rewrite def indexOutOfBounds = error("index out of bounds") + + rewrite def apply(transparent n: Int): Elem[this.type, n.type] = { type Result = Elem[this.type, n.type] - rewrite constValue[BoundedSize[this.type]] match { - case 1 => + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(1) => val t = asInstanceOf[Tuple1[_]] rewrite n match { case 0 => t._1.asInstanceOf[Result] + case _ => indexOutOfBounds } - case 2 => + case Some(2) => val t = asInstanceOf[Tuple2[_, _]] rewrite n match { case 0 => t._1.asInstanceOf[Result] case 1 => t._2.asInstanceOf[Result] + case _ => indexOutOfBounds } - case 3 => + case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] rewrite n match { case 0 => t._1.asInstanceOf[Result] case 1 => t._2.asInstanceOf[Result] case 2 => t._3.asInstanceOf[Result] + case _ => indexOutOfBounds } - case 4 => + case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] rewrite n match { case 0 => t._1.asInstanceOf[Result] case 1 => 
t._2.asInstanceOf[Result] case 2 => t._3.asInstanceOf[Result] case 3 => t._4.asInstanceOf[Result] + case _ => indexOutOfBounds } - case s if s > 4 && s <= $MaxSpecialized && n >= 0 && n < s => + case Some(s) if s > 4 && s <= $MaxSpecialized && n >= 0 && n < s => asInstanceOf[Product].productElement(n).asInstanceOf[Result] - case s if s > $MaxSpecialized && n >= 0 && n < s => + case Some(s) if s > $MaxSpecialized && n >= 0 && n < s => asInstanceOf[TupleXXL].elems(n).asInstanceOf[Result] + case Some(s) => + indexOutOfBounds + case None => + error("selection (...) cannot be applied to tuple of unknown size") } } } diff --git a/tests/neg/tuple-nonconstant.scala b/tests/neg/tuple-nonconstant.scala new file mode 100644 index 000000000000..dc0f698826e7 --- /dev/null +++ b/tests/neg/tuple-nonconstant.scala @@ -0,0 +1,4 @@ +object Test { + def cons[X, Xs <: Tuple](x: X, xs: Xs) = x *: xs // error: *: cannot be applied to tuple of unknown size + def toArray[Xs <: Tuple](xs: Xs) = xs.toArray // (second error is suppressed right now) +} \ No newline at end of file diff --git a/tests/neg/tuple-nonconstant2.scala b/tests/neg/tuple-nonconstant2.scala new file mode 100644 index 000000000000..d5a8c4c9acc1 --- /dev/null +++ b/tests/neg/tuple-nonconstant2.scala @@ -0,0 +1,3 @@ +object Test { + def toArray[Xs <: Tuple](xs: Xs) = xs.toArray // error: toArray cannot be applied to tuple of unknown size +} \ No newline at end of file diff --git a/tests/neg/tuple-nonconstant3.scala b/tests/neg/tuple-nonconstant3.scala new file mode 100644 index 000000000000..c6f5da1f024d --- /dev/null +++ b/tests/neg/tuple-nonconstant3.scala @@ -0,0 +1,3 @@ +object Test { + def elem[Xs <: NonEmptyTuple](xs: Xs) = xs(1) // error: selection (...) 
cannot be applied to tuple of unknown size +} \ No newline at end of file diff --git a/tests/neg/tuple-oob1.scala b/tests/neg/tuple-oob1.scala new file mode 100644 index 000000000000..1c4c512cb477 --- /dev/null +++ b/tests/neg/tuple-oob1.scala @@ -0,0 +1,3 @@ +object Test { + def elem(xs: (Int, String)) = xs(2) // error: index out of bounds +} \ No newline at end of file diff --git a/tests/neg/tuple-oob2.scala b/tests/neg/tuple-oob2.scala new file mode 100644 index 000000000000..e819837d1429 --- /dev/null +++ b/tests/neg/tuple-oob2.scala @@ -0,0 +1,3 @@ +object Test { + def elem(xs: (Int, String), n: Int) = xs(n) // error: argument to transparent parameter must be a constant expression +} \ No newline at end of file From 71fbc158bd8d8ccf6f3a76c3bdc7e3bf7b391848 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 29 Aug 2018 23:30:34 +0200 Subject: [PATCH 32/49] Reduce sizes of tuples of tuples2.scala The previous version worked with a -Xss2m setting, but it seems stack size used in tests is significantly smaller than that. Is there a way to set the stack size for a specific test? Then we could re-enable the commented-out code. 
--- tests/pos-deep-subtype/tuples2.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pos-deep-subtype/tuples2.scala b/tests/pos-deep-subtype/tuples2.scala index 6a4d44cb5b15..83921d48ab15 100644 --- a/tests/pos-deep-subtype/tuples2.scala +++ b/tests/pos-deep-subtype/tuples2.scala @@ -15,6 +15,8 @@ object Test extends App { assert(xs3(127) == 16) // 3.416s +/* The following operations exhaust the standard stack, but succeed with -Xs10m: + val xs4 = xs3 ++ xs3 assert(xs4(255) == 16) // 3.765s @@ -23,8 +25,6 @@ object Test extends App { assert(xs5a(383) == 16) // 3.804s -/* The following operations exhaust the standard 2MB stack, but succeed with -Xs10m: - val xs5 = xs4 ++ xs4 assert(xs5(511) == 16) // 3.866s From 2d763828c9832b36f37af334293fd066ae97045a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 6 Sep 2018 11:37:17 +0200 Subject: [PATCH 33/49] Allow additional arguments for typelevel.error --- compiler/src/dotty/tools/dotc/typer/Inliner.scala | 15 +++++++++++++-- library/src-scala3/scala/typelevel/package.scala | 2 +- tests/neg/tuple-oob1.scala | 2 +- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 5b70f2a5265e..4dca9ab9483f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -447,7 +447,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { } def issueError() = callValueArgss match { - case (msgArg :: Nil) :: Nil => + case (msgArg :: rest) :: Nil => msgArg.tpe match { case ConstantType(Constant(msg: String)) => // Usually `error` is called from within a rewrite method. In this @@ -456,7 +456,18 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(implicit ctx: Context) { // report bad inputs at the point of call instead of revealing its internals. 
val callToReport = if (enclosingInlineds.nonEmpty) enclosingInlineds.last else call val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(_).nonEmpty).next - def issueInCtx(implicit ctx: Context) = ctx.error(msg, callToReport.pos) + def issueInCtx(implicit ctx: Context) = { + def decompose(arg: Tree): String = arg match { + case Typed(arg, _) => decompose(arg) + case SeqLiteral(elems, _) => elems.map(decompose).mkString(", ") + case arg => + arg.tpe.widenTermRefExpr match { + case ConstantType(Constant(c)) => c.toString + case _ => arg.show + } + } + ctx.error(s"$msg${rest.map(decompose).mkString(", ")}", callToReport.pos) + } issueInCtx(ctxToReport) case _ => } diff --git a/library/src-scala3/scala/typelevel/package.scala b/library/src-scala3/scala/typelevel/package.scala index 9df026dab4c0..adfc59329b8a 100644 --- a/library/src-scala3/scala/typelevel/package.scala +++ b/library/src-scala3/scala/typelevel/package.scala @@ -6,7 +6,7 @@ package object typelevel { case class Typed[T](val value: T) { type Type = T } - rewrite def error(transparent msg: String): Nothing = ??? + rewrite def error(transparent msg: String, objs: Any*): Nothing = ??? rewrite def constValueOpt[T]: Option[T] = ??? diff --git a/tests/neg/tuple-oob1.scala b/tests/neg/tuple-oob1.scala index 1c4c512cb477..aacde0aeed2b 100644 --- a/tests/neg/tuple-oob1.scala +++ b/tests/neg/tuple-oob1.scala @@ -1,3 +1,3 @@ object Test { - def elem(xs: (Int, String)) = xs(2) // error: index out of bounds + def elem(xs: (Int, String)) = xs(2) // error: index out of bounds: 2 } \ No newline at end of file From 10f2acbb4a13c986938d9ad8e942a709059bec84 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 6 Sep 2018 13:58:33 +0200 Subject: [PATCH 34/49] GenericSignatures needs to consult erasedToObject Used to hard-code Any/AnyVal/Signature before. 
--- compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index dce23de8f09d..baa1cbe926c5 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -198,7 +198,7 @@ object GenericSignatures { assert(!sym.isAliasType, "Unexpected alias type: " + sym) typeParamSig(sym.name.lastPart) } - else if (sym == defn.AnyClass || sym == defn.AnyValClass || sym == defn.SingletonClass) + else if (defn.erasedToObject.contains(sym)) jsig(defn.ObjectType) else if (sym == defn.UnitClass || sym == defn.BoxedUnitModule) jsig(defn.BoxedUnitType) From 03f5c4eb28cae10de0f7e23f63446d77db0c72e2 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 6 Sep 2018 13:58:51 +0200 Subject: [PATCH 35/49] Properly erase NonEmptyTuple --- compiler/src/dotty/tools/dotc/core/Definitions.scala | 4 +++- compiler/src/dotty/tools/dotc/transform/Erasure.scala | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index e45feddb35d9..3851c0239b1b 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -717,6 +717,8 @@ class Definitions { lazy val TupleTypeRef = ctx.requiredClassRef("scala.Tuple") def TupleClass(implicit ctx: Context) = TupleTypeRef.symbol.asClass + lazy val NonEmptyTupleTypeRef = ctx.requiredClassRef("scala.NonEmptyTuple") + def NonEmptyTupleClass(implicit ctx: Context) = NonEmptyTupleTypeRef.symbol.asClass lazy val PairType = ctx.requiredClassRef("scala.*:") def PairClass(implicit ctx: Context) = PairType.symbol.asClass @@ -1225,7 +1227,7 @@ class Definitions { def isValueSubClass(sym1: Symbol, sym2: 
Symbol) = valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0 - lazy val erasedToObject = Set[Symbol](AnyClass, AnyValClass, TupleClass, SingletonClass) + lazy val erasedToObject = Set[Symbol](AnyClass, AnyValClass, TupleClass, NonEmptyTupleClass, SingletonClass) // ----- Initialization --------------------------------------------------- diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 41dd824027d2..e95f90348e03 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -141,6 +141,7 @@ class Erasure extends Phase with DenotTransformer { assert(isErasedType(tp) || isAllowed(defn.ArrayClass, "Array.scala") || isAllowed(defn.TupleClass, "Tuple.scala") || + isAllowed(defn.NonEmptyTupleClass, "Tuple.scala") || isAllowed(defn.PairClass, "Tuple.scala"), i"The type $tp - ${tp.toString} of class ${tp.getClass} of tree $tree : ${tree.tpe} / ${tree.getClass} is illegal after erasure, phase = ${ctx.phase.prev}") } From 365779910b98a347233409aab2a001f6169c6f01 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 6 Sep 2018 14:00:22 +0200 Subject: [PATCH 36/49] Make Tuple types covariant --- library/src-scala3/scala/Tuple.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index 5fb9bc6b8930..374eec97b9bc 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -98,25 +98,25 @@ object Tuple { transparent val $MaxSpecialized = 22 transparent private val XXL = $MaxSpecialized + 1 - type Head[X <: NonEmptyTuple] = X match { + type Head[+X <: NonEmptyTuple] = X match { case x *: _ => x } - type Tail[X <: NonEmptyTuple] <: Tuple = X match { + type Tail[+X <: NonEmptyTuple] <: Tuple = X match { case _ *: xs => xs } - type Concat[X <: Tuple, Y <: Tuple] <: Tuple = X 
match { + type Concat[+X <: Tuple, +Y <: Tuple] <: Tuple = X match { case Unit => Y case x1 *: xs1 => x1 *: Concat[xs1, Y] } - type Elem[X <: Tuple, N] = (X, N) match { + type Elem[+X <: Tuple, +N] = (X, N) match { case (x *: xs, 0) => x case (x *: xs, S[n1]) => Elem[xs, n1] } - type Size[X] <: Int = X match { + type Size[+X] <: Int = X match { case Unit => 0 case x *: xs => S[Size[xs]] } From d53b3fa6315045afa2acb9afe049951bf0e06407 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 6 Sep 2018 14:01:39 +0200 Subject: [PATCH 37/49] Allow generic tuple operations to be dynamic Rewrite to specialized version if static, fall back to generic runtime version otherwise. --- .../tastyreflect/StandardDefinitions.scala | 1 - library/src-scala3/scala/Tuple.scala | 209 +++++++++++++++--- library/src/scala/TupleXXL.scala | 1 + tests/neg/tuple-nonconstant.scala | 4 - tests/neg/tuple-nonconstant2.scala | 3 - tests/neg/tuple-oob2.scala | 3 - tests/run/tuples1.scala | 43 ++++ 7 files changed, 220 insertions(+), 44 deletions(-) delete mode 100644 tests/neg/tuple-nonconstant.scala delete mode 100644 tests/neg/tuple-nonconstant2.scala delete mode 100644 tests/neg/tuple-oob2.scala diff --git a/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala b/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala index 00cc6df81ddb..9d540858dab1 100644 --- a/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala +++ b/compiler/src/dotty/tools/dotc/tastyreflect/StandardDefinitions.scala @@ -59,7 +59,6 @@ trait StandardDefinitions extends scala.tasty.reflect.StandardDefinitions { defn.FunctionClass(arity, isImplicit, isErased).asClass def TupleClass(arity: Int): Symbol = defn.TupleType(arity).classSymbol.asClass - def ScalaPrimitiveValueClasses: List[Symbol] = UnitClass :: BooleanClass :: ScalaNumericValueClasses def ScalaNumericValueClasses: List[Symbol] = diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala 
index 374eec97b9bc..2ff81696c513 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -25,7 +25,7 @@ sealed trait Tuple extends Any { case Some(n) => asInstanceOf[TupleXXL].elems case None => - error(".toArray cannot be applied to tuple of unknown size") + dynamicToArray(this) } rewrite def *: [H] (x: H): H *: this.type = { @@ -47,7 +47,7 @@ sealed trait Tuple extends Any { case Some(n) => fromArray[Result]($consArray(x, toArray)) case _ => - error("*: cannot be applied to tuple of unknown size") + dynamic_*:[this.type, H](this, x) } } @@ -86,12 +86,20 @@ sealed trait Tuple extends Any { if (constValue[BoundedSize[that.type]] == 0) this.asInstanceOf[Result] else genericConcat[Result](this, that).asInstanceOf[Result] case None => - error("++ cannot be applied to tuple of unknown size") + dynamic_++[this.type, that.type](this, that) } } rewrite def genericConcat[T <: Tuple](xs: Tuple, ys: Tuple): Tuple = fromArray[T](xs.toArray ++ ys.toArray) + + rewrite def size: Size[this.type] = { + type Result = Size[this.type] + rewrite constValueOpt[BoundedSize[this.type]] match { + case Some(n) => n.asInstanceOf[Result] + case _ => dynamicSize(this).asInstanceOf[Result] + } + } } object Tuple { @@ -183,10 +191,98 @@ object Tuple { case 22 => Tuple22(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20), xs(21)).asInstanceOf[T] case _ => TupleXXL(xs).asInstanceOf[T] } + + def dynamicFromArray[T <: Tuple](xs: Array[Object]): T = xs.length match { + case 0 => ().asInstanceOf[T] + case 1 => Tuple1(xs(0)).asInstanceOf[T] + case 2 => Tuple2(xs(0), xs(1)).asInstanceOf[T] + case 3 => Tuple3(xs(0), xs(1), xs(2)).asInstanceOf[T] + case 4 => Tuple4(xs(0), xs(1), xs(2), xs(3)).asInstanceOf[T] + case 5 => Tuple5(xs(0), xs(1), xs(2), xs(3), xs(4)).asInstanceOf[T] + case 6 => Tuple6(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5)).asInstanceOf[T] + case 7 
=> Tuple7(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6)).asInstanceOf[T] + case 8 => Tuple8(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7)).asInstanceOf[T] + case 9 => Tuple9(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8)).asInstanceOf[T] + case 10 => Tuple10(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9)).asInstanceOf[T] + case 11 => Tuple11(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10)).asInstanceOf[T] + case 12 => Tuple12(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11)).asInstanceOf[T] + case 13 => Tuple13(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12)).asInstanceOf[T] + case 14 => Tuple14(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13)).asInstanceOf[T] + case 15 => Tuple15(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14)).asInstanceOf[T] + case 16 => Tuple16(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15)).asInstanceOf[T] + case 17 => Tuple17(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16)).asInstanceOf[T] + case 18 => Tuple18(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17)).asInstanceOf[T] + case 19 => Tuple19(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18)).asInstanceOf[T] + case 20 => Tuple20(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19)).asInstanceOf[T] + case 21 => Tuple21(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), 
xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20)).asInstanceOf[T] + case 22 => Tuple22(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20), xs(21)).asInstanceOf[T] + case _ => TupleXXL(xs).asInstanceOf[T] + } + + def dynamicToArray(self: Tuple): Array[Object] = (self: Any) match { + case self: Unit => + $emptyArray + case self: Tuple1[_] => + val t = self.asInstanceOf[Tuple1[Object]] + Array(t._1) + case self: Tuple2[_, _] => + val t = self.asInstanceOf[Tuple2[Object, Object]] + Array(t._1, t._2) + case self: Tuple3[_, _, _] => + val t = self.asInstanceOf[Tuple3[Object, Object, Object]] + Array(t._1, t._2, t._3) + case self: Tuple4[_, _, _, _] => + val t = self.asInstanceOf[Tuple4[Object, Object, Object, Object]] + Array(t._1, t._2, t._3, t._4) + case self: TupleXXL => + asInstanceOf[TupleXXL].elems + case self: Product => + val arr = new Array[Object](self.productArity) + for (i <- 0 until arr.length) arr(i) = self.productElement(i).asInstanceOf[Object] + arr + } + + def dynamic_*: [This <: Tuple, H] (self: Tuple, x: H): H *: This = { + type Result = H *: This + (self: Any) match { + case Unit => + Tuple1(x).asInstanceOf[Result] + case self: Tuple1[_] => + Tuple2(x, self._1).asInstanceOf[Result] + case self: Tuple2[_, _] => + Tuple3(x, self._1, self._2).asInstanceOf[Result] + case self: Tuple3[_, _, _] => + Tuple4(x, self._1, self._2, self._3).asInstanceOf[Result] + case self: Tuple4[_, _, _, _] => + Tuple5(x, self._1, self._2, self._3, self._4).asInstanceOf[Result] + case _ => + dynamicFromArray[Result]($consArray(x, dynamicToArray(self))) + } + } + + def dynamic_++[This <: Tuple, That <: Tuple](self: This, that: That): Concat[This, That] = { + type Result = Concat[This, That] + (this: Any) match { + case self: Unit => return self.asInstanceOf[Result] + case _ => + } + (that: Any) match { + case that: Unit => return self.asInstanceOf[Result] + 
case _ => + } + dynamicFromArray[Result](dynamicToArray(self) ++ dynamicToArray(that)) + } + + def dynamicSize[This <: Tuple](self: This) = (self: Any) match { + case self: Unit => 0 + case self: TupleXXL => self.elems.length + case self: Product => self.productArity + } } abstract sealed class NonEmptyTuple extends Tuple { import Tuple._ + import NonEmptyTuple._ rewrite def head: Head[this.type] = { type Result = Head[this.type] @@ -209,7 +305,7 @@ abstract sealed class NonEmptyTuple extends Tuple { val t = asInstanceOf[TupleXXL] t.elems(0) case None => - error(".head cannot be applied to tuple of unknown size") + dynamicHead[this.type](this) } resVal.asInstanceOf[Result] } @@ -234,54 +330,101 @@ abstract sealed class NonEmptyTuple extends Tuple { case Some(n) if n > 5 => fromArray[Result](toArray.tail) case None => - error(".tail cannot be applied to tuple of unknown size") + dynamicTail[this.type](this) } } - rewrite def indexOutOfBounds = error("index out of bounds") + rewrite def fallbackApply(n: Int) = + rewrite constValueOpt[n.type] match { + case Some(n: Int) => error("index out of bounds", n) + case None => dynamicApply[this.type](this, n) + } - rewrite def apply(transparent n: Int): Elem[this.type, n.type] = { + rewrite def apply(n: Int): Elem[this.type, n.type] = { type Result = Elem[this.type, n.type] rewrite constValueOpt[BoundedSize[this.type]] match { case Some(1) => val t = asInstanceOf[Tuple1[_]] - rewrite n match { - case 0 => t._1.asInstanceOf[Result] - case _ => indexOutOfBounds + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] } case Some(2) => val t = asInstanceOf[Tuple2[_, _]] - rewrite n match { - case 0 => t._1.asInstanceOf[Result] - case 1 => t._2.asInstanceOf[Result] - case _ => indexOutOfBounds + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case _ => 
fallbackApply(n).asInstanceOf[Result] } case Some(3) => val t = asInstanceOf[Tuple3[_, _, _]] - rewrite n match { - case 0 => t._1.asInstanceOf[Result] - case 1 => t._2.asInstanceOf[Result] - case 2 => t._3.asInstanceOf[Result] - case _ => indexOutOfBounds + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case Some(2) => t._3.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] } case Some(4) => val t = asInstanceOf[Tuple4[_, _, _, _]] - rewrite n match { - case 0 => t._1.asInstanceOf[Result] - case 1 => t._2.asInstanceOf[Result] - case 2 => t._3.asInstanceOf[Result] - case 3 => t._4.asInstanceOf[Result] - case _ => indexOutOfBounds + rewrite constValueOpt[n.type] match { + case Some(0) => t._1.asInstanceOf[Result] + case Some(1) => t._2.asInstanceOf[Result] + case Some(2) => t._3.asInstanceOf[Result] + case Some(3) => t._4.asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] } - case Some(s) if s > 4 && s <= $MaxSpecialized && n >= 0 && n < s => - asInstanceOf[Product].productElement(n).asInstanceOf[Result] - case Some(s) if s > $MaxSpecialized && n >= 0 && n < s => - asInstanceOf[TupleXXL].elems(n).asInstanceOf[Result] - case Some(s) => - indexOutOfBounds - case None => - error("selection (...) 
cannot be applied to tuple of unknown size") + case Some(s) if s > 4 && s <= $MaxSpecialized => + val t = asInstanceOf[Product] + rewrite constValueOpt[n.type] match { + case Some(n) if n >= 0 && n < s => t.productElement(n).asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case Some(s) if s > $MaxSpecialized => + val t = asInstanceOf[TupleXXL] + rewrite constValueOpt[n.type] match { + case Some(n) if n >= 0 && n < s => t.elems(n).asInstanceOf[Result] + case _ => fallbackApply(n).asInstanceOf[Result] + } + case _ => fallbackApply(n).asInstanceOf[Result] + } + } +} + +object NonEmptyTuple { + import Tuple._ + + def dynamicHead[This <: NonEmptyTuple] (self: This): Head[This] = { + type Result = Head[This] + val res = (self: Any) match { + case self: Tuple1[_] => self._1 + case self: Tuple2[_, _] => self._1 + case self: Tuple3[_, _, _] => self._1 + case self: Tuple4[_, _, _, _] => self._1 + case self: TupleXXL => self.elems(0) + case self: Product => self.productElement(0) + } + res.asInstanceOf[Result] + } + + def dynamicTail[This <: NonEmptyTuple] (self: This): Tail[This] = { + type Result = Tail[This] + val res = (self: Any) match { + case self: Tuple1[_] => self._1 + case self: Tuple2[_, _] => Tuple1(self._2) + case self: Tuple3[_, _, _] => Tuple2(self._2, self._3) + case self: Tuple4[_, _, _, _] => Tuple3(self._2, self._3, self._4) + case _ => dynamicFromArray[Result](self.toArray.tail) + } + res.asInstanceOf[Result] + } + + def dynamicApply[This <: NonEmptyTuple] (self: This, n: Int): Elem[This, n.type] = { + type Result = Elem[This, n.type] + val res = (self: Any) match { + case self: TupleXXL => self.elems(n) + case self: Product => self.productElement(n) } + res.asInstanceOf[Result] } } diff --git a/library/src/scala/TupleXXL.scala b/library/src/scala/TupleXXL.scala index 48a1410574f0..fe46225a206b 100644 --- a/library/src/scala/TupleXXL.scala +++ b/library/src/scala/TupleXXL.scala @@ -2,6 +2,7 @@ package scala import 
java.util.Arrays.{deepEquals, deepHashCode} final class TupleXXL private (es: Array[Object]) { + assert(es.length > 22) override def toString = elems.mkString("(", ",", ")") override def hashCode = getClass.hashCode * 41 + deepHashCode(elems) override def equals(that: Any) = that match { diff --git a/tests/neg/tuple-nonconstant.scala b/tests/neg/tuple-nonconstant.scala deleted file mode 100644 index dc0f698826e7..000000000000 --- a/tests/neg/tuple-nonconstant.scala +++ /dev/null @@ -1,4 +0,0 @@ -object Test { - def cons[X, Xs <: Tuple](x: X, xs: Xs) = x *: xs // error: *: cannot be applied to tuple of unknown size - def toArray[Xs <: Tuple](xs: Xs) = xs.toArray // (second error is suppressed right now) -} \ No newline at end of file diff --git a/tests/neg/tuple-nonconstant2.scala b/tests/neg/tuple-nonconstant2.scala deleted file mode 100644 index d5a8c4c9acc1..000000000000 --- a/tests/neg/tuple-nonconstant2.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test { - def toArray[Xs <: Tuple](xs: Xs) = xs.toArray // error: toArray cannot be applied to tuple of unknown size -} \ No newline at end of file diff --git a/tests/neg/tuple-oob2.scala b/tests/neg/tuple-oob2.scala deleted file mode 100644 index e819837d1429..000000000000 --- a/tests/neg/tuple-oob2.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test { - def elem(xs: (Int, String), n: Int) = xs(n) // error: argument to transparent parameter must be a constant expression -} \ No newline at end of file diff --git a/tests/run/tuples1.scala b/tests/run/tuples1.scala index 3ba09dd116fa..b59fd67fee2a 100644 --- a/tests/run/tuples1.scala +++ b/tests/run/tuples1.scala @@ -72,4 +72,47 @@ object Test extends App { val xsc: Unit = xs println(s"$x23 -> $x, $y, $xs") } + + val x3s: 3 = x3.size + val us: 0 = ().size + val x23s: 23 = x23.size + +// dynamic operations + + def head1(x: NonEmptyTuple): Tuple.Head[x.type] = x.head + def head2[X <: NonEmptyTuple](x: X): Tuple.Head[X] = x.head + + val hd1: Int = head1(x3) + val hd2: Int = 
head2(x3) + + def tail1(x: NonEmptyTuple): Tuple.Tail[x.type] = x.tail + def tail2[X <: NonEmptyTuple](x: X): Tuple.Tail[X] = x.tail + + val tl1: (String, Int) = tail1(x3) + val tl2: (String, Int) = tail2(x3) + + def elem[X <: NonEmptyTuple](x: X, n: Int): Tuple.Elem[X, n.type] = x(n) + val elem1: String = x3(1) + + def toArray[X <: Tuple](x: X): Array[Object] = x.toArray + val toArray1 = x3.toArray + + def cons[X, Y <: Tuple](x: X, y: Y): X *: Y = x *: y + val cons1: Boolean *: Int *: (String, Int) = cons(true, x3) + + def concat[X <: Tuple, Y <: Tuple](x: X, y: Y): Tuple.Concat[X, Y] = x ++ y + def concat0(x: Tuple, y: Tuple): Tuple.Concat[x.type, y.type] = x ++ y + val conc1: String *: Int *: Unit = concat((), tl1) + val conc2: String *: Int *: Unit = concat(tl1, ()) + val conc3: String *: Int *: String *: Int *: Unit = concat(tl1, tl1) + val conc4: String *: Int *: Unit = concat0((), tl1) + val conc5: String *: Int *: Unit = concat0(tl1, ()) + val conc6: String *: Int *: String *: Int *: Unit = concat0(tl1, tl1) + + def size[X <: Tuple](x: X): Tuple.Size[X] = x.size + def size0(x: Tuple): Tuple.Size[x.type] = x.size + val x3s0: 3 = size(x3) + val us0: 0 = size(()) + val x3s1: 3 = size0(x3) + val us1: 0 = size0(()) } \ No newline at end of file From a7ea1053f699baeba8eb9d279f811f93dc463735 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 6 Sep 2018 16:01:00 +0200 Subject: [PATCH 38/49] Equate TupleN(...) and *: types Make `T1 *: ... *: Tn *: Unit` a subtype of `(T1, ..., Tn)`. This is sound since the two types are erased to the same representation. I tried alternatively to (...) types to `*:` types instead of `Tuple` types. But then we have to translate `TupleN` classes as well since these can come from Scala-2. This looked more fragile than the solution in this commit. 
--- .../dotty/tools/dotc/core/TypeComparer.scala | 3 ++ .../tools/dotc/printing/RefinedPrinter.scala | 7 +++-- .../tools/dotc/transform/TypeUtils.scala | 4 +++ tests/run/tuples1.scala | 30 +++++++++---------- 4 files changed, 26 insertions(+), 18 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 8f46cf6bcb4c..d3c73ef77530 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -12,6 +12,7 @@ import config.Printers.{typr, constr, subtyping, gadts, noPrinter} import TypeErasure.{erasedLub, erasedGlb} import TypeApplications._ import Constants.Constant +import transform.TypeUtils._ import scala.util.control.NonFatal import typer.ProtoTypes.constrained import reporting.trace @@ -847,6 +848,8 @@ class TypeComparer(initctx: Context) extends ConstraintHandling { case info2: TypeBounds => compareLower(info2, tyconIsTypeRef = true) case info2: ClassInfo => + tycon2.name.toString.startsWith("Tuple") && + defn.isTupleType(tp2) && isSubType(tp1, tp2.toNestedPairs) || tryBaseType(info2.cls) case _ => fourthTry diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index ff146ed9ded2..ecf85a649c88 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -24,7 +24,8 @@ import TypeApplications._ import Decorators._ import config.Config import util.Positions._ -import dotty.tools.dotc.transform.SymUtils._ +import transform.SymUtils._ +import transform.TypeUtils._ import dotty.tools.dotc.transform.FirstTransform import scala.annotation.switch @@ -176,11 +177,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } homogenize(tp) match { - case AppliedType(tycon, args) => + case tp @ AppliedType(tycon, args) => val cls = tycon.typeSymbol if 
(tycon.isRepeatedParam) return toTextLocal(args.head) ~ "*" if (defn.isFunctionClass(cls)) return toTextFunction(args, cls.name.isImplicitFunction, cls.name.isErasedFunction) - if (defn.isTupleClass(cls)) return toTextTuple(args) + if (tp.tupleArity >= 2) return toTextTuple(tp.tupleElementTypes) if (isInfixType(tp)) { val l :: r :: Nil = args val opName = tyconName(tycon) diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala index f32b9df793e8..d1e7dce35c53 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala @@ -50,5 +50,9 @@ object TypeUtils { else if (defn.isTupleClass(tp1.classSymbol)) tp1.dealias.argInfos else throw new AssertionError("not a tuple") } + + /** The `*:` equivalent of an instantce of a Tuple class */ + def toNestedPairs(implicit ctx: Context): Type = + (tupleElementTypes :\ (defn.UnitType: Type))(defn.PairType.appliedTo(_, _)) } } diff --git a/tests/run/tuples1.scala b/tests/run/tuples1.scala index b59fd67fee2a..211f7df0c747 100644 --- a/tests/run/tuples1.scala +++ b/tests/run/tuples1.scala @@ -14,7 +14,7 @@ object Test extends App { val h8 = x8.head; val h8c: String = h8; println(s"h8 = $h8") val t1 = x1.tail; val t1c: Unit = t1; println(s"t1 = $t1") val t2 = x2.tail; val t2c: Int *: Unit = t2; println(s"t2 = $t2") - val t7 = x7.tail; val t7c: String *: Int *: Unit = t7.tail.tail.tail.tail; println(s"t7 = $t7") + val t7 = x7.tail; val t7c: (String, Int) = t7.tail.tail.tail.tail; println(s"t7 = $t7") val t8 = x8.tail; val t8c: Int = t8(6); println(s"t8 = $t8") val a1_0 = x1(0); val a1_0c: Int = a1_0; println(s"a1_0 = $a1_0") val a2_0 = x2(0); val a2_0c: String = a2_0; println(s"a2_0 = $a2_0") @@ -25,14 +25,14 @@ object Test extends App { val c0_0 = x0 ++ x0; val c0_0c: Unit = c0_0; println(s"c0_0 = $c0_0") val c0_1 = x0 ++ x1; val c0_1c: Int *: Unit = c0_1c; println(s"c0_1 = $c0_1") val c1_0 = x1 
++ x0; val c1_0c: Int *: Unit = c1_0c; println(s"c1_0 = $c1_0") - val c0_4 = x0 ++ x4; val c0_4c: String *: Int *: String *: Int *: Unit = c0_4; println(s"c0_4 = $c0_4") - val c4_0 = x4 ++ x0; val c4_0c: String *: Int *: String *: Int *: Unit = c4_0; println(s"c4_0 = $c4_0") - val c1_1 = x1 ++ x1; val c1_1c: Int *: Int *: Unit = c1_1; println(s"c1_1 = $c1_1") - val c1_8 = x1 ++ x8; val c1_8c: Int *: String *: Int *: String *: Int *: String *: Int *: String *: Int *: Unit = c1_8; println(s"c1_8 = $c1_8") - val c2_1 = x2 ++ x1; val c2_1c: String *: Int *: Int *: Unit = c2_1; println(s"c2_1 = $c2_1") - val c2_2 = x2 ++ x2; val c2_2c: String *: Int *: String *: Int *: Unit = c2_2; println(s"c2_2 = $c2_2") - val c2_3 = x2 ++ x3; val c2_3c: String *: Int *: Int *: String *: Int *: Unit = c2_3; println(s"c2_3 = $c2_3") - val c3_3 = x3 ++ x3; val c3_3c: Int *: String *: Int *: Int *: String *: Int *: Unit = c3_3; println(s"c3_3 = $c3_3") + val c0_4 = x0 ++ x4; val c0_4c: (String, Int, String, Int) = c0_4; println(s"c0_4 = $c0_4") + val c4_0 = x4 ++ x0; val c4_0c: (String, Int, String, Int) = c4_0; println(s"c4_0 = $c4_0") + val c1_1 = x1 ++ x1; val c1_1c: (Int, Int) = c1_1; println(s"c1_1 = $c1_1") + val c1_8 = x1 ++ x8; val c1_8c: (Int, String, Int, String, Int, String, Int, String, Int) = c1_8; println(s"c1_8 = $c1_8") + val c2_1 = x2 ++ x1; val c2_1c: (String, Int, Int) = c2_1; println(s"c2_1 = $c2_1") + val c2_2 = x2 ++ x2; val c2_2c: (String, Int, String, Int) = c2_2; println(s"c2_2 = $c2_2") + val c2_3 = x2 ++ x3; val c2_3c: (String, Int, Int, String, Int) = c2_3; println(s"c2_3 = $c2_3") + val c3_3 = x3 ++ x3; val c3_3c: (Int, String, Int, Int, String, Int) = c3_3; println(s"c3_3 = $c3_3") val x23 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) type T23 = (Int, Int, Int, Int, Int, @@ -102,12 +102,12 @@ object Test extends App { def concat[X <: Tuple, Y <: Tuple](x: X, y: Y): Tuple.Concat[X, Y] = x ++ y def concat0(x: Tuple, y: 
Tuple): Tuple.Concat[x.type, y.type] = x ++ y - val conc1: String *: Int *: Unit = concat((), tl1) - val conc2: String *: Int *: Unit = concat(tl1, ()) - val conc3: String *: Int *: String *: Int *: Unit = concat(tl1, tl1) - val conc4: String *: Int *: Unit = concat0((), tl1) - val conc5: String *: Int *: Unit = concat0(tl1, ()) - val conc6: String *: Int *: String *: Int *: Unit = concat0(tl1, tl1) + val conc1: (String, Int) = concat((), tl1) + val conc2: (String, Int) = concat(tl1, ()) + val conc3: (String, Int, String, Int) = concat(tl1, tl1) + val conc4: (String, Int) = concat0((), tl1) + val conc5: (String, Int) = concat0(tl1, ()) + val conc6: (String, Int, String, Int) = concat0(tl1, tl1) def size[X <: Tuple](x: X): Tuple.Size[X] = x.size def size0(x: Tuple): Tuple.Size[x.type] = x.size From 9897d228b64607365c541ac2393f7c0361768206 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 8 Sep 2018 16:04:47 +0200 Subject: [PATCH 39/49] Description and informal spec for match types This is largely the result of a discussion we had on Friday with Guillaume, Sandro, Georg and Olivier. The rules for typing match expressions have evolved a bit from the state we discussed there. --- docs/docs/reference/match-types.md | 187 +++++++++++++++++++++++++++++ docs/sidebar.yml | 2 + 2 files changed, 189 insertions(+) create mode 100644 docs/docs/reference/match-types.md diff --git a/docs/docs/reference/match-types.md b/docs/docs/reference/match-types.md new file mode 100644 index 000000000000..6216d69b49eb --- /dev/null +++ b/docs/docs/reference/match-types.md @@ -0,0 +1,187 @@ +--- +layout: doc-page +title: "Match Types" +--- + +A match type reduces to one of a number of right hand sides, depending on a scrutinee type. Example: + +```scala +type Elem[X] = X match { + case String => Char + case Array[t] => t + case Iterable[t] => t +} +``` +This defines a type that, depending on the scrutinee type `X`, can reduce to one of its right hand sides. 
For instance,
+```scala
+ Elem[String] =:= Char
+ Elem[Array[Int]] =:= Int
+ Elem[List[Float]] =:= Float
+ Elem[Nil] =:= Nothing
+```
+Here `=:=` is understood to mean that left and right hand sides are mutatually subtypes of each other.
+
+In general, a match type is of the form
+```scala
+ S match { P1 => T1 ... Pn => Tn }
+```
+where `S`, `T1`, ..., `Tn` are types and `P1`, ..., `Pn` are type patterns. Type variables
+in patterns start as usual with a lower case letter.
+
+Match types can form part of recursive type definitions. Example:
+```scala
+ type LeafElem[X] = X match {
+ case String => Char
+ case Array[t] => LeafElem[t]
+ case Iterable[t] => LeafElem[t]
+ case t <: AnyVal => t
+ }
+```
+Recursive match type definitions can also be given an upper bound, like this:
+```scala
+ type Concat[+Xs <: Tuple, +Ys <: Tuple] <: Tuple = Xs match {
+ case Unit => Ys
+ case x *: xs => x *: Concat[xs, Ys]
+ }
+```
+In this definition, every instance of `Concat[A, B]`, whether reducible or not, is known to be a subtype of `Tuple`. This is necessary to make the recursive invocation `x *: Concat[xs, Ys]` type check, since `*:` demands a `Tuple` as its right operand.
+
+## Representation of Match Types
+
+The internal representation of a match type
+```
+ S match { P1 => T1 ... Pn => Tn }
+```
+is `Match(S, C1, ..., Cn) <: B` where each case `Ci` is of the form
+```
+ [Xs] => P => T
+```
+Here, `[Xs]` is a type parameter clause of the variables bound in pattern `Pi`. If there are no bound type variables in a case, the type parameter clause is omitted and only the function type `P => T` is kept. So each case is either a unary function type or a type lambda over a unary function type.
+
+`B` is the declared upper bound of the match type, or `Any` if no such bound is given.
+We will leave it out in places where it does not matter for the discussion. Scrutinee, bound and pattern types must be first-order types.
+ +## Match type reduction + +We define match type reduction in terms of an auxiliary relation, `can-reduce`: + +``` + Match(S, C1, ..., Cn) can-reduce i, T' +``` +if `Ci = [Xs] => P => T` and there are minimal instantiations `Is` of the type variables `Xs` such that +``` + S <: [Xs := Is] P + T' = [Xs := Is] T +``` +An instantiation `Is` is _minimal_ for `Xs` if all type variables in `Xs` that appear +covariantly and nonvariantly in `Is` are as small as possible and all type variables in `Xs` that appear contravariantly in `Is` are as large as possible. Here, "small" and "large" are understood wrt `<:`. + +For simplicity, we have omitted constraint handling so far. The full formulation of subtyping tests describes them as a function from a constraint and a pair of types to +either _success_ and a new constraint or _failure_. In the context of reduction, the subtyping test `S <: [Xs := Is] P` is understood to leave the bounds of all variables +in the input constraint unchanged, i.e. existing variables in the constraint cannot be instantiated by matching the scrutinee against the patterns. + +Using `can-reduce`, we can now define match type reduction proper in the `reduces-to` relation: +``` + Match(S, C1, ..., Cn) reduces-to T +``` +if `Ci_1, ..., Ci_k` is a maximal non-empty subset of `C1, ..., Cn` such that for each `i_j`: +``` + Match(S, C1, ..., Cn) can-reduce i_j, Ti_j +``` +and +``` + T = Ti_1 & ... & Ti_k +``` +In other words, a match reduces to the intersection of all right hand sides it can reduce to. This "parallel" notion of reduction was picked for its nice algebraic properties, even though it does not correspond directly to the operational semantics of pattern matching on terms, where the first matching case is chosen. + +## Subtyping Rules for Match Types + +The following rules apply to match types. For simplicity, we omit environments and constraints. 
+
+The first rule is a structural comparison between two match types:
+```
+ Match(S, C1, ..., Cn) <: Match(T, D1, ..., Dm)
+```
+` `if
+```
+ S <: T, m <= n, Ci <: Di for i in 1..m
+```
+I.e. scrutinees and corresponding cases must be subtypes, no case re-ordering is allowed, but the subtype can have more cases than the supertype.
+
+The second rule states that a match type and its redux are mutual subtypes
+```
+ Match(S, Cs) <: T
+ T <: Match(S, Cs)
+```
+` `if
+```
+ Match(S, Cs) reduces-to T
+```
+
+The third rule states that a match type conforms to its upper bound
+```
+ (Match(S, Cs) <: B) <: B
+```
+
+## Variance Laws for Match Types
+
+Within a match type `Match(S, Cs) <: B`, all occurrences of type variables count as covariant. By the nature of the cases `Ci` this means that occurrences in pattern position are contravariant (since patterns are represented as function type arguments).
+
+## Typing Rules for Match Expressions
+
+Typing rules for match expressions have to account for the difference between sequential match on the term level and parallel match on the type level. As a running example consider:
+```scala
+ type M[X] = X match {
+ case A => 1
+ case B => 2
+ }
+ def m[X](x: X): M[X] = x match {
+ case _: A => 1
+ case _: B => 2
+ }
+```
+As a first approximation, the typing rules for match expressions are as usual. E.g. to typecheck the first case `case _: A => 1` of the definition of `m` above, GADT matching will produce the constraint `X <: A`. Therefore, `M[X]` reduces to the singleton type `1`.
+The right hand side `1` of the case conforms to this type, so the case typechecks. Typechecking the second case proceeds similarly.
+
+However, it turns out that these rules are not enough for type soundness. To see this, assume that `A` and `B` are traits that are both extended by a common class `C`. In this case, `M[C]` reduces to `1 & 2`, but `m(new C)` reduces to `1`.
 So the type of the application `m(new C)` does not match the reduced result type of `m`, which means soundness is violated.
+
+To plug the soundness hole, we have to tighten the typing rules for match expressions. In the example above we need to also consider the case where the scrutinee type `X` is a subtype of `A` and `B`. In this case, the match expression still returns `1` but the match type `M[X]` reduces to `1 & 2`, which means there should be a type error. However, this second check can be omitted if `A` and `B` are types that don't overlap. We can omit the check because in that case there is no scrutinee value `x` that could reduce to `1`, so no discrepancy can arise at runtime.
+
+More generally, we proceed as follows:
+
+When typechecking the `i`th case of a match expression
+```
+ t match { case P_1 => t_1 ... case P_n => t_n }
+```
+where `t` has type `T` and `t_i` has type `T_i`
+against an expected match type `R`:
+
+ 1. Determine all maximal sequences of
+ patterns `P_j_1, ..., P_j_m` that follow `P_i` in the match expression and that do overlap with `P_i`. That is, `P_i, P_j_1, ..., P_j_m` all match at least one common value.
+
+ 2. For each such sequence, verify that `T_i <: R` under the GADT constraint arising from matching the scrutinee type `T` against all of the patterns `P_i, P_j_1, ..., P_j_m`.
+
+In the example above, `A` and `B` would be overlapping because they have the common subclass `C`. Hence, we have to check that the right-hand side `1` is a subtype of `M[X]`
+under the assumptions that `X <: A` and `X <: B`. Under these assumptions `M[X]` reduces
+to `1 & 2`, which gives a type error.
+
+For simplicity, we have disregarded the `null` value in this discussion. `null` does not cause a fundamental problem but complicates things somewhat because some forms of patterns do not match `null`.
+
+## Overlapping Patterns
+
+A complete definition of when two patterns or types overlap still needs to be worked out.
Some examples we want to cover are: + + - Two classes overlap only if one is a subtype of the other + - A final class `C` overlaps with a trait `T` only if `C` extends `T` directly or indirectly. + - A class overlaps with a sealed trait `T` only if it overlaps with one of the known subclasses of `T`. + - An abstract type or type parameter `A` overlaps with a type `B` only if `A`'s upper bound overlaps with `B`. + - A union type `A_1 | A_2` overlaps with `B` only if `A_1` overlaps with `B` or `A_2` overlaps with `B`. + - An intersection type `A_1 & A_2` overlaps with `B` only if both `A_1` and `A_2` overlap with `B`. + - If `C[X_1, ..., X_n]` is a case class, then the instance type `C[A_1, ..., A_n]` overlaps with the instance type `C[B_1, ..., B_n]` only if for every index `i` in `1..n`, + if `X_i` is the type of a parameter of the class, then `A_i` overlaps with `B_i`. + + The last rule in particular is needed to detect non-overlaps for cases where the scrutinee and the patterns are tuples. I.e. `(Int, String)` does not overlap `(Int, Int)` since +`String` does not overlap `Int`. 
+ + + diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 1da6f5358340..396bd336ed34 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -25,6 +25,8 @@ sidebar: url: docs/reference/union-types.html - title: Type lambdas url: docs/reference/type-lambdas.html + - title: Match types + url: docs/reference/match-types.html - title: Implicit Function Types url: docs/reference/implicit-function-types.html - title: Dependent Function Types From 7a0c31fbb7b1594c190ef76301779a05d762b6c6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Sat, 8 Sep 2018 22:02:54 +0200 Subject: [PATCH 40/49] Fix typo --- docs/docs/reference/match-types.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/docs/reference/match-types.md b/docs/docs/reference/match-types.md index 6216d69b49eb..7968214d2e0b 100644 --- a/docs/docs/reference/match-types.md +++ b/docs/docs/reference/match-types.md @@ -19,7 +19,7 @@ This defines a type that, depending on the scrutinee type `X`, can reduce to one Elem[List[Float]] =:= Float Elem[Nil] =:= Nothing ``` -Here `=:=` is understood to mean that left and right hand sides are mutatually subtypes of each other. +Here `=:=` is understood to mean that left and right hand sides are mutually subtypes of each other. In general, a match type is of the form ```scala @@ -182,6 +182,3 @@ A complete defininition of when two patterns or types overlap still needs to be The last rule in particular is needed to detect non-overlaps for cases where the scrutinee and the patterns are tuples. I.e. `(Int, String)` does not overlap `(Int, Int)` since `String` does not overlap `Int`. 
- - - From 0ca095fbce8afc1a66779a596072112700b3197d Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 10 Sep 2018 10:56:55 +0200 Subject: [PATCH 41/49] Add related work section to match-types.md --- docs/docs/reference/match-types.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/docs/reference/match-types.md b/docs/docs/reference/match-types.md index 7968214d2e0b..0b79df81b8c2 100644 --- a/docs/docs/reference/match-types.md +++ b/docs/docs/reference/match-types.md @@ -182,3 +182,23 @@ A complete defininition of when two patterns or types overlap still needs to be The last rule in particular is needed to detect non-overlaps for cases where the scrutinee and the patterns are tuples. I.e. `(Int, String)` does not overlap `(Int, Int)` since `String` does not overlap `Int`. + +## Related Work + +Match types have similarities with [closed type families](https://wiki.haskell.org/GHC/Type_families) in Haskell. Some differences are: + + - Subtyping instead of type equalities. + - Match type reduction does not tighten the underlying constraint, whereas type family reduction does unify. This difference in approach mirrors the difference between local type inference in Scala and global type inference in Haskell. + - No a-priory requirement that cases are non-overlapping. Uses parallel reduction + instead of always chosing a unique branch. + +Match types are also similar to Typescript's [conditional types](https://github.com/Microsoft/TypeScript/pull/21316). The main differences here are: + + - Conditional types only reduce if scrutinee and pattern are ground, whereas + match types also work for type parameters and abstract types. + - Match types can bind variables in type patterns. + - Match types support direct recursion. + +Conditional types on Typescript distribute through union types. We should evaluate whether match types should support this as well. 
+ + From 194d85d17a3e392758f220f29b1692a1f1be2bb4 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Mon, 10 Sep 2018 12:07:44 +0200 Subject: [PATCH 42/49] Fix matchtype termination - Don't reduce eagerly when forming matchtypes, as this could generate illegal cycles. - Add section to match-types.md how termination is handled. --- .../src/dotty/tools/dotc/core/Types.scala | 6 ++- docs/docs/reference/match-types.md | 53 ++++++++++++++++++- tests/neg/matchtype-loop.scala | 17 ++++++ 3 files changed, 72 insertions(+), 4 deletions(-) create mode 100644 tests/neg/matchtype-loop.scala diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 44f72c412032..67918f6fc9db 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3636,8 +3636,10 @@ object Types { object MatchType { def apply(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = { - val mt = unique(new CachedMatchType(bound, scrutinee, cases)) - mt.reduced.orElse(mt) + unique(new CachedMatchType(bound, scrutinee, cases)) + // TODO: maybe we should try to reduce match types immediately, but this risks creating illegal + // cycles. So we can do this only if we can prove that the redux is in some sense simpler than + // the original type. } } diff --git a/docs/docs/reference/match-types.md b/docs/docs/reference/match-types.md index 0b79df81b8c2..19814cd8a2be 100644 --- a/docs/docs/reference/match-types.md +++ b/docs/docs/reference/match-types.md @@ -183,13 +183,62 @@ A complete defininition of when two patterns or types overlap still needs to be The last rule in particular is needed to detect non-overlaps for cases where the scrutinee and the patterns are tuples. I.e. `(Int, String)` does not overlap `(Int, Int)` since `String` does not overlap `Int`. 
+## Handling Termination
+
+Match type definitions can be recursive, which raises the question whether and how to check
+that reduction terminates. This is currently an open question. We should investigate whether
+there are workable ways to enforce that recursion is primitive.
+
+Note that, since reduction is linked to subtyping, we already have a cycle detection mechanism in place.
+So the following will already give a reasonable error message:
+```scala
+ type L[X] = X match {
+ case Int => L[X]
+ }
+ def g[X]: L[X] = ???
+```
+
+```
+ | val x: Int = g[Int]
+ | ^^^^^^
+ | found: Test.L[Int]
+ | required: Int
+```
+
+The subtype cycle test can be circumvented by producing larger types in each recursive invocation, as in the following definitions:
+```scala
+ type LL[X] = X match {
+ case Int => LL[LL[X]]
+ }
+ def gg[X]: LL[X] = ???
+```
+In this case subtyping enters into an infinite recursion. This is not as bad as it looks, however, because
+`dotc` turns selected stack overflows into type errors. If there is a stack overflow during subtyping,
+the exception will be caught and turned into a compile-time error that indicates
+a trace of the subtype tests that caused the overflow without showing a full stacktrace.
+Concretely:
+```
+ | val xx: Int = gg[Int]
+ | ^
+ |Recursion limit exceeded.
+ |Maybe there is an illegal cyclic reference?
+ |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option.
+ |A recurring operation is (inner to outer):
+ |
+ | subtype Test.LL[Int] <:< Int
+ | subtype Test.LL[Int] <:< Int
+ | ...
+ | subtype Test.LL[Int] <:< Int
+```
+(The actual error message shows some additional lines in the stacktrace).
+
 ## Related Work
 
 Match types have similarities with [closed type families](https://wiki.haskell.org/GHC/Type_families) in Haskell. Some differences are:
 
 - Subtyping instead of type equalities.
- Match type reduction does not tighten the underlying constraint, whereas type family reduction does unify. This difference in approach mirrors the difference between local type inference in Scala and global type inference in Haskell. - - No a-priory requirement that cases are non-overlapping. Uses parallel reduction + - No a-priori requirement that cases are non-overlapping. Uses parallel reduction instead of always chosing a unique branch. Match types are also similar to Typescript's [conditional types](https://github.com/Microsoft/TypeScript/pull/21316). The main differences here are: @@ -199,6 +248,6 @@ Match types are also similar to Typescript's [conditional types](https://github. - Match types can bind variables in type patterns. - Match types support direct recursion. -Conditional types on Typescript distribute through union types. We should evaluate whether match types should support this as well. +Conditional types in Typescript distribute through union types. We should evaluate whether match types should support this as well. diff --git a/tests/neg/matchtype-loop.scala b/tests/neg/matchtype-loop.scala new file mode 100644 index 000000000000..2b91fea86ba0 --- /dev/null +++ b/tests/neg/matchtype-loop.scala @@ -0,0 +1,17 @@ +object Test { + type L[X] = X match { + case Int => L[X] + } + type LL[X] = X match { + case Int => LL[LL[X]] + } + def a: L[Boolean] = ??? + def b: L[Int] = ??? + def g[X]: L[X] = ??? + def g[X]: L[X] = ??? // error: found: L[Int], required: Int + + def aa: LL[Boolean] = ??? + def bb: LL[Int] = ??? // error: recursion limit exceeded with subtype LazyRef(Test.LL[Int]) <:< Int + def gg[X]: LL[X] = ??? 
+ val xx: Int = gg[Int] // error: recursion limit exceeded with subtype LazyRef(Test.LL[Int]) <:< Int +} From c7ee07c2dff0e72eac486eac81b9888777a1ae50 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 12 Sep 2018 15:36:21 +0200 Subject: [PATCH 43/49] Fix `Elem` method for sizes > 23 This used to give an index-out-of-bounds error. --- library/src-scala3/scala/Tuple.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index 2ff81696c513..78e5b77607de 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -342,7 +342,7 @@ abstract sealed class NonEmptyTuple extends Tuple { rewrite def apply(n: Int): Elem[this.type, n.type] = { type Result = Elem[this.type, n.type] - rewrite constValueOpt[BoundedSize[this.type]] match { + rewrite constValueOpt[Size[this.type]] match { case Some(1) => val t = asInstanceOf[Tuple1[_]] rewrite constValueOpt[n.type] match { From 00de2f80e5d8162b537e753a15475e28e75eee19 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 12 Sep 2018 16:04:56 +0200 Subject: [PATCH 44/49] Print max constraint under -Ydetailed-stats --- .../src/dotty/tools/dotc/core/ConstraintRunInfo.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala index 55ce0292f115..73924d0cf19d 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc package core import Contexts._ -import config.Printers.typr +import config.Printers.{default, typr} trait ConstraintRunInfo { self: Run => private[this] var maxSize = 0 @@ -12,8 +12,9 @@ trait ConstraintRunInfo { self: Run => maxSize = size maxConstraint = c } - def printMaxConstraint()(implicit ctx: Context) = - if (maxSize > 0) 
typr.println(s"max constraint = ${maxConstraint.show}") - + def printMaxConstraint()(implicit ctx: Context) = { + val printer = if (ctx.settings.YdetailedStats.value) default else typr + if (maxSize > 0) printer.println(s"max constraint = ${maxConstraint.show}") + } protected def reset() = maxConstraint = null } From c51a2c019883053fd3d4cc26bc83eeecac1b9a13 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 12 Sep 2018 17:10:43 +0200 Subject: [PATCH 45/49] Normalize when simplifying We cannot normalize match types and S types on creation since that ignores constraints that might change. So we normalize instead as part of `simplify`. The new scheme causes deeper subtype recursions than the old, so the Tuple23 part in tuples1 had to me moved to pos-deep-subtype. --- .../tools/dotc/core/TypeApplications.scala | 11 +--- .../src/dotty/tools/dotc/core/TypeOps.scala | 64 ++++++++++--------- .../src/dotty/tools/dotc/core/Types.scala | 49 ++++++++++++-- .../dotty/tools/dotc/CompilationTests.scala | 1 + .../matchtype-loop.scala | 2 +- tests/pos-deep-subtype/tuples23.scala | 27 ++++++++ tests/run/tuples1.check | 3 - tests/run/tuples1.scala | 24 ------- 8 files changed, 109 insertions(+), 72 deletions(-) rename tests/{neg => neg-custom-args}/matchtype-loop.scala (87%) create mode 100644 tests/pos-deep-subtype/tuples23.scala diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index d5530ad873ff..e4bcea5ce191 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -410,15 +410,8 @@ class TypeApplications(val self: Type) extends AnyVal { LazyRef(c => dealiased.ref(c).appliedTo(args)) case dealiased: WildcardType => WildcardType(dealiased.optBounds.appliedTo(args).bounds) - case dealiased: TypeRef => - val sym = dealiased.symbol - if (sym == defn.NothingClass) return dealiased - if (defn.isTypelevel_S(sym) && 
args.length == 1) - args.head.safeDealias match { - case ConstantType(Constant(n: Int)) => return ConstantType(Constant(n + 1)) - case none => - } - AppliedType(self, args) + case dealiased: TypeRef if dealiased.symbol == defn.NothingClass => + dealiased case dealiased => AppliedType(self, args) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index d6cb9f528b2e..23ddd3b31afa 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -80,35 +80,41 @@ trait TypeOps { this: Context => // TODO: Make standalone object. pre.isStable || !ctx.phase.isTyper /** Implementation of Types#simplified */ - final def simplify(tp: Type, theMap: SimplifyMap): Type = tp match { - case tp: NamedType => - if (tp.symbol.isStatic || (tp.prefix `eq` NoPrefix)) tp - else tp.derivedSelect(simplify(tp.prefix, theMap)) match { - case tp1: NamedType if tp1.denotationIsCurrent => - val tp2 = tp1.reduceProjection - //if (tp2 ne tp1) println(i"simplified $tp1 -> $tp2") - tp2 - case tp1 => tp1 - } - case tp: TypeParamRef => - if (tp.paramName.is(DepParamName)) { - val bounds = ctx.typeComparer.bounds(tp) - if (bounds.lo.isRef(defn.NothingClass)) bounds.hi else bounds.lo - } - else { - val tvar = typerState.constraint.typeVarOfParam(tp) - if (tvar.exists) tvar else tp - } - case _: ThisType | _: BoundType => - tp - case tp: AliasingBounds => - tp.derivedAlias(simplify(tp.alias, theMap)) - case AndType(l, r) if !ctx.mode.is(Mode.Type) => - simplify(l, theMap) & simplify(r, theMap) - case OrType(l, r) if !ctx.mode.is(Mode.Type) => - simplify(l, theMap) | simplify(r, theMap) - case _ => - (if (theMap != null) theMap else new SimplifyMap).mapOver(tp) + final def simplify(tp: Type, theMap: SimplifyMap): Type = { + def mapOver = (if (theMap != null) theMap else new SimplifyMap).mapOver(tp) + tp match { + case tp: NamedType => + if (tp.symbol.isStatic || (tp.prefix `eq` 
NoPrefix)) tp
+ else tp.derivedSelect(simplify(tp.prefix, theMap)) match {
+ case tp1: NamedType if tp1.denotationIsCurrent =>
+ val tp2 = tp1.reduceProjection
+ //if (tp2 ne tp1) println(i"simplified $tp1 -> $tp2")
+ tp2
+ case tp1 => tp1
+ }
+ case tp: TypeParamRef =>
+ if (tp.paramName.is(DepParamName)) {
+ val bounds = ctx.typeComparer.bounds(tp)
+ if (bounds.lo.isRef(defn.NothingClass)) bounds.hi else bounds.lo
+ }
+ else {
+ val tvar = typerState.constraint.typeVarOfParam(tp)
+ if (tvar.exists) tvar else tp
+ }
+ case _: ThisType | _: BoundType =>
+ tp
+ case tp: AliasingBounds =>
+ tp.derivedAlias(simplify(tp.alias, theMap))
+ case AndType(l, r) if !ctx.mode.is(Mode.Type) =>
+ simplify(l, theMap) & simplify(r, theMap)
+ case OrType(l, r) if !ctx.mode.is(Mode.Type) =>
+ simplify(l, theMap) | simplify(r, theMap)
+ case _: AppliedType | _: MatchType =>
+ val normed = tp.tryNormalize
+ if (normed.exists) normed else mapOver
+ case _ =>
+ mapOver
+ }
 }
 
 class SimplifyMap extends TypeMap {
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala
index 67918f6fc9db..e390635233c1 100644
--- a/compiler/src/dotty/tools/dotc/core/Types.scala
+++ b/compiler/src/dotty/tools/dotc/core/Types.scala
@@ -34,6 +34,7 @@ import annotation.tailrec
 import language.implicitConversions
 import scala.util.hashing.{ MurmurHash3 => hashing }
 import config.Printers.{core, typr}
+import reporting.trace
 import java.lang.ref.WeakReference
 import scala.annotation.internal.sharable
@@ -1074,6 +1075,20 @@
 /** Like `dealiasKeepAnnots`, but keeps only refining annotations */
 final def dealiasKeepRefiningAnnots(implicit ctx: Context): Type = dealias1(keepIfRefining)
 
+ /** The result of normalization using `tryNormalize`, or the type itself if
+ * tryNormalize yields NoType
+ */
+ final def normalized(implicit ctx: Context) = {
+ val normed = tryNormalize
+ if (normed.exists) normed else this
+ }
+
+ /** If this type can be
normalized at the top-level by rewriting match types + * of S[n] types, the result after applying all toplevel normalizations, + * otherwise NoType + */ + def tryNormalize(implicit ctx: Context): Type = NoType + private def widenDealias1(keep: AnnotatedType => Context => Boolean)(implicit ctx: Context): Type = { val res = this.widen.dealias1(keep) if (res eq this) res else res.widenDealias1(keep) @@ -3264,6 +3279,29 @@ object Types { cachedSuper } + override def tryNormalize(implicit ctx: Context): Type = tycon match { + case tycon: TypeRef => + def tryMatchAlias = tycon.info match { + case MatchAlias(alias) => + trace("normalize $this", show = true) { + alias.applyIfParameterized(args).tryNormalize + } + case _ => + NoType + } + if (defn.isTypelevel_S(tycon.symbol) && args.length == 1) { + trace("normalize S $this", show = true) { + args.head.normalized match { + case ConstantType(Constant(n: Int)) => ConstantType(Constant(n + 1)) + case none => tryMatchAlias + } + } + } + else tryMatchAlias + case _ => + NoType + } + def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match { case tycon: TypeRef => tycon.info match { @@ -3574,6 +3612,8 @@ object Types { private[this] var myReduced: Type = null private[this] var reductionContext: mutable.Map[Type, TypeBounds] = null + override def tryNormalize(implicit ctx: Context): Type = reduced.normalized + def reduced(implicit ctx: Context): Type = { val trackingCtx = ctx.fresh.setTypeComparerFn(new TrackingTypeComparer(_)) val cmp = trackingCtx.typeComparer.asInstanceOf[TrackingTypeComparer] @@ -3617,7 +3657,8 @@ object Types { if (!Config.cacheMatchReduced || myReduced == null || !upToDate) { record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") - myReduced = recur(cases)(trackingCtx) + myReduced = + trace(i"reduce match type $this", show = true) { recur(cases)(trackingCtx) } updateReductionContext() } myReduced @@ -3635,12 +3676,8 @@ object Types { class 
CachedMatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends MatchType(bound, scrutinee, cases) object MatchType { - def apply(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = { + def apply(bound: Type, scrutinee: Type, cases: List[Type])(implicit ctx: Context) = unique(new CachedMatchType(bound, scrutinee, cases)) - // TODO: maybe we should try to reduce match types immediately, but this risks creating illegal - // cycles. So we can do this only if we can prove that the redux is in some sense simpler than - // the original type. - } } // ------ ClassInfo, Type Bounds -------------------------------------------------- diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 005c95099377..778062c31a6c 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -161,6 +161,7 @@ class CompilationTests extends ParallelTesting { compileFile("tests/neg-custom-args/i1754.scala", allowDeepSubtypes) + compileFilesInDir("tests/neg-custom-args/isInstanceOf", allowDeepSubtypes and "-Xfatal-warnings") + compileFile("tests/neg-custom-args/i3627.scala", allowDeepSubtypes) + + compileFile("tests/neg-custom-args/matchtype-loop.scala", allowDeepSubtypes) + compileFile("tests/neg-custom-args/completeFromSource/nested/Test1.scala", defaultOptions.and("-sourcepath", "tests/neg-custom-args", "-scansource")) }.checkExpectedErrors() diff --git a/tests/neg/matchtype-loop.scala b/tests/neg-custom-args/matchtype-loop.scala similarity index 87% rename from tests/neg/matchtype-loop.scala rename to tests/neg-custom-args/matchtype-loop.scala index 2b91fea86ba0..4a7c1ab8d988 100644 --- a/tests/neg/matchtype-loop.scala +++ b/tests/neg-custom-args/matchtype-loop.scala @@ -8,7 +8,7 @@ object Test { def a: L[Boolean] = ??? def b: L[Int] = ??? def g[X]: L[X] = ??? - def g[X]: L[X] = ??? 
// error: found: L[Int], required: Int + val x: Int = g[Int] // error: found: L[Int], required: Int def aa: LL[Boolean] = ??? def bb: LL[Int] = ??? // error: recursion limit exceeded with subtype LazyRef(Test.LL[Int]) <:< Int diff --git a/tests/pos-deep-subtype/tuples23.scala b/tests/pos-deep-subtype/tuples23.scala new file mode 100644 index 000000000000..e442842e834b --- /dev/null +++ b/tests/pos-deep-subtype/tuples23.scala @@ -0,0 +1,27 @@ +object Test extends App { + val x23 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) + type T23 = (Int, Int, Int, Int, Int, + Int, Int, Int, Int, Int, + Int, Int, Int, Int, Int, + Int, Int, Int, Int, Int, + Int, Int, Int) + val x23c: T23 = x23 + println(x23) + assert(x23(0) == 1) + assert(x23(22) == 23) + + x23 match { + case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => + println(x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + x12 + x13 + x14 + x15 + x16 + x17 + x18 + x19 + x20 + x21 + x22 + x23) + } + rewrite def decompose3 = rewrite x23 match { case x *: y *: xs => (x, y, xs) } + + { val (x, y, xs) = decompose3 + val xc: Int = x + val yc: Int = y + val xsc: Unit = xs + println(s"$x23 -> $x, $y, $xs") + } + + val x23s: 23 = x23.size +} \ No newline at end of file diff --git a/tests/run/tuples1.check b/tests/run/tuples1.check index db564031fb7e..5fea28566dba 100644 --- a/tests/run/tuples1.check +++ b/tests/run/tuples1.check @@ -32,8 +32,5 @@ c2_1 = (A,1,1) c2_2 = (A,1,A,1) c2_3 = (A,1,2,A,1) c3_3 = (2,A,1,2,A,1) -(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) -276 (A,1) -> A, (1) (A,1) -> A, 1, () -(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) -> 1, 2, (3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) diff --git a/tests/run/tuples1.scala b/tests/run/tuples1.scala index 211f7df0c747..dd1ce0c2d554 100644 --- a/tests/run/tuples1.scala +++ b/tests/run/tuples1.scala @@ -34,24 +34,8 @@ 
object Test extends App { val c2_3 = x2 ++ x3; val c2_3c: (String, Int, Int, String, Int) = c2_3; println(s"c2_3 = $c2_3") val c3_3 = x3 ++ x3; val c3_3c: (Int, String, Int, Int, String, Int) = c3_3; println(s"c3_3 = $c3_3") - val x23 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) - type T23 = (Int, Int, Int, Int, Int, - Int, Int, Int, Int, Int, - Int, Int, Int, Int, Int, - Int, Int, Int, Int, Int, - Int, Int, Int) - val x23c: T23 = x23 - println(x23) - assert(x23(0) == 1) - assert(x23(22) == 23) - - x23 match { - case (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23) => - println(x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + x12 + x13 + x14 + x15 + x16 + x17 + x18 + x19 + x20 + x21 + x22 + x23) - } rewrite def decompose1 = rewrite x2 match { case x *: xs => (x, xs) } rewrite def decompose2 = rewrite x2 match { case x *: y *: xs => (x, y, xs) } - rewrite def decompose3 = rewrite x23 match { case x *: y *: xs => (x, y, xs) } { val (x, xs) = decompose1 val xc: String = x @@ -66,16 +50,8 @@ object Test extends App { println(s"$x2 -> $x, $y, $xs") } - { val (x, y, xs) = decompose3 - val xc: Int = x - val yc: Int = y - val xsc: Unit = xs - println(s"$x23 -> $x, $y, $xs") - } - val x3s: 3 = x3.size val us: 0 = ().size - val x23s: 23 = x23.size // dynamic operations From 16d1856db6f1a42432095f1071964efe2672a57e Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 12 Sep 2018 22:22:49 +0200 Subject: [PATCH 46/49] Propagate bound into nested match types --- compiler/src/dotty/tools/dotc/typer/Typer.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 234624edea09..1ec4b28eb8c4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -1317,7 +1317,9 @@ class Typer extends Namer } 
def typedMatchTypeTree(tree: untpd.MatchTypeTree, pt: Type)(implicit ctx: Context): Tree = { - val bound1 = typed(tree.bound) + val bound1 = + if (tree.bound.isEmpty && isFullyDefined(pt, ForceDegree.none)) TypeTree(pt) + else typed(tree.bound) val sel1 = typed(tree.selector) val pt1 = if (bound1.isEmpty) pt else bound1.tpe val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1.tpe, pt1)) From 536c350bccb987665706a88061f7f7df543ed3cf Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 12 Sep 2018 23:00:49 +0200 Subject: [PATCH 47/49] Match cases in parallel ... as mandated by new spec. This required some changes to Tuple.scala to avoid blow-up of repeated matches. Also, scrutinees of bottom types are now specialized to always yield NoType. This makes sense since corresponding match expressions would not match anything, either. It also prevents deep subtype recursions with Nothing as scrutinee. --- .../src/dotty/tools/dotc/core/Types.scala | 48 +++++++------------ library/src-scala3/scala/Tuple.scala | 29 ++++++----- tests/pos-deep-subtype/tuples2.scala | 18 +++---- 3 files changed, 40 insertions(+), 55 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index e390635233c1..aef9a70dd2c4 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3283,14 +3283,14 @@ object Types { case tycon: TypeRef => def tryMatchAlias = tycon.info match { case MatchAlias(alias) => - trace("normalize $this", show = true) { + trace("normalize $this", typr, show = true) { alias.applyIfParameterized(args).tryNormalize } case _ => NoType } if (defn.isTypelevel_S(tycon.symbol) && args.length == 1) { - trace("normalize S $this", show = true) { + trace("normalize S $this", typr, show = true) { args.head.normalized match { case ConstantType(Constant(n: Int)) => ConstantType(Constant(n + 1)) case none => tryMatchAlias @@ -3590,25 +3590,6 @@ object Types { 
def alternatives(implicit ctx: Context): List[Type] = cases.map(caseType) def underlying(implicit ctx: Context): Type = bound - private def wildApproxMap(implicit ctx: Context) = new TypeMap { - def apply(t: Type) = t match { - case t: TypeRef => - t.info match { - case TypeBounds(lo, hi) if lo `ne` hi => WildcardType - case _ => mapOver(t) - } - case t: ParamRef => WildcardType - case _ => mapOver(t) - } - } - - private[this] var myApproxScrut: Type = null - - def approximatedScrutinee(implicit ctx: Context): Type = { - if (myApproxScrut == null) myApproxScrut = wildApproxMap.apply(scrutinee) - myApproxScrut - } - private[this] var myReduced: Type = null private[this] var reductionContext: mutable.Map[Type, TypeBounds] = null @@ -3618,15 +3599,6 @@ object Types { val trackingCtx = ctx.fresh.setTypeComparerFn(new TrackingTypeComparer(_)) val cmp = trackingCtx.typeComparer.asInstanceOf[TrackingTypeComparer] - def recur(cases: List[Type])(implicit ctx: Context): Type = cases match { - case Nil => NoType - case cas :: cases1 => - val r = cmp.matchCase(scrutinee, cas, instantiate = true) - if (r.exists) r - else if (cmp.matchCase(approximatedScrutinee, cas, instantiate = false).exists) NoType - else recur(cases1) - } - def isRelevant(tp: Type) = tp match { case tp: TypeParamRef => ctx.typerState.constraint.entry(tp).exists case tp: TypeRef => ctx.gadt.bounds.contains(tp.symbol) @@ -3658,7 +3630,21 @@ object Types { record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") myReduced = - trace(i"reduce match type $this", show = true) { recur(cases)(trackingCtx) } + trace(i"reduce match type $this", typr, show = true) { + if (defn.isBottomType(scrutinee)) defn.NothingType + else { + val applicableBranches = cases + .map(cmp.matchCase(scrutinee, _, instantiate = true)(trackingCtx)) + .filter(_.exists) + applicableBranches match { + case Nil => NoType + case applicableBranch :: Nil => applicableBranch + case _ => + 
record(i"MatchType.multi-branch") + ctx.typeComparer.glb(applicableBranches) + } + } + } updateReductionContext() } myReduced diff --git a/library/src-scala3/scala/Tuple.scala b/library/src-scala3/scala/Tuple.scala index 78e5b77607de..a6f0a5196055 100644 --- a/library/src-scala3/scala/Tuple.scala +++ b/library/src-scala3/scala/Tuple.scala @@ -119,9 +119,12 @@ object Tuple { case x1 *: xs1 => x1 *: Concat[xs1, Y] } - type Elem[+X <: Tuple, +N] = (X, N) match { - case (x *: xs, 0) => x - case (x *: xs, S[n1]) => Elem[xs, n1] + type Elem[+X <: Tuple, +N] = X match { + case x *: xs => + N match { + case 0 => x + case S[n1] => Elem[xs, n1] + } } type Size[+X] <: Int = X match { @@ -129,21 +132,17 @@ object Tuple { case x *: xs => S[Size[xs]] } - private type XXL = S[$MaxSpecialized.type] - - private type BoundedS[N <: Int] = N match { - case XXL => XXL - case _ => S[N] - } - - private[scala] type BoundedSize[X] <: Int = X match { + private[scala] type BoundedSizeRecur[X, L <: Int] <: Int = X match { case Unit => 0 - case x *: xs => BoundedSize[xs] match { - case XXL => XXL - case _ => S[BoundedSize[xs]] - } + case x *: xs => + L match { + case 0 => 0 + case S[n] => S[BoundedSizeRecur[xs, n]] + } } + private[scala] type BoundedSize[X] = BoundedSizeRecur[X, 23] + val $emptyArray = Array[Object]() def $toArray(xs: Tuple, n: Int) = { diff --git a/tests/pos-deep-subtype/tuples2.scala b/tests/pos-deep-subtype/tuples2.scala index 83921d48ab15..6d4e78edf1f5 100644 --- a/tests/pos-deep-subtype/tuples2.scala +++ b/tests/pos-deep-subtype/tuples2.scala @@ -1,40 +1,40 @@ object Test extends App { val xs0 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) assert(xs0(15) == 16) - // 2.733s + // 2.787s val xs1 = xs0 ++ xs0 assert(xs1(31) == 16) - // 3.089s + // 3.354s val xs2 = xs1 ++ xs1 assert(xs2(63) == 16) - // 3.329s + // 3.523s val xs3 = xs2 ++ xs2 assert(xs3(127) == 16) - // 3.416s + // 3.722s /* The following operations exhaust the standard stack, but succeed with 
-Xs10m: val xs4 = xs3 ++ xs3 assert(xs4(255) == 16) - // 3.765s + // 4.023s val xs5a = xs3 ++ xs4 assert(xs5a(383) == 16) - // 3.804s + // 4.243s val xs5 = xs4 ++ xs4 assert(xs5(511) == 16) - // 3.866s + // 4.416s val xs6 = xs5 ++ xs5 assert(xs6(1023) == 16) - // 4.115s + // 4.900s val xs7 = xs6 ++ xs6 assert(xs7(2047) == 16) - // 4.846s + // 5.538s */ } From 5a8ac63448e1dd11c8a8f618473195b8ea42ea9a Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 13 Sep 2018 13:17:50 +0200 Subject: [PATCH 48/49] Update rules on match term/type checking Unfortunately, things are not as easy as first hoped for... --- docs/docs/reference/match-types.md | 41 ++++++++++++++++++++---------- tests/pending/pos/matchterm.scala | 12 +++++++++ 2 files changed, 40 insertions(+), 13 deletions(-) create mode 100644 tests/pending/pos/matchterm.scala diff --git a/docs/docs/reference/match-types.md b/docs/docs/reference/match-types.md index 19814cd8a2be..23233ebee9f1 100644 --- a/docs/docs/reference/match-types.md +++ b/docs/docs/reference/match-types.md @@ -129,41 +129,56 @@ Within a match type `Match(S, Cs) <: B`, all occurrences of type variables count ## Typing Rules for Match Expressions -Typing rules for match expressions have to account for the difference between sequential match on the term level and parallel match on the type level. As a running example consider: +Typing rules for match expressions are tricky. First, they need some new form of GADT matching for value parameters. +Second, they have to account for the difference between sequential match on the term level and parallel match on the type level. As a running example consider: ```scala - type M[X] = X match { + type M[+X] = X match { case A => 1 case B => 2 } +``` +We'd like to be able to typecheck +```scala def m[X](x: X): M[X] = x match { + case _: A => 1 // type error + case _: B => 2 // type error + } +``` +Unfortunately, this goes nowhere. Let's try the first case. We have: `x.type <: A` and `x.type <: X`. 
This tells +us nothing useful about `X`, so we cannot reduce `M` in order to show that the right hand side of the case is valid. + +The following variant is more promising: +```scala + def m(x: Any): M[x.type] = x match { case _: A => 1 case _: B => 2 } ``` -As a first approximation, the typing rules for match expressions are as usual. E.g. to typecheck the first case `case _: A => 1` of the definition of `m` above, GADT matching will produce the constraint `X <: A`. Therefore, `M[X]` reduces to the singleton type `1`. -The right hand side `1` of the case conforms to this type, so the case typechecks. Typechecking the second case proceeds similarly. +To make this work, we'd need a new form of GADT checking: If the scrutinee is a term variable `s`, we can make use of +the fact that `s.type` must conform to the pattern's type and derive a GADT constraint from that. For the first case above, +this would be the constraint `x.type <: A`. The new aspect here is that we need GADT constraints over singleton types where +before we just had constraints over type parameters. + +Assuming this extension, we can then try to typecheck as usual. E.g. to typecheck the first case `case _: A => 1` of the definition of `m` above, GADT matching will produce the constraint `x.type <: A`. Therefore, `M[x.type]` reduces to the singleton type `1`. The right hand side `1` of the case conforms to this type, so the case typechecks. Typechecking the second case proceeds similarly. -However, it turns out that these rules are not enough for type soundness. To see this, assume that `A` and `B` are traits that are both extended by a common class `C`. In this case, `M[C]` reduces to `1 & 2`, but `m(new C)` reduces to `1`. So the type of the application `m(new C)` does not match the reduced result type of `m`, which means soundness is violated. +However, it turns out that these rules are not enough for type soundness. 
To see this, assume that `A` and `B` are traits that are both extended by a common class `C`. In this case, and assuming `c: C`, `M[c.type]` reduces to `1 & 2`, but `m(c)` reduces to `1`. So the type of the application `m(c)` does not match the reduced result type of `m`, which means soundness is violated. -To plug the soundness hole, we have to tighten the typing rules for match expressions. In the example above we need to also consider the case where the scrutinee type `X` is a subtype of `A` and `B`. In this case, the match expression still returns `1` but the match type `M[X]` reduces to `1 & 2`, which means there should be a type error. However, this second check can be omitted if `A` and `B` are types that don't overlap. We can omit the check because in that case there is no scrutinee value `x` that could reduce to `1`, so no discrepancy can arise at runtime. +To plug the soundness hole, we have to tighten the typing rules for match expressions. In the example above we need to also consider the case where the scrutinee `x` conforms to `A` and `B`. In this case, the match expression still returns `1` but the match type `M[x.type]` reduces to `1 & 2`, which means there should be a type error. However, this second check can be omitted if `A` and `B` are types that don't overlap. We can omit the check because in that case there is no scrutinee value `x` that could reduce to `1`, so no discrepancy can arise at runtime. More generally, we proceeed as follows: When typechecking the `i`th case of a match expression ``` - t match { case P_1 => t_1 ... case P_n => t_n + x match { case P_1 => t_1 ... case P_n => t_n ``` -where `t` has type `T` and `t_i` has type `T_i` -against an expected match type `R`: +where `t_i` has type `T_i` against an expected match type `R`: 1. Determine all maximal sequences of patterns `P_j_1, ..., P_j_m` that follow `P_i` in the match expression and that do overlap with `P_i`. 
That is, `P_i, P_j_1, ..., P_j_m` all match at least one common value. - 2. For each such sequence, verify that `T_i <: R` under the GADT constraint arising from matching the scrutinee type `T` against all of the patterns `P_i, P_j_1, ..., P_j_m`. + 2. For each such sequence, verify that `T_i <: R` under the GADT constraint arising from matching the scrutinee `x` against all of the patterns `P_i, P_j_1, ..., P_j_m`. -In the example above, `A` and `B` would be overlapping because they have the common subclass `C`. Hence, we have to check that the right-hand side `1` is a subtype of `M[X]` -under the assumptions that `X <: A` and `X <: B`. Under these assumptions `M[X]` reduces -to `1 & 2`, which gives a type error. +In the example above, `A` and `B` would be overlapping because they have the common subclass `C`. Hence, we have to check that the right-hand side `1` is a subtype of `M[x.type]` under the assumptions that `x.type <: A` and `x.type <: B`. Under these assumptions `M[x.type]` reduces to `1 & 2`, which gives a type error. For simplicity, we have disregarded the `null` value in this discussion. `null` does not cause a fundamental problem but complicates things somewhat because some forms of patterns do not match `null`. diff --git a/tests/pending/pos/matchterm.scala b/tests/pending/pos/matchterm.scala new file mode 100644 index 000000000000..cb84e3efedfb --- /dev/null +++ b/tests/pending/pos/matchterm.scala @@ -0,0 +1,12 @@ +case class A() +case class B() +object Test { + type T[X] = X match { + case A => Int + case B => String + } + def f(x: Any): T[x.type] = x match { + case A() => 1 + case B() => "" + } +} \ No newline at end of file From 7b548dbd0b6734f2c92ace2f55a33ccdeefb1ffd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Thu, 13 Sep 2018 17:23:28 +0200 Subject: [PATCH 49/49] Go back to reducing match types sequentially However, need non-overlapping patterns in order to discard a pattern. 
--- .../src/dotty/tools/dotc/core/Types.scala | 47 ++++++++++++++----- docs/docs/reference/match-types.md | 34 ++++---------- tests/neg-custom-args/matchtype-loop.scala | 4 +- 3 files changed, 45 insertions(+), 40 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index aef9a70dd2c4..e5c0c24daa5c 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -3595,10 +3595,38 @@ object Types { override def tryNormalize(implicit ctx: Context): Type = reduced.normalized + /** Switch to choose parallel or sequential reduction */ + private final val reduceInParallel = false + + final def cantPossiblyMatch(cas: Type)(implicit ctx: Context) = + true // should be refined if we allow overlapping cases + def reduced(implicit ctx: Context): Type = { val trackingCtx = ctx.fresh.setTypeComparerFn(new TrackingTypeComparer(_)) val cmp = trackingCtx.typeComparer.asInstanceOf[TrackingTypeComparer] + def reduceSequential(cases: List[Type])(implicit ctx: Context): Type = cases match { + case Nil => NoType + case cas :: cases1 => + val r = cmp.matchCase(scrutinee, cas, instantiate = true) + if (r.exists) r + else if (cantPossiblyMatch(cas)) reduceSequential(cases1) + else NoType + } + + def reduceParallel(implicit ctx: Context) = { + val applicableBranches = cases + .map(cmp.matchCase(scrutinee, _, instantiate = true)(trackingCtx)) + .filter(_.exists) + applicableBranches match { + case Nil => NoType + case applicableBranch :: Nil => applicableBranch + case _ => + record(i"MatchType.multi-branch") + ctx.typeComparer.glb(applicableBranches) + } + } + def isRelevant(tp: Type) = tp match { case tp: TypeParamRef => ctx.typerState.constraint.entry(tp).exists case tp: TypeRef => ctx.gadt.bounds.contains(tp.symbol) @@ -3631,18 +3659,13 @@ object Types { if (myReduced != null) record("MatchType.reduce cache miss") myReduced = trace(i"reduce match type $this", typr, show 
= true) { - if (defn.isBottomType(scrutinee)) defn.NothingType - else { - val applicableBranches = cases - .map(cmp.matchCase(scrutinee, _, instantiate = true)(trackingCtx)) - .filter(_.exists) - applicableBranches match { - case Nil => NoType - case applicableBranch :: Nil => applicableBranch - case _ => - record(i"MatchType.multi-branch") - ctx.typeComparer.glb(applicableBranches) - } + try + if (defn.isBottomType(scrutinee)) defn.NothingType + else if (reduceInParallel) reduceParallel(trackingCtx) + else reduceSequential(cases)(trackingCtx) + catch { + case ex: Throwable => + handleRecursive("reduce type ", i"$scrutinee match ...", ex) } } updateReductionContext() diff --git a/docs/docs/reference/match-types.md b/docs/docs/reference/match-types.md index 23233ebee9f1..03fa5b4b677e 100644 --- a/docs/docs/reference/match-types.md +++ b/docs/docs/reference/match-types.md @@ -84,15 +84,12 @@ Using `can-reduce`, we can now define match type reduction proper in the `reduce ``` Match(S, C1, ..., Cn) reduces-to T ``` -if `Ci_1, ..., Ci_k` is a maximal non-empty subset of `C1, ..., Cn` such that for each `i_j`: +if ``` - Match(S, C1, ..., Cn) can-reduce i_j, Ti_j + Match(S, C1, ..., Cn) can-reduce i, T ``` -and -``` - T = Ti_1 & ... & Ti_k -``` -In other words, a match reduces to the intersection of all right hand sides it can reduce to. This "parallel" notion of reduction was picked for its nice algebraic properties, even though it does not correspond directly to the operational semantics of pattern matching on terms, where the first matching case is chosen. +and, for `j` in `1..i-1`: `C_j` is disjoint from `C_i`, or else `S` cannot possibly match `C_j`. +See the section on overlapping patterns for an elaboration of "disjoint" and "cannot possibly match". ## Subtyping Rules for Match Types @@ -159,26 +156,11 @@ the fact that `s.type` must conform to the pattern's type and derive a GADT cons this would be the constraint `x.type <: A`. 
The new aspect here is that we need GADT constraints over singleton types where before we just had constraints over type parameters. -Assuming this extension, we can then try to typecheck as usual. E.g. to typecheck the first case `case _: A => 1` of the definition of `m` above, GADT matching will produce the constraint `x.type <: A`. Therefore, `M[x.type]` reduces to the singleton type `1`. The right hand side `1` of the case conforms to this type, so the case typechecks. Typechecking the second case proceeds similarly. - -However, it turns out that these rules are not enough for type soundness. To see this, assume that `A` and `B` are traits that are both extended by a common class `C`. In this case, and assuming `c: C`, `M[c.type]` reduces to `1 & 2`, but `m(c)` reduces to `1`. So the type of the application `m(c)` does not match the reduced result type of `m`, which means soundness is violated. - -To plug the soundness hole, we have to tighten the typing rules for match expressions. In the example above we need to also consider the case where the scrutinee `x` conforms to `A` and `B`. In this case, the match expression still returns `1` but the match type `M[x.type]` reduces to `1 & 2`, which means there should be a type error. However, this second check can be omitted if `A` and `B` are types that don't overlap. We can omit the check because in that case there is no scrutinee value `x` that could reduce to `1`, so no discrepancy can arise at runtime. - -More generally, we proceeed as follows: - -When typechecking the `i`th case of a match expression -``` - x match { case P_1 => t_1 ... case P_n => t_n -``` -where `t_i` has type `T_i` against an expected match type `R`: - - 1. Determine all maximal sequences of - patterns `P_j_1, ..., P_j_m` that follow `P_i` in the match expression and that do overlap with `P_i`. That is, `P_i, P_j_1, ..., P_j_m` all match at least one common value. - - 2. 
For each such sequence, verify that `T_i <: R` under the GADT constraint arising from matching the scrutinee `x` against all of the patterns `P_i, P_j_1, ..., P_j_m`. +Assuming this extension, we can then try to typecheck as usual. E.g. to typecheck the first case `case _: A => 1` of the definition of `m` above, GADT matching will produce the constraint `x.type <: A`. Therefore, `M[x.type]` reduces to the singleton type `1`. The right hand side `1` of the case conforms to this type, so the case typechecks. -In the example above, `A` and `B` would be overlapping because they have the common subclass `C`. Hence, we have to check that the right-hand side `1` is a subtype of `M[x.type]` under the assumptions that `x.type <: A` and `x.type <: B`. Under these assumptions `M[x.type]` reduces to `1 & 2`, which gives a type error. +Typechecking the second case hits a snag, though. In general, the assumption `x.type <: B` is not enough to prove that +`M[x.type]` reduces to `2`. However we can reduce `M[x.type]` to `2` if the types `A` and `B` do not overlap. +So correspondence of match terms to match types is feasible only in the case of non-overlapping patterns. For simplicity, we have disregarded the `null` value in this discussion. `null` does not cause a fundamental problem but complicates things somewhat because some forms of patterns do not match `null`. diff --git a/tests/neg-custom-args/matchtype-loop.scala b/tests/neg-custom-args/matchtype-loop.scala index 4a7c1ab8d988..316897b808a5 100644 --- a/tests/neg-custom-args/matchtype-loop.scala +++ b/tests/neg-custom-args/matchtype-loop.scala @@ -11,7 +11,7 @@ object Test { val x: Int = g[Int] // error: found: L[Int], required: Int def aa: LL[Boolean] = ??? - def bb: LL[Int] = ??? // error: recursion limit exceeded with subtype LazyRef(Test.LL[Int]) <:< Int + def bb: LL[Int] = ??? // error: recursion limit exceeded with reduce type LazyRef(Test.LL[Int]) match ... def gg[X]: LL[X] = ??? 
- val xx: Int = gg[Int] // error: recursion limit exceeded with subtype LazyRef(Test.LL[Int]) <:< Int + val xx: Int = gg[Int] // error: recursion limit exceeded with reduce type LazyRef(Test.LL[Int]) match ... }