diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 471a9953c4f0..515c994f25e7 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -652,6 +652,8 @@ object desugar { tdef, evidenceBuf, (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, inventGivenName, Nil) + if tdef.mods.flags.is(Into, butNot = Opaque) then + report.error(ModifierNotAllowedForDefinition(Into), flagSourcePos(tdef, Into)) if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) /** The expansion of a class definition. See inline comments for what is involved */ @@ -2268,11 +2270,8 @@ object desugar { assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode) Select(t, op.name) case PrefixOp(op, t) => - if op.name == tpnme.into then - Annotated(t, New(ref(defn.IntoAnnot.typeRef), Nil :: Nil)) - else - val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme - Select(t, nspace.UNARY_PREFIX ++ op.name) + val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme + Select(t, nspace.UNARY_PREFIX ++ op.name) case ForDo(enums, body) => makeFor(nme.foreach, nme.foreach, enums, body) orElse tree case ForYield(enums, body) => diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 45e17794ec96..a6a80ef8c323 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -9,6 +9,7 @@ import Annotations.Annotation import NameKinds.ContextBoundParamName import typer.ConstFold import reporting.trace +import util.SrcPos import Decorators.* import Constants.Constant @@ -522,6 +523,10 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] if id.span == result.span.startPos => Some(result) case _ => None end ImpureByNameTypeTree + + /** The position of the modifier associated with given flag in this definition. 
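+   *  Falls back to the position of the whole definition if no modifier carries exactly that flag.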
*/ + def flagSourcePos(mdef: DefTree, flag: FlagSet): SrcPos = + mdef.mods.mods.find(_.flags == flag).getOrElse(mdef).srcPos } trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 145c61584fcc..1afc188bf669 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -236,6 +236,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Tracked()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Tracked) + case class Into()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Into) + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } @@ -573,12 +575,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, EmptyTree) .withFlags(flags) - def isInto(t: Tree)(using Context): Boolean = t match - case PrefixOp(Ident(tpnme.into), _) => true - case Function(_, res) => isInto(res) - case Parens(t) => isInto(t) - case _ => false - def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = params match case Nil => tpt diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 4db4d868fd86..02b7a1584f42 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -104,10 +104,12 @@ extension (tp: Type) final def isTrackableRef(using Context): Boolean = tp match case _: (ThisType | TermParamRef) => true case tp: TermRef => - ((tp.prefix eq NoPrefix) - || tp.symbol.isField && !tp.symbol.isStatic && tp.prefix.isTrackableRef - || tp.isCap - ) && !tp.symbol.isOneOf(UnstableValueFlags) + !tp.underlying.exists // might happen during construction of lambdas with annotations on parameters + || + ((tp.prefix eq NoPrefix) + || tp.symbol.isField && !tp.symbol.isStatic && tp.prefix.isTrackableRef + || tp.isCap + ) && !tp.symbol.isOneOf(UnstableValueFlags) case tp: TypeRef => tp.symbol.isType && tp.derivesFrom(defn.Caps_CapSet) case tp: TypeParamRef => @@ -637,19 +639,6 @@ class CleanupRetains(using Context) extends TypeMap: RetainingType(tp, Nil, byName = annot.symbol == defn.RetainsByNameAnnot) case _ => mapOver(tp) -/** A typemap that follows aliases and keeps their transformed results if - * there is a change. - */ -trait FollowAliasesMap(using Context) extends TypeMap: - var follow = true // Used for debugging so that we can compare results with and w/o following. - def mapFollowingAliases(t: Type): Type = - val t1 = t.dealiasKeepAnnots - if follow && (t1 ne t) then - val t2 = apply(t1) - if t2 ne t1 then t2 - else t - else mapOver(t) - /** An extractor for `caps.reachCapability(ref)`, which is used to express a reach * capability as a tree in a @retains annotation. 
*/ diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 14f2491214e2..641d8977de22 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -753,6 +753,9 @@ class Definitions { @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass + @tu lazy val ConversionModule : Symbol = ConversionClass.companionModule + @tu lazy val ConversionModuleClass: ClassSymbol = ConversionModule.moduleClass.asClass + @tu lazy val Conversion_into : Symbol = ConversionModuleClass.requiredType("into") @tu lazy val StringAddClass : ClassSymbol = requiredClass("scala.runtime.StringAdd") @tu lazy val StringAdd_+ : Symbol = StringAddClass.requiredMethod(nme.raw.PLUS) @@ -1039,8 +1042,6 @@ class Definitions { @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") - @tu lazy val IntoAnnot: ClassSymbol = requiredClass("scala.annotation.into") - @tu lazy val IntoParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.$into") @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") @@ -1058,6 +1059,7 @@ class Definitions { // @tu lazy val ScalaStrictFPAnnot: ClassSymbol = requiredClass("scala.annotation.strictfp") @tu lazy val ScalaStaticAnnot: ClassSymbol = requiredClass("scala.annotation.static") @tu lazy val SerialVersionUIDAnnot: ClassSymbol = requiredClass("scala.SerialVersionUID") + @tu lazy val SilentIntoAnnot: ClassSymbol = requiredClass("scala.annotation.internal.$into") @tu lazy val TailrecAnnot: ClassSymbol = requiredClass("scala.annotation.tailrec") @tu lazy val ThreadUnsafeAnnot: ClassSymbol = requiredClass("scala.annotation.threadUnsafe") @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") @@ -1117,7 +1119,7 @@ class Definitions { // Set of annotations that are not printed in types except under -Yprint-debug @tu lazy val SilentAnnots: Set[Symbol] = - Set(InlineParamAnnot, ErasedParamAnnot, RefineOverrideAnnot) + Set(InlineParamAnnot, ErasedParamAnnot, RefineOverrideAnnot, SilentIntoAnnot) // A list of annotations that are commonly used to indicate that a field/method argument or return // type is not null. 
These annotations are used by the nullification logic in JavaNullInterop to @@ -1387,6 +1389,9 @@ class Definitions { final def isNamedTuple_From(sym: Symbol)(using Context): Boolean = sym.name == tpnme.From && sym.owner == NamedTupleModule.moduleClass + final def isInto(sym: Symbol)(using Context): Boolean = + sym.name == tpnme.into && sym.owner == ConversionModuleClass + private val compiletimePackageAnyTypes: Set[Name] = Set( tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString ) diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 57bf870c6b64..6adf899e9da0 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -252,7 +252,7 @@ object Flags { /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not), * or an accessor of such a field. */ - val (_, ParamAccessor @ _, _) = newFlags(14, "") + val (ParamAccessorOrInto @ _, ParamAccessor @ _, Into @ _) = newFlags(14, "", "into") /** A value or class implementing a module */ val (Module @ _, ModuleVal @ _, ModuleClass @ _) = newFlags(15, "module") @@ -452,7 +452,7 @@ object Flags { commonFlags(Private, Protected, Final, Case, Implicit, Given, Override, JavaStatic, Transparent, Erased) val TypeSourceModifierFlags: FlagSet = - CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open + CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open | Into val TermSourceModifierFlags: FlagSet = CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy | Tracked @@ -467,7 +467,7 @@ object Flags { * TODO: Should check that FromStartFlags do not change in completion */ val FromStartFlags: FlagSet = commonFlags( - Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, + Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessorOrInto, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, OuterOrCovariant, LabelOrContravariant, CaseAccessor, Tracked, Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index dbdb46aba334..3a92c91913f4 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -317,4 +317,11 @@ object NamerOps: ann.tree match case ast.tpd.WitnessNamesAnnot(witnessNames) => addContextBoundCompanionFor(sym, witnessNames, Nil) + + /** if `sym` is a term parameter or parameter accessor, map all occurrences of + * `into[T]` in its type to `T @$into`. 
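+   *  The `$into` annotation is silent (a member of `SilentAnnots`), so inside the method
+   *  body the parameter behaves as its underlying type; `MethodType.fromSymbols` maps
+   *  `T @$into` back to `into[T]` via `TypeOps.revealInto` for the externally visible signature.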
+ */ + extension (tp: Type) + def suppressIntoIfParam(sym: Symbol)(using Context): Type = + if sym.isOneOf(TermParamOrAccessor) then TypeOps.suppressInto(tp) else tp end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index c33c795571e6..6bc587944dc5 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -132,7 +132,6 @@ object StdNames { val EXCEPTION_RESULT_PREFIX: N = "exceptionResult" val EXPAND_SEPARATOR: N = str.EXPAND_SEPARATOR val IMPORT: N = "" - val INTO: N = "$into" val MODULE_SUFFIX: N = str.MODULE_SUFFIX val OPS_PACKAGE: N = "" val OVERLOADED: N = "" diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index a1e26c20fdbb..406bb7babce5 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -18,6 +18,7 @@ import typer.ForceDegree import typer.Inferencing.* import typer.IfBottom import reporting.TestingReporter +import Annotations.Annotation import cc.{CapturingType, derivedCapturingType, CaptureSet, captureSet, isBoxed, isBoxedCapturing} import CaptureSet.{CompareResult, IdentityCaptRefMap, VarState} @@ -936,6 +937,28 @@ object TypeOps: class StripTypeVarsMap(using Context) extends TypeMap: def apply(tp: Type) = mapOver(tp).stripTypeVar + /** Map no-flip covariant occurrences of `into[T]` to `T @$into` */ + def suppressInto(using Context) = new FollowAliasesMap: + def apply(t: Type): Type = t match + case AppliedType(tycon: TypeRef, arg :: Nil) if variance >= 0 && defn.isInto(tycon.symbol) => + AnnotatedType(arg, Annotation(defn.SilentIntoAnnot, util.Spans.NoSpan)) + case _: MatchType | _: LazyRef => + t + case _ => + mapFollowingAliases(t) + + /** Map no-flip covariant occurrences of `T @$into` to `into[T]` */ + def revealInto(using Context) = new FollowAliasesMap: + def apply(t: Type): Type = t match + case AnnotatedType(t1, ann) if variance >= 0 && ann.symbol == defn.SilentIntoAnnot => + AppliedType( + defn.ConversionModule.termRef.select(defn.Conversion_into), // the external reference to the opaque type + t1 :: Nil) + case _: MatchType | _: LazyRef => + t + case _ => + mapFollowingAliases(t) + /** Apply [[Type.stripTypeVar]] recursively. */ def stripTypeVars(tp: Type)(using Context): Type = new StripTypeVarsMap().apply(tp) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index ebff52b002a1..7c4f03c56d7c 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -43,7 +43,6 @@ import CaptureSet.{CompareResult, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe -import dotty.tools.dotc.cc.ccConfig object Types extends TypeUtils { @@ -446,11 +445,34 @@ object Types extends TypeUtils { def isRepeatedParam(using Context): Boolean = typeSymbol eq defn.RepeatedParamClass - /** Is this a parameter type that allows implicit argument converson? */ + /** Is this type of the form `compiletime.into[T]`, which means it can be the + * target of an implicit converson without requiring a language import? 
+ */ def isInto(using Context): Boolean = this match - case AnnotatedType(_, annot) => annot.symbol == defn.IntoParamAnnot + case AppliedType(tycon: TypeRef, arg :: Nil) => defn.isInto(tycon.symbol) case _ => false + /** Is this type a legal target type for an implicit conversion, so that + * no `implicitConversions` language import is necessary? + */ + def isConversionTargetType(using Context): Boolean = + dealias(KeepOpaques).match + case tp: TypeRef => + (tp.symbol.isClass || tp.symbol.isOpaqueAlias) && tp.symbol.is(Into) + case tp @ AppliedType(tycon, _) => + isInto || tycon.isConversionTargetType + case tp: AndOrType => + tp.tp1.isConversionTargetType && tp.tp2.isConversionTargetType + case tp: TypeVar => + false + case tp: MatchType => + val tp1 = tp.reduced + (tp1 ne tp) && tp1.isConversionTargetType + case tp: RefinedType => + tp.parent.isConversionTargetType + case _ => + false + /** Is this the type of a method that has a repeated parameter type as * last parameter type? */ @@ -1470,48 +1492,48 @@ object Types extends TypeUtils { case Atoms.Unknown => Atoms.Unknown case _ => Atoms.Unknown - private def dealias1(keep: AnnotatedType => Context ?=> Boolean, keepOpaques: Boolean)(using Context): Type = this match { + def dealias(keeps: Keeps)(using Context): Type = this match case tp: TypeRef => - if (tp.symbol.isClass) tp - else tp.info match { - case TypeAlias(alias) if !(keepOpaques && tp.symbol.is(Opaque)) => - alias.dealias1(keep, keepOpaques) + if tp.symbol.isClass then tp + else tp.info match + case TypeAlias(alias) if (keeps & KeepOpaques) == 0 || !tp.symbol.is(Opaque) => + alias.dealias(keeps) case _ => tp - } case app @ AppliedType(tycon, _) => - val tycon1 = tycon.dealias1(keep, keepOpaques) - if (tycon1 ne tycon) app.superType.dealias1(keep, keepOpaques) + val tycon1 = tycon.dealias(keeps) + if tycon1 ne tycon then app.superType.dealias(keeps) else this case tp: TypeVar => val tp1 = tp.instanceOpt - if (tp1.exists) tp1.dealias1(keep, keepOpaques) else tp + if tp1.exists then tp1.dealias(keeps) else tp case tp: AnnotatedType => - val parent1 = tp.parent.dealias1(keep, keepOpaques) - if keep(tp) then tp.derivedAnnotatedType(parent1, tp.annot) + val parent1 = tp.parent.dealias(keeps) + if (keeps & KeepAnnots) != 0 + || (keeps & KeepRefiningAnnots) != 0 && tp.isRefining + then tp.derivedAnnotatedType(parent1, tp.annot) else tp match case tp @ CapturingType(parent, refs) => tp.derivedCapturingType(parent1, refs) case _ => parent1 case tp: LazyRef => - tp.ref.dealias1(keep, keepOpaques) + tp.ref.dealias(keeps) case _ => this - } /** Follow aliases and dereference LazyRefs, annotated types and instantiated * TypeVars until type is no longer alias type, annotated type, LazyRef, * or instantiated type variable. */ - final def dealias(using Context): Type = dealias1(keepNever, keepOpaques = false) + final def dealias(using Context): Type = dealias(KeepNothing) /** Follow aliases and dereference LazyRefs and instantiated TypeVars until type * is no longer alias type, LazyRef, or instantiated type variable. * Goes through annotated types and rewraps annotations on the result. 
*/ - final def dealiasKeepAnnots(using Context): Type = dealias1(keepAlways, keepOpaques = false) + final def dealiasKeepAnnots(using Context): Type = dealias(KeepAnnots) /** Like `dealiasKeepAnnots`, but keeps only refining annotations */ - final def dealiasKeepRefiningAnnots(using Context): Type = dealias1(keepIfRefining, keepOpaques = false) + final def dealiasKeepRefiningAnnots(using Context): Type = dealias(KeepRefiningAnnots) /** Like dealias, but does not follow aliases if symbol is Opaque. This is * necessary if we want to look at the info of a symbol containing opaque @@ -1529,13 +1551,13 @@ object Types extends TypeUtils { * Here, we dealias symbol infos at the start of capture checking in operation `fluidify`. * We have to be careful not to accidentally reveal opaque aliases when doing so. */ - final def dealiasKeepOpaques(using Context): Type = dealias1(keepNever, keepOpaques = true) + final def dealiasKeepOpaques(using Context): Type = dealias(KeepOpaques) /** Like dealiasKeepAnnots, but does not follow opaque aliases. See `dealiasKeepOpaques` * for why this is sometimes necessary. */ final def dealiasKeepAnnotsAndOpaques(using Context): Type = - dealias1(keepAlways, keepOpaques = true) + dealias(KeepAnnots | KeepOpaques) /** Approximate this type with a type that does not contain skolem types. */ final def deskolemized(using Context): Type = @@ -1567,19 +1589,18 @@ object Types extends TypeUtils { case tp: AppliedType => tp.underlyingNormalizable case _ => NoType - private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = { - val res = this.widen.dealias1(keep, keepOpaques = false) - if (res eq this) res else res.widenDealias1(keep) - } + private def widenDealias(keeps: Keeps)(using Context): Type = + val tp1 = widen.dealias(keeps) + if tp1 eq this then this else tp1.widenDealias(keeps) /** Perform successive widenings and dealiasings until none can be applied anymore */ - final def widenDealias(using Context): Type = widenDealias1(keepNever) + final def widenDealias(using Context): Type = widenDealias(KeepNothing) /** Perform successive widenings and dealiasings while rewrapping annotations, until none can be applied anymore */ - final def widenDealiasKeepAnnots(using Context): Type = widenDealias1(keepAlways) + final def widenDealiasKeepAnnots(using Context): Type = widenDealias(KeepAnnots) /** Perform successive widenings and dealiasings while rewrapping refining annotations, until none can be applied anymore */ - final def widenDealiasKeepRefiningAnnots(using Context): Type = widenDealias1(keepIfRefining) + final def widenDealiasKeepRefiningAnnots(using Context): Type = widenDealias(KeepRefiningAnnots) /** Widen from constant type to its underlying non-constant * base type. @@ -1966,8 +1987,7 @@ object Types extends TypeUtils { } defn.FunctionNOf( mt.paramInfos.mapConserve: - _.translateFromRepeated(toArray = isJava) - .mapIntoAnnot(defn.IntoParamAnnot, null), + _.translateFromRepeated(toArray = isJava), result1, isContextual) if mt.hasErasedParams then defn.PolyFunctionOf(mt) @@ -2015,38 +2035,6 @@ object Types extends TypeUtils { case _ => this } - /** A mapping between mapping one kind of into annotation to another or - * dropping into annotations. - * @param from the into annotation to map - * @param to either the replacement annotation symbol, or `null` - * in which case the `from` annotations are dropped. 
- */ - def mapIntoAnnot(from: ClassSymbol, to: ClassSymbol | Null)(using Context): Type = this match - case self @ AnnotatedType(tp, annot) => - val tp1 = tp.mapIntoAnnot(from, to) - if annot.symbol == from then - if to == null then tp1 - else AnnotatedType(tp1, Annotation(to, annot.tree.span)) - else self.derivedAnnotatedType(tp1, annot) - case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.RepeatedParamClass => - val arg1 = arg.mapIntoAnnot(from, to) - if arg1 eq arg then this - else AppliedType(tycon, arg1 :: Nil) - case defn.FunctionOf(argTypes, resType, isContextual) => - val resType1 = resType.mapIntoAnnot(from, to) - if resType1 eq resType then this - else defn.FunctionOf(argTypes, resType1, isContextual) - case RefinedType(parent, rname, mt: MethodOrPoly) => - val mt1 = mt.mapIntoAnnot(from, to) - if mt1 eq mt then this - else RefinedType(parent.mapIntoAnnot(from, to), rname, mt1) - case mt: MethodOrPoly => - mt.derivedLambdaType(resType = mt.resType.mapIntoAnnot(from, to)) - case tp: ExprType => - tp.derivedExprType(tp.resType.mapIntoAnnot(from, to)) - case _ => - this - /** The set of distinct symbols referred to by this type, after all aliases are expanded */ def coveringSet(using Context): Set[Symbol] = (new CoveringSetAccumulator).apply(Set.empty[Symbol], this) @@ -4214,11 +4202,11 @@ object Types extends TypeUtils { /** Produce method type from parameter symbols, with special mappings for repeated * and inline parameters: - * - replace @repeated annotations on Seq or Array types by types + * - replace `@repeated` annotations on Seq or Array types by types * - map into annotations to $into annotations - * - add @inlineParam to inline parameters - * - add @erasedParam to erased parameters - * - wrap types of parameters that have an @allowConversions annotation with Into[_] + * - add `@inlineParam` to inline parameters + * - add `@erasedParam` to erased parameters + * - map `T @$into` types to `into[T]` */ def fromSymbols(params: List[Symbol], resultType: Type)(using Context): MethodType = apply(params.map(_.name.asTermName))( @@ -4232,9 +4220,7 @@ object Types extends TypeUtils { def addAnnotation(tp: Type, cls: ClassSymbol, param: Symbol): Type = tp match case ExprType(resType) => ExprType(addAnnotation(resType, cls, param)) case _ => AnnotatedType(tp, Annotation(cls, param.span)) - var paramType = pinfo - .annotatedToRepeated - .mapIntoAnnot(defn.IntoAnnot, defn.IntoParamAnnot) + var paramType = TypeOps.revealInto(pinfo).annotatedToRepeated if param.is(Inline) then paramType = addAnnotation(paramType, defn.InlineParamAnnot, param) if param.is(Erased) then @@ -6157,7 +6143,7 @@ object Types extends TypeUtils { case tp: TypeAlias => ensureTrackable(tp.alias) case _ => - assert(false, i"not a trackable CaptureRef: $result with underlying ${result.underlyingIterator.toList}") + assert(false, i"not a trackable CaptureRef: $result of class ${result.getClass} with underlying ${result.underlyingIterator.toList}") ensureTrackable(result) /** A restriction of the inverse to a function on tracked CaptureRefs */ @@ -6169,6 +6155,18 @@ object Types extends TypeUtils { end BiTypeMap + /** A typemap that follows aliases and keeps their transformed results if + * there is a change. 
+ */ + trait FollowAliasesMap(using Context) extends TypeMap: + def mapFollowingAliases(t: Type): Type = + val t1 = t.dealiasKeepAnnots + if t1 ne t then + val t2 = apply(t1) + if t2 ne t1 then t2 + else t + else mapOver(t) + abstract class TypeMap(implicit protected var mapCtx: Context) extends VariantTraversal with (Type => Type) { thisMap => @@ -7080,6 +7078,15 @@ object Types extends TypeUtils { def isStable = true } + // ----- Dealias keep flags -------------------------------------------- + + private type Keeps = Int + + private val KeepNothing = 0 + private val KeepAnnots = 1 + private val KeepRefiningAnnots = 2 + private val KeepOpaques = 4 + // ----- Debug --------------------------------------------------------- @sharable var debugTrace: Boolean = false diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index cf9885d16d1f..f718fc892d0b 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -852,6 +852,9 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if (flags.is(ParamAccessor) && sym.isTerm && !sym.isSetter) flags = flags &~ ParamAccessor // we only generate a tag for parameter setters pickleFlags(flags, sym.isTerm) + if flags.is(Into) then + // Temporary measure until we can change TastyFormat to include an INTO tag + pickleAnnotation(sym, mdef, Annotation(defn.SilentIntoAnnot, util.Spans.NoSpan)) val annots = sym.annotations.foreach(pickleAnnotation(sym, mdef, _)) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index d6f2812dad0d..766f6019faf9 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -667,7 +667,11 @@ class TreeUnpickler(reader: TastyReader, } val annotOwner = if sym.owner.isClass then newLocalDummy(sym.owner) else sym.owner - val annots = annotFns.map(_(annotOwner)) + var annots = annotFns.map(_(annotOwner)) + if annots.exists(_.symbol == defn.SilentIntoAnnot) then + // Temporary measure until we can change TastyFormat to include an INTO tag + sym.setFlag(Into) + annots = annots.filterNot(_.symbol == defn.SilentIntoAnnot) sym.annotations = annots if sym.isOpaqueAlias then sym.setFlag(Deferred) val isScala2MacroDefinedInScala3 = flags.is(Macro, butNot = Inline) && flags.is(Erased) @@ -933,7 +937,7 @@ class TreeUnpickler(reader: TastyReader, DefDef(paramDefss, tpt) case VALDEF => val tpt = readTpt()(using localCtx) - sym.info = tpt.tpe + sym.info = tpt.tpe.suppressIntoIfParam(sym) ValDef(tpt) case TYPEDEF | TYPEPARAM => if (sym.isClass) { @@ -978,7 +982,7 @@ class TreeUnpickler(reader: TastyReader, case PARAM => val tpt = readTpt()(using localCtx) assert(nothingButMods(end)) - sym.info = tpt.tpe + sym.info = tpt.tpe.suppressIntoIfParam(sym) ValDef(tpt) } goto(end) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 1610362c3323..b547f75bc021 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -78,9 +78,6 @@ object Parsers { enum ParseKind: case Expr, Type, Pattern - enum IntoOK: - case Yes, No, Nested - type StageKind = Int object StageKind { val None = 0 @@ -1590,8 +1587,8 @@ object Parsers { /** Same as [[typ]], but if this results in a wildcard it emits a 
syntax error and * returns a tree for type `Any` instead. */ - def toplevelTyp(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = - rejectWildcardType(typ(intoOK, inContextBound)) + def toplevelTyp(inContextBound: Boolean = false): Tree = + rejectWildcardType(typ(inContextBound)) private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) @@ -1656,21 +1653,12 @@ object Parsers { * | `(' [ FunArgType {`,' FunArgType } ] `)' * | '(' [ TypedFunParam {',' TypedFunParam } ')' * MatchType ::= InfixType `match` <<< TypeCaseClauses >>> - * IntoType ::= [‘into’] IntoTargetType - * | ‘( IntoType ‘)’ - * IntoTargetType ::= Type - * | FunTypeArgs (‘=>’ | ‘?=>’) IntoType */ - def typ(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = + def typ(inContextBound: Boolean = false): Tree = val start = in.offset var imods = Modifiers() val erasedArgs: ListBuffer[Boolean] = ListBuffer() - def nestedIntoOK(token: Int) = - if token == TLARROW then IntoOK.No - else if intoOK == IntoOK.Nested then IntoOK.Yes - else intoOK - def functionRest(params: List[Tree]): Tree = val paramSpan = Span(start, in.lastOffset) atSpan(start, in.offset) { @@ -1699,9 +1687,8 @@ object Parsers { else accept(ARROW) - def resType() = typ(nestedIntoOK(token)) val resultType = - if isPure then capturesAndResult(resType) else resType() + if isPure then capturesAndResult(() => typ()) else typ() if token == TLARROW then for case ValDef(_, tpt, _) <- params do if isByNameType(tpt) then @@ -1736,12 +1723,6 @@ object Parsers { syntaxError(ErasedTypesCanOnlyBeFunctionTypes(), implicitKwPos(start)) t - def isIntoPrefix: Boolean = - intoOK == IntoOK.Yes - && in.isIdent(nme.into) - && in.featureEnabled(Feature.into) - && canStartTypeTokens.contains(in.lookahead.token) - def convertToElem(t: Tree): Tree = t match case ByNameTypeTree(t1) => syntaxError(ByNameParameterNotSupported(t), t.span) @@ -1778,32 +1759,6 @@ object Parsers { funArgType() commaSeparatedRest(t, funArg) accept(RPAREN) - - val intoAllowed = - intoOK == IntoOK.Yes - && args.lengthCompare(1) == 0 - && (!canFollowSimpleTypeTokens.contains(in.token) || followingIsVararg()) - val byNameAllowed = in.isArrow || isPureArrow - - def sanitize(arg: Tree): Tree = arg match - case ByNameTypeTree(t) if !byNameAllowed => - syntaxError(ByNameParameterNotSupported(t), t.span) - t - case PrefixOp(id @ Ident(tpnme.into), t) if !intoAllowed => - syntaxError(em"no `into` modifier allowed here", id.span) - t - case Parens(t) => - cpy.Parens(arg)(sanitize(t)) - case arg: FunctionWithMods => - val body1 = sanitize(arg.body) - if body1 eq arg.body then arg - else FunctionWithMods(arg.args, body1, arg.mods, arg.erasedParams).withSpan(arg.span) - case Function(args, res) if !intoAllowed => - cpy.Function(arg)(args, sanitize(res)) - case arg => - arg - val args1 = args.mapConserve(sanitize) - if in.isArrow || isPureArrow || erasedArgs.contains(true) then functionRest(args) else @@ -1824,15 +1779,13 @@ object Parsers { LambdaTypeTree(tparams.mapConserve(stripContextBounds("type lambdas")), toplevelTyp()) else if in.token == ARROW || isPureArrow(nme.PUREARROW) then val arrowOffset = in.skipToken() - val body = toplevelTyp(nestedIntoOK(in.token)) + val body = toplevelTyp() makePolyFunction(tparams, body, "type", Ident(nme.ERROR.toTypeName), start, arrowOffset) else accept(TLARROW) typ() else if in.token == INDENT then enclosed(INDENT, typ()) - else if isIntoPrefix then - PrefixOp(typeIdent(), typ(IntoOK.Nested)) else 
typeRest(infixType(inContextBound)) end typ @@ -2226,9 +2179,7 @@ object Parsers { * | `=>' Type * | `->' [CaptureSet] Type */ - val funArgType: () => Tree = - () => paramTypeOf(() => typ(IntoOK.Yes)) - // We allow intoOK and filter out afterwards in typ() + val funArgType: () => Tree = () => paramTypeOf(() => typ()) /** ParamType ::= ParamValueType * | `=>' ParamValueType @@ -2237,17 +2188,10 @@ object Parsers { def paramType(): Tree = paramTypeOf(paramValueType) /** ParamValueType ::= Type [`*'] - * | IntoType - * | ‘(’ IntoType ‘)’ `*' */ def paramValueType(): Tree = - val t = toplevelTyp(IntoOK.Yes) + val t = toplevelTyp() if isIdent(nme.raw.STAR) then - if !t.isInstanceOf[Parens] && isInto(t) then - syntaxError( - em"""`*` cannot directly follow `into` parameter - |the `into` parameter needs to be put in parentheses""", - in.offset) in.nextToken() atSpan(startOffset(t)): PostfixOp(t, Ident(tpnme.raw.STAR)) @@ -3327,6 +3271,7 @@ object Parsers { case IDENTIFIER => name match { case nme.inline => Mod.Inline() + case nme.into => Mod.Into() case nme.opaque => Mod.Opaque() case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 31f074c3f633..2764715a3209 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -1148,7 +1148,7 @@ object Scanners { val lookahead = LookaheadScanner() while lookahead.nextToken() - lookahead.isNewLine || lookahead.isSoftModifier + lookahead.token == NEWLINE || lookahead.isSoftModifier do () modifierFollowers.contains(lookahead.token) } diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index bc55371ec96a..5b9a62fcb7da 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -299,7 +299,7 @@ object Tokens extends TokensCommon { final val closingParens = BitSet(RPAREN, RBRACKET, RBRACE) - final val softModifierNames = Set(nme.inline, nme.opaque, nme.open, nme.transparent, nme.infix) + final val softModifierNames = Set(nme.inline, nme.into, nme.opaque, nme.open, nme.transparent, nme.infix) def showTokenDetailed(token: Int): String = debugString(token) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 0dcb06ae8c87..c08d29ed212e 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -326,9 +326,6 @@ class PlainPrinter(_ctx: Context) extends Printer { case AnnotatedType(tpe, annot) => if defn.SilentAnnots.contains(annot.symbol) && !printDebug then toText(tpe) - else if (annot.symbol == defn.IntoAnnot || annot.symbol == defn.IntoParamAnnot) - && !printDebug - then atPrec(GlobalPrec)( Str("into ") ~ toText(tpe) ) else if annot.isInstanceOf[CaptureAnnotation] then toTextLocal(tpe) ~ "^" ~ toText(annot) else diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 3d987982cc20..232db7873aad 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -700,9 +700,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { && Feature.ccEnabled && !printDebug && Phases.checkCapturesPhase.exists // 
might be missing on -Ytest-pickler
        then toTextRetainsAnnot
-      else if annot.symbol.enclosingClass == defn.IntoAnnot && !printDebug then
-        atPrec(GlobalPrec):
-          Str("into ") ~ toText(arg)
       else toTextAnnot
     case EmptyTree => ""
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala
index 2eec6579492b..abab36e2259b 100644
--- a/compiler/src/dotty/tools/dotc/typer/Checking.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala
@@ -631,10 +631,11 @@ object Checking {
    */
   def checkWellFormedModule(mdef: untpd.ModuleDef)(using Context) =
     val mods = mdef.mods
-    def flagSourcePos(flag: FlagSet) =
-      mods.mods.find(_.flags == flag).getOrElse(mdef).srcPos
+    def flagSourcePos(flag: Flag) = untpd.flagSourcePos(mdef, flag)
     if mods.is(Open) then
       report.error(ModifierNotAllowedForDefinition(Open), flagSourcePos(Open))
+    if mods.is(Into) then
+      report.error(ModifierNotAllowedForDefinition(Into), flagSourcePos(Into))
     if mods.is(Abstract) then
       report.error(ModifierNotAllowedForDefinition(Abstract), flagSourcePos(Abstract))
     if mods.is(Sealed) then
@@ -1146,7 +1147,7 @@ trait Checking {
       if sym.name == nme.apply
          && sym.owner.derivesFrom(defn.ConversionClass)
          && !sym.info.isErroneous
-         && !expected.isInto
+         && !expected.isConversionTargetType
       then
         def conv = methPart(tree) match
           case Select(qual, _) => qual.symbol.orElse(sym.owner)
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
index 89dc4cf53472..28997baa14cf 100644
--- a/compiler/src/dotty/tools/dotc/typer/Namer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -833,7 +833,9 @@ class Namer { typer: Typer =>
     protected def typeSig(sym: Symbol): Type = original match
       case original: ValDef =>
         if (sym.is(Module)) moduleValSig(sym)
-        else valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope)
+        else
+          valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope)
+            .suppressIntoIfParam(sym)
       case original: DefDef =>
         // For the primary constructor DefDef, it is:
         // * indexed as a part of completing the class, with indexConstructor; and
diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
index f81c1bf19cb1..9f0f75f44f79 100644
--- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -374,52 +374,6 @@ object RefChecks {
          && atPhase(typerPhase):
               loop(member.info.paramInfoss, other.info.paramInfoss)

-    /** A map of all occurrences of `into` in a member type.
-     *  Key: number of parameter carrying `into` annotation(s)
-     *  Value: A list of all depths of into annotations, where each
-     *  function arrow increases the depth.
- * Example: - * def foo(x: into A, y: => [X] => into (x: X) => into B): C - * produces the map - * (0 -> List(0), 1 -> List(1, 2)) - */ - type IntoOccurrenceMap = immutable.Map[Int, List[Int]] - - def intoOccurrences(tp: Type): IntoOccurrenceMap = - - def traverseInfo(depth: Int, tp: Type): List[Int] = tp match - case AnnotatedType(tp, annot) if annot.symbol == defn.IntoParamAnnot => - depth :: traverseInfo(depth, tp) - case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.RepeatedParamClass => - traverseInfo(depth, arg) - case defn.FunctionOf(_, resType, _) => - traverseInfo(depth + 1, resType) - case RefinedType(parent, rname, mt: MethodOrPoly) => - traverseInfo(depth, mt) - case tp: MethodOrPoly => - traverseInfo(depth + 1, tp.resType) - case tp: ExprType => - traverseInfo(depth, tp.resType) - case _ => - Nil - - def traverseParams(n: Int, formals: List[Type], acc: IntoOccurrenceMap): IntoOccurrenceMap = - if formals.isEmpty then acc - else - val occs = traverseInfo(0, formals.head) - traverseParams(n + 1, formals.tail, if occs.isEmpty then acc else acc + (n -> occs)) - - def traverse(n: Int, tp: Type, acc: IntoOccurrenceMap): IntoOccurrenceMap = tp match - case tp: PolyType => - traverse(n, tp.resType, acc) - case tp: MethodType => - traverse(n + tp.paramInfos.length, tp.resType, traverseParams(n, tp.paramInfos, acc)) - case _ => - acc - - traverse(0, tp, immutable.Map.empty) - end intoOccurrences - val checker = if makeOverridingPairsChecker == null then OverridingPairsChecker(clazz, self) else makeOverridingPairsChecker(clazz, self) @@ -653,8 +607,6 @@ object RefChecks { overrideError(i"needs to be declared with @targetName(${"\""}${other.targetName}${"\""}) so that external names match") else overrideError("cannot have a @targetName annotation since external names would be different") - else if intoOccurrences(memberTp) != intoOccurrences(otherTp) then - overrideError("has different occurrences of `into` modifiers", compareTypes = true) else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) && !member.is(Tracked) // see remark on tracked members above then // (1.12) diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index a93e010ddc34..ae2edf12160d 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -3178,6 +3178,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Implicit: Flags = dotc.core.Flags.Implicit def Infix: Flags = dotc.core.Flags.Infix def Inline: Flags = dotc.core.Flags.Inline + def Into: Flags = dotc.core.Flags.Into def Invisible: Flags = dotc.core.Flags.Invisible def JavaDefined: Flags = dotc.core.Flags.JavaDefined def JavaStatic: Flags = dotc.core.Flags.JavaStatic @@ -3203,6 +3204,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def StableRealizable: Flags = dotc.core.Flags.StableRealizable @deprecated("Use JavaStatic instead", "3.3.0") def Static: Flags = dotc.core.Flags.JavaStatic def Synthetic: Flags = dotc.core.Flags.Synthetic + def Tracked: Flags = dotc.core.Flags.Tracked def Trait: Flags = dotc.core.Flags.Trait def Transparent: Flags = dotc.core.Flags.Transparent diff --git a/compiler/test/dotc/run-test-pickling.excludelist b/compiler/test/dotc/run-test-pickling.excludelist index c880a4b78f23..c9e495979946 100644 --- a/compiler/test/dotc/run-test-pickling.excludelist +++ 
b/compiler/test/dotc/run-test-pickling.excludelist @@ -50,3 +50,5 @@ named-tuples-strawman-2.scala typeCheckErrors.scala i18150.scala +# Pickling differences with local parameters export forwarders of methods with into parameters. But their external type is the same +Parser.scala diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 6c144f436690..f6ac6e8e4b8d 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -213,12 +213,6 @@ FunArgType ::= Type FunArgTypes ::= FunArgType { ‘,’ FunArgType } ParamType ::= [‘=>’] ParamValueType ParamValueType ::= Type [‘*’] PostfixOp(t, "*") - | IntoType - | ‘(’ IntoType ‘)’ ‘*’ PostfixOp(t, "*") -IntoType ::= [‘into’] IntoTargetType Into(t) - | ‘(’ IntoType ‘)’ -IntoTargetType ::= Type - | FunTypeArgs (‘=>’ | ‘?=>’) IntoType TypeArgs ::= ‘[’ Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) diff --git a/docs/_docs/reference/experimental/into-modifier.md b/docs/_docs/reference/experimental/into-modifier.md deleted file mode 100644 index 54da5f976320..000000000000 --- a/docs/_docs/reference/experimental/into-modifier.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -layout: doc-page -title: "The `into` Type Modifier" -redirectFrom: /docs/reference/other-new-features/into-modifier.html -nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/into-modifier.html ---- - -Scala 3's implicit conversions of the `scala.Conversion` class require a language import -``` -import scala.language.implicitConversions -``` -in any code that uses them as implicit conversions (code that calls conversions explicitly is not affected). If the import is missing, a feature warning is currently issued, and this will become an error in a future version of Scala 3. The motivation for this restriction is that code with hidden implicit conversions is hard to understand and might have correctness or performance problems that go undetected. - -There is one broad use case, however, where implicit conversions are very hard to replace. This is the case where an implicit conversion is used to adapt a method argument to its formal parameter type. An example from the standard library: -```scala -scala> val xs = List(0, 1) -scala> val ys = Array(2, 3) -scala> xs ++ ys -val res0: List[Int] = List(0, 1, 2, 3) -``` -The last input made use of an implicit conversion from `Array[Int]` to `IterableOnce[Int]` which is defined as a Scala 2 style implicit conversion in the standard library. Once the standard library is rewritten with Scala 3 conversions, this will -require a language import at the use site, which is clearly unacceptable. It is possible to avoid the need for implicit conversions using method overloading or type classes, but this often leads to longer and more complicated code, and neither of these alternatives work for vararg parameters. - -This is where the `into` modifier on parameter types comes in. Here is a signature of the `++` method on `List[A]` that uses it: -```scala - def ++ (elems: into IterableOnce[A]): List[A] -``` -The `into` modifier on the type of `elems` means that implicit conversions can be applied to convert the actual argument to an `IterableOnce` value, and this without needing a language import. - -## Function arguments - -`into` also allows conversions on the results of function arguments. 
For instance, consider the new proposed signature of the `flatMap` method on `List[A]`: - -```scala - def flatMap[B](f: A => into IterableOnce[B]): List[B] -``` -This accepts all actual arguments `f` that, when applied to an `A`, give a result -that is convertible to `IterableOnce[B]`. So the following would work: -```scala -scala> val xs = List(1, 2, 3) -scala> xs.flatMap(x => x.toString * x) -val res2: List[Char] = List(1, 2, 2, 3, 3, 3) -``` -Here, the conversion from `String` to `Iterable[Char]` is applied on the results of `flatMap`'s function argument when it is applied to the elements of `xs`. - -## Vararg arguments - -When applied to a vararg parameter, `into` allows a conversion on each argument value individually. For example, consider a method `concatAll` that concatenates a variable -number of `IterableOnce[Char]` arguments, and also allows implicit conversions into `IterableOnce[Char]`: - -```scala -def concatAll(xss: (into IterableOnce[Char])*): List[Char] = - xss.foldLeft(List[Char]())(_ ++ _) -``` -Here, the call -```scala -concatAll(List('a'), "bc", Array('d', 'e')) -``` -would apply two _different_ implicit conversions: the conversion from `String` to `Iterable[Char]` gets applied to the second argument and the conversion from `Array[Char]` to `Iterable[Char]` gets applied to the third argument. - -Note that a vararg parameter type with into modifiers needs to be put in parentheses, as is shown in the example above. This is to make the precedence clear: each element of the argument sequence is converted by itself. - -## Retrofitting Scala 2 libraries - -There is also an annotation `@into` in the `scala.annotation` package that has -the same effect as an `into` modifier. It is intended to be used for retrofitting Scala 2 library code so that Scala 3 conversions can be applied to arguments without language imports. For instance, the definitions of -`++` and `flatMap` in the Scala 2.13 `List` class could be retrofitted as follows. -```scala - def ++ (elems: IterableOnce[A] @into): List[A] - def flatMap[B](f: A => IterableOnce[B] @into): List[B] -``` -For Scala 3 code, the `into` modifier is preferred, because it adheres to the principle that annotations should not influence typing and type inference in Scala. - -## Restrictions - -The `into` modifier is only allowed in the types of method parameters. It can be given either for the whole type, or some result type of a top-level function type, but not anywhere else. The `into` modifier does not propagate outside the method. In particular, a partially applied method does not propagate `into` modifiers to its result. - -**Example:** - -Say we have -```scala -def f(x: Int)(y: into Text): Unit -``` -then -```scala -f(3) : Text => Unit -``` -Note the `into` modifier is not longer present on the type of `f(3)`. Therefore, follow-on arguments to `f(3)` do not allow implicit conversions. Generally it is not possible to -define function types that allow implicit conversions on their arguments, but it is possible to define SAM types that allow conversions. E.g. -```scala -trait ConvArg: - def apply(x: into Text): Unit - -val x: ConvArg = f(3)(_) -``` - -Note this is similar to the way vararg parameters are handled in Scala. If we have -```scala -def g(x: Int)(y: Int*): Unit -``` -then -```scala -g(4) : Seq[Int] => Unit -``` -Observe that the vararg annotation also got dropped in the result type of `g(4)`. 
- -## Syntax changes - -The addition to the grammar is: -``` -ParamType ::= [‘=>’] ParamValueType -ParamValueType ::= Type [‘*’] - | IntoType - | ‘(’ IntoType ‘)’ ‘*’ -IntoType ::= [‘into’] IntoTargetType - | ‘(’ IntoType ‘)’ -IntoTargetType ::= Type - | FunTypeArgs (‘=>’ | ‘?=>’) IntoType -``` -As the grammar shows, `into` can only applied in the type of a parameter; it is illegal in other positions. Also, `into` modifiers in vararg types have to be enclosed in parentheses. diff --git a/docs/_docs/reference/experimental/into.md b/docs/_docs/reference/experimental/into.md new file mode 100644 index 000000000000..8b54865c2cba --- /dev/null +++ b/docs/_docs/reference/experimental/into.md @@ -0,0 +1,285 @@ +--- +layout: doc-page +title: The `into` Type and Modifier +redirectFrom: /docs/reference/other-new-features/into.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/into.html +--- + +This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: + +```scala +import scala.language.experimental.into +``` + + +## Summary + +Scala 3 offers two alternative schemes to allow implicit conversions using Scala-3's `Conversion` +class without requiring a language import. + +The first scheme is +to have a special type `into[T]` which serves as a marker that conversions into that type are allowed. These types are typically used in parameters of methods that are designed to work with implicit conversions of their arguments. This allows fine-grained control over where implicit conversions should be allowed. We call this scheme "_into as a type constructor_". + +The second scheme allows `into` as a soft modifier on traits, classes, and opaque type aliases. If a type definition is declared with this modifier, conversions to that type are allowed. The second scheme requires that one has control over the conversion target types so that an `into` can be added to their declaration. It is appropriate where there are a few designated types that are meant to be conversion targets. If that's the case, migration from Scala 2 to Scala 3 +becomes easier since no function signatures need to be rewritten. We call this scheme "_into as a modifier_". + + +## Motivation + +Scala 3's implicit conversions of the `scala.Conversion` class require a language import +``` +import scala.language.implicitConversions +``` +in any code that uses them as implicit conversions (code that calls conversions explicitly is not affected). If the import is missing, a feature warning is currently issued, and this will become an error in future versions of Scala 3. The motivation for this restriction is two-fold: + + - Code with hidden implicit conversions is hard to understand and might have correctness or performance issues that go undetected. + - If we require explicit user opt-in for implicit conversions, we can significantly improve type inference by propagating expected type information more widely in those parts of the program where there is no opt-in. + +There is one broad use case, however, where implicit conversions are very hard to replace. This is the case where an implicit conversion is used to adapt a method argument to its formal parameter type. An example from the standard library: +```scala +scala> val xs = List(0, 1) +scala> val ys = Array(2, 3) +scala> xs ++ ys +val res0: List[Int] = List(0, 1, 2, 3) +``` +The input line `xs ++ ys` makes use of an implicit conversion from `Array[Int]` to `IterableOnce[Int]`. 
This conversion is defined in the standard library as an `implicit def`. Once the standard library is rewritten with Scala 3 conversions, this will require a language import at the use site, which is clearly unacceptable. It is possible to avoid the need for implicit conversions using method overloading or type classes, but this often leads to longer and more complicated code, and neither of these alternatives work for vararg parameters. + +## First Scheme: `into` as a Type Constructor + +This is where the `into` type constructor comes in. Here is a signature of a `++` method on `List[A]` that uses it: + +```scala + def ++ (elems: into[IterableOnce[A]]): List[A] +``` +The `into` wrapper on the type of `elems` means that implicit conversions can be applied to convert the actual argument to an `IterableOnce` value, and this without needing a language import. + +`into` is defined as follows in the companion object of the `scala.Conversion` class: +```scala +opaque type into[T] >: T = T +``` +Types of the form `into[T]` are treated specially during type checking. If the expected type of an expression is `into[T]` then an implicit conversion to that type can be inserted without the need for a language import. + +Note: Unlike other types, `into` starts with a lower-case letter. This emphasizes the fact that `into` is treated specially by the compiler, by making `into` look more like a keyword than a regular type. + +### Example 1 + +```scala +given Conversion[Array[Int], IterableOnce[Int]] = wrapIntArray +val xs: List[Int] = List(1) +val ys: Array[Int] = Array(2, 3) +xs ++ ys +``` +This inserts the given conversion on the `ys` argument in `xs ++ ys`. It typechecks without a feature warning since the formal parameter of `++` is of type `into[IterableOnce]`, which is also the expected type of `ys`. + +### Example 2 + +Consider a simple expression AST type: +```scala +enum Expr: + case Neg(e: Expr) + case Add(e1: Expr, e2: Expr) + case Const(n: Int) +import Expr.* +``` +Say we'd like to build `Expr` trees without explicit `Const` wrapping, as in `Add(1, Neg(2))`. The usual way to achieve this is with an implicit conversion from `Int` to `Const`: +```scala +given Conversion[Int, Const] = Const(_) +``` +Normally, that would require a language import in all source modules that construct `Expr` trees. We can avoid this requirement on user code by declaring `Neg` and `Add` with `into` parameters: +```scala +enum Expr: + case Neg(e: into[Expr]) + case Add(e1: into[Expr], e2: into[Expr]) + case Const(n: Int) +``` +This would allow conversions from `Int` to `Const` when constructing trees but not elsewhere. + +### `into` in Function Results + +`into` allows conversions everywhere it appears as expected type, including in the results of function arguments. For instance, consider the new proposed signature of the `flatMap` method on `List[A]`: + +```scala + def flatMap[B](f: A => into[IterableOnce[B]]): List[B] +``` +This accepts all actual arguments `f` that, when applied to an `A`, give a result +that is convertible to `IterableOnce[B]`. So the following would work: +```scala +scala> val xs = List(1, 2, 3) +scala> xs.flatMap(x => x.toString * x) +val res2: List[Char] = List(1, 2, 2, 3, 3, 3) +``` +Here, the conversion from `String` to `Iterable[Char]` is applied on the results of `flatMap`'s function argument when it is applied to the elements of `xs`. + +### Vararg arguments + +When applied to a vararg parameter, `into` allows a conversion on each argument value individually. 
For example, consider a method `concatAll` that concatenates a variable +number of `IterableOnce[Char]` arguments, and also allows implicit conversions into `IterableOnce[Char]`: + +```scala +def concatAll(xss: into[IterableOnce[Char]]*): List[Char] = + xss.foldRight(Nil)(_ ++: _) +``` +Here, the call +```scala +concatAll(List('a'), "bc", Array('d', 'e')) +``` +would apply two _different_ implicit conversions: the conversion from `String` to `Iterable[Char]` gets applied to the second argument and the conversion from `Array[Char]` to `Iterable[Char]` gets applied to the third argument. + + +### Unwrapping `into` + +Since `into[T]` is an opaque type, its run-time representation is just `T`. +At compile time, the type `into[T]` is a known supertype of the type `T`. So if `t: T`, then +```scala + val x: into[T] = t +``` +typechecks but +```scala +val y: T = x // error +``` +is ill-typed. We can recover the underlying type `T` using the `underlying` extension method which is also defined in object `Conversion`: +```scala +import Conversion.underlying + +val y: T = x.underlying // ok +``` +However, the next section shows that unwrapping with `.underlying` is not needed for parameters, which is the most common use case. So explicit unwrapping should be quite rare. + + + +### Dropping `into` for Parameters in Method Bodies + +The typical use cases for `into` wrappers are for parameters. Here, they specify that the +corresponding arguments can be converted to the formal parameter types. On the other hand, inside a method, a parameter type can be assumed to be of the underlying type since the conversion already took place when the enclosing method was called. This is reflected in the type system which erases `into` wrappers in the local types of parameters +as they are seen in a method body. Here is an example: +```scala + def ++ (elems: into[IterableOnce[A]]): List[A] = + val buf = ListBuffer[A]() + for elem <- elems.iterator do // no `.underlying` needed here + buf += elems + buf.toList +``` +Inside the `++` method, the `elems` parameter is of type `IterableOnce[A]`, not `into[IterableOne[A]]`. Hence, we can simply write `elems.iterator` to get at the `iterator` method of the `IterableOnce` class. + +Specifically, we erase all `into` wrappers in the local types of parameter types that appear in covariant or invariant position. Contravariant `into` wrappers are kept since these typically are on the parameters of function arguments. + +### Into Constructors in Type Aliases + +Since `into` is a regular type constructor, it can be used anywhere, including in type aliases and type parameters. For instance, in the Scala standard library we could define +```scala +type ToIterator[T] = into[IterableOnce[T]] +``` +and then `++`, `flatMap` and other functions could use this alias in their parameter types. The effect would be the same as when `into` is written out explicitly. + +## Second Scheme: `into` as a Modifier + +The `into` scheme discussed so far strikes a nice balance between explicitness and convenience. But migrating to it from Scala 2 implicits does require major changes since possibly a large number of function signatures has to be changed to allow conversions on the arguments. This might ultimately hold back migration to Scala 3 implicits. + +To facilitate migration, we also introduce an alternative way to specify target types of implicit conversions. We allow `into` as a soft modifier on +classes, traits, and opaque type aliases. 
If a type definition is declared with `into`, then implicit conversions into that type don't need a language import.
+
+For instance, the Laminar framework defines a trait `Modifier` that should support implicit conversions into it. `Modifier` is commonly used as a parameter type both in Laminar's own functions and in user-defined, application-level functions that use Laminar.
+
+We can support implicit conversions to `Modifier`s simply by making `Modifier` an `into` trait:
+```scala
+into trait Modifier ...
+```
+This means implicit `Conversion` instances with `Modifier` results can be inserted without requiring a language import.
+
+Here is a simplified example:
+```scala
+into trait Modifier
+given Conversion[Option[Node], Modifier] = ...
+given Conversion[Seq[Node], Modifier] = ...
+
+def f(x: Source, m: Modifier) = ...
+f(source, Some(node)) // inserts conversion
+```
+
+The `into`-as-a-modifier scheme is handy in codebases that have a small set of specific types that are intended as the targets of implicit conversions defined in the same codebase. Laminar's `Modifier` is a typical example. But the scheme can easily be abused by declaring too many `into` types. One should restrict the number of `into`-declared types to the absolute minimum. In particular, never make a type `into` just to cater for the possibility that someone might want to add an implicit conversion to it later.
+
+
+## Details: Conversion target types
+
+To make the preceding descriptions more precise: An implicit conversion is permitted without an `implicitConversions` language import if the target type is a valid conversion target type. A valid conversion target type is one of the following:
+
+ - A type of the form `into[T]`.
+ - A reference `p.C` to a class, trait, or opaque type alias `C` that is declared with an `into` modifier. The reference can be followed by type arguments.
+ - A type alias of a valid conversion target type.
+ - A match type that reduces to a valid conversion target type.
+ - An annotated type `T @ann` where `T` is a valid conversion target type.
+ - A refined type `T {...}` where `T` is a valid conversion target type.
+ - A union `T | U` of two valid conversion target types `T` and `U`.
+ - An intersection `T & U` of two valid conversion target types `T` and `U`.
+ - An instance of a type parameter that is explicitly instantiated to a valid conversion target type.
+
+Type parameters that are not fully instantiated do not count as valid conversion target types. For instance, consider:
+
+```scala
+  trait Token
+  class Keyword(str: String)
+  given Conversion[String, Keyword] = Keyword(_)
+
+  List[into[Keyword]]("if", "then", "else")
+```
+This type-checks since the target type of the list elements is the type parameter of the `List.apply` method, which is explicitly instantiated to `into[Keyword]`. On the other hand, if we continue the example as follows, we get feature warnings:
+```scala
+  val ifKW: into[Keyword] = "if"
+  val ys: List[into[Keyword]] = List(ifKW, "then", "else")
+```
+Here, the type variable of `List.apply` is not explicitly instantiated
+when we check the `List(...)` arguments (it is just upper-bounded by the target type `into[Keyword]`). This is not enough to allow
+implicit conversions on the second and third arguments.
+
+Subclasses of `into` classes or traits do not count as valid conversion target types.
For instance, consider:
+
+```scala
+into trait T
+class C(x: Int) extends T
+given Conversion[Int, C] = C(_)
+
+def f(x: T) = ()
+def g(x: C) = ()
+f(1) // ok
+g(1) // error
+```
+The call `f(1)` type-checks since `f`'s parameter type `T` is declared `into`.
+But the call `g(1)` does not type-check since `g`'s parameter type `C` is not `into`. It does not matter that `C` extends a trait `T` that is `into`.
+
+
+## Why Two Different Schemes?
+
+Can we make do with just one scheme instead of two? In practice, this would be difficult.
+
+Let's first take a look at the `Expr` example, which uses `into` as a type constructor. Could it be rewritten to use `into` as a modifier?
+This would mean we have to add `into` to the whole `Expr` enum. Adding it to just `Const` is not enough, since `Add` and `Neg` take `Expr` arguments, not `Const` arguments.
+
+But we might not always have permission to change the `Expr` enum. For instance, `Expr` could be defined in a lower-level library without implicit conversions, but later we want to make `Expr` construction convenient by eliding `Const` wrappers in some higher-level library or application. With `into` constructors, this is easy: define the implicit conversion and facade methods that construct `Expr` trees while taking `into[Expr]` parameters.
+With `into` modifiers, there is no way to achieve the same.
+
+A possibly more important objection is that even if we could add the `into` modifier to `Expr`, it would be bad style to do so! We want to allow implicit conversions in the very specific case where we build an `Expr` tree using the `Add` and `Neg` constructors. Our applications could have lots of other methods that take `Expr` trees, for instance to analyze them or evaluate them.
+We probably do not want to allow implicit conversions for the arguments of all these other methods. The `into` modifier is too unspecific to distinguish the good use case from the problematic ones.
+
+On the other hand, there are also situations where `into` as a modifier is the practical choice. To see this, consider again the `Modifier` use case in Laminar.
+We could avoid the `into` modifier by wrapping all `Modifier` parameters
+with the `into` constructor. This would be a lot more work than adding just the single `into` modifier. Worse, functions taking `Modifier` parameters are found both in the Laminar framework code and in many applications using it. The framework and the applications would have to be upgraded in lockstep. When Laminar upgrades to Scala 3 implicits, all applications would have to be rewritten, which would make such a migration very cumbersome.
+
+One can try to mitigate the effort by playing with type aliases. For instance, a hypothetical future Laminar using Scala 3 conversions could rename the
+trait `Modifier` to `ModifierTrait` and define an alias
+```scala
+type Modifier = into[ModifierTrait]
+```
+Then the source code of applications would not have to change (unless these applications define classes directly extending `Modifier`). But that future Laminar would not be binary compatible with the current one, since the name
+of the original `Modifier` trait has changed. In summary, upgrading Laminar to use Scala 3 conversions could keep either source compatibility or binary compatibility, but not both at the same time.
+
+
+## Syntax Changes
+
+```
+LocalModifier ::= ... | ‘into’
+```
+
+`into` is a soft modifier. It is only allowed on classes, traits, and opaque type aliases.
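+
+For example, the following sketch shows the modifier on an opaque type alias. This is only an illustration, modeled on the `into-as-mod` test case in this change; the names `C`, `K`, `A`, `h`, and `Test` are made up for the example and are not part of any API:
+```scala
+import language.experimental.into
+
+class C(x: Int)
+
+object K:
+  into opaque type A = C           // `into` on an opaque type alias
+  given Conversion[Int, A] = C(_)  // conversion into the alias
+
+object Test:
+  import K.*
+  def h(x: A) = ()
+  def use = h(1)  // ok: the conversion is inserted without a language import
+```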
+ diff --git a/docs/sidebar.yml b/docs/sidebar.yml index aecd974326ab..647bc995a4f8 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -160,7 +160,7 @@ subsection: - page: reference/experimental/numeric-literals.md - page: reference/experimental/explicit-nulls.md - page: reference/experimental/main-annotation.md - - page: reference/experimental/into-modifier.md + - page: reference/experimental/into.md - page: reference/experimental/cc.md - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index 8f05d6ad11da..528aa1055c6f 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -32,7 +32,9 @@ class CompletionTest { @Test def completionFromScalaPackage: Unit = { code"class Foo { val foo: Conv${m1} }" - .completion(("Conversion", Class, "Conversion")) + .completion( + ("Conversion", Class, "Conversion"), + ("Conversion", Module, "Conversion")) } @Test def implicitSearchCrash: Unit = diff --git a/library/src/scala/Conversion.scala b/library/src/scala/Conversion.scala index f6267dc79fb2..cbae3448dab9 100644 --- a/library/src/scala/Conversion.scala +++ b/library/src/scala/Conversion.scala @@ -29,3 +29,20 @@ abstract class Conversion[-T, +U] extends Function1[T, U]: extension (x: T) /** `x.convert` converts a value `x` of type `T` to type `U` */ def convert = this(x) + +object Conversion: + import annotation.experimental + + /** An opaque type alias to declare "into" parameter types that allow implicit conversions + * on corresponding arguments. If the expected type of an expression t is into[T], implicit + * conversions are tried from the type of `t` to `T`. `into[T]` types are erased to `T` + * in all covariant positions of the types of parameter symbols. + */ + @experimental + opaque type into[+T] >: T = T + + /** Unwrap an `into` */ + extension [T](x: into[T]) + @experimental def underlying: T = x + +end Conversion \ No newline at end of file diff --git a/library/src/scala/annotation/internal/$into.scala b/library/src/scala/annotation/internal/$into.scala index 4d8788724e25..bad0e399d389 100644 --- a/library/src/scala/annotation/internal/$into.scala +++ b/library/src/scala/annotation/internal/$into.scala @@ -1,15 +1,12 @@ package scala.annotation.internal import annotation.experimental -/** An internal annotation on (part of) a parameter type that allows implicit conversions - * for its arguments. The publicly visible `into` annotation in the parent package - * `annotation` gets mapped to `$into` by the compiler in all places where - * conversions should be allowed. The reason for the split into two annotations - * is that `annotation.into` is given in source code and may propagate in unspecified - * ways through type inference. By contrast `$into` is constrained to occur only - * on parameters of method types. This makes implicit conversion insertion - * predictable and independent of the un-specified aspects of type inference. +/** An internal annotation on (part of) a parameter type that serves as a marker where + * the original type was of the form `into[T]`. These annotated types are mapped back + * to `into[T]` types when forming a method types from the parameter types. The idea is + * that `T @$into` is equivalent to `T`, whereas `into[T]` is only a known supertype of + * `T`. 
Hence, we don't need to use `.underlying` to go from an into type to its + * underlying type in the types of local parameters. */ @experimental -class $into() extends annotation.StaticAnnotation - +class $into extends annotation.StaticAnnotation \ No newline at end of file diff --git a/library/src/scala/annotation/into.scala b/library/src/scala/annotation/into.scala deleted file mode 100644 index 70a53ff9478d..000000000000 --- a/library/src/scala/annotation/into.scala +++ /dev/null @@ -1,10 +0,0 @@ -package scala.annotation -import annotation.experimental - -/** An annotation on (part of) a parameter type that allows implicit conversions - * for its arguments. The `into` modifier on parameter types in Scala 3 is - * mapped to this annotation. The annotation is intended to be used directly in - * Scala 2 sources only. For Scala 3, the `into` modifier should be preferred. - */ -@experimental -class into() extends annotation.StaticAnnotation diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index a7f857e8a719..b817b512f125 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -9,10 +9,12 @@ object MiMaFilters { // Additions that require a new minor version of the library Build.mimaPreviousDottyVersion -> Seq( ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.readOnlyCapability"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Conversion.underlying"), + ProblemFilters.exclude[MissingClassProblem]("scala.Conversion$"), // Scala.js-only class ProblemFilters.exclude[FinalClassProblem]("scala.scalajs.runtime.AnonFunctionXXL"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.scalajs.runtime.AnonFunctionXXL.this"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.scalajs.runtime.AnonFunctionXXL.this"), ), // Additions since last LTS diff --git a/tests/neg/i21786.scala b/tests/neg/i21786.scala deleted file mode 100644 index c5bb9c595d32..000000000000 --- a/tests/neg/i21786.scala +++ /dev/null @@ -1 +0,0 @@ -into class X // error diff --git a/tests/neg/into-inferred.check b/tests/neg/into-inferred.check new file mode 100644 index 000000000000..f96fe4bbafaf --- /dev/null +++ b/tests/neg/into-inferred.check @@ -0,0 +1,39 @@ +-- [E007] Type Mismatch Error: tests/neg/into-inferred.scala:34:32 ----------------------------------------------------- +34 | val l1: List[into[Keyword]] = l :+ "then" :+ "else" // error + | ^^^^^^^^^^^^^^^^^^^^^ + | Found: List[Conversion.into[Keyword] | String] + | Required: List[Conversion.into[Keyword]] + | + | longer explanation available when compiling with `-explain` +-- Feature Warning: tests/neg/into-inferred.scala:22:43 ---------------------------------------------------------------- +22 | val ys: List[into[Keyword]] = List(ifKW, "then", "else") // warn // warn + | ^^^^^^ + | Use of implicit conversion given instance given_Conversion_String_Keyword in object Test should be enabled + | by adding the import clause 'import scala.language.implicitConversions' + | or by setting the compiler option -language:implicitConversions. + | See the Scala docs for value scala.language.implicitConversions for a discussion + | why the feature should be explicitly enabled. 
+-- Feature Warning: tests/neg/into-inferred.scala:22:51 ---------------------------------------------------------------- +22 | val ys: List[into[Keyword]] = List(ifKW, "then", "else") // warn // warn + | ^^^^^^ + | Use of implicit conversion given instance given_Conversion_String_Keyword in object Test should be enabled + | by adding the import clause 'import scala.language.implicitConversions' + | or by setting the compiler option -language:implicitConversions. + | See the Scala docs for value scala.language.implicitConversions for a discussion + | why the feature should be explicitly enabled. +-- Feature Warning: tests/neg/into-inferred.scala:35:42 ---------------------------------------------------------------- +35 | val l2: List[into[Keyword]] = l ++ List("then", "else") // warn // warn + | ^^^^^^ + | Use of implicit conversion given instance given_Conversion_String_Keyword in object Test should be enabled + | by adding the import clause 'import scala.language.implicitConversions' + | or by setting the compiler option -language:implicitConversions. + | See the Scala docs for value scala.language.implicitConversions for a discussion + | why the feature should be explicitly enabled. +-- Feature Warning: tests/neg/into-inferred.scala:35:50 ---------------------------------------------------------------- +35 | val l2: List[into[Keyword]] = l ++ List("then", "else") // warn // warn + | ^^^^^^ + | Use of implicit conversion given instance given_Conversion_String_Keyword in object Test should be enabled + | by adding the import clause 'import scala.language.implicitConversions' + | or by setting the compiler option -language:implicitConversions. + | See the Scala docs for value scala.language.implicitConversions for a discussion + | why the feature should be explicitly enabled. 
diff --git a/tests/neg/into-inferred.scala b/tests/neg/into-inferred.scala new file mode 100644 index 000000000000..e2d459394ab8 --- /dev/null +++ b/tests/neg/into-inferred.scala @@ -0,0 +1,37 @@ +//> using options -feature +import language.experimental.into +import Conversion.{into, underlying} + +trait Token +class Keyword(str: String) +case class Phrase(words: into[Keyword]*) + +object Test: + given Conversion[String, Keyword] = Keyword(_) + + val xs = List[into[Keyword]]("if", "then", "else") // ok + val _: List[Keyword] = xs.map(_.underlying) + + val p = Phrase("if", "then", "else") // ok + val ws = p.words + val _: Seq[Keyword] = ws + + val p2 = Phrase(xs*) // ok + + val ifKW: into[Keyword] = "if" + val ys: List[into[Keyword]] = List(ifKW, "then", "else") // warn // warn + + val s = Set(ifKW) + val s1 = s + "then" + "else" + val _: Set[into[Keyword]] = s1 + val s2 = s ++ List("then", "else") + val s3: Set[into[Keyword] | String] = s2 + val s4 = s3.map(_.underlying) + val _: Set[Keyword | String] = s4 + + + val l = List(ifKW) + val l1: List[into[Keyword]] = l :+ "then" :+ "else" // error + val l2: List[into[Keyword]] = l ++ List("then", "else") // warn // warn + + diff --git a/tests/neg/into-mods.check b/tests/neg/into-mods.check new file mode 100644 index 000000000000..30ff99372b99 --- /dev/null +++ b/tests/neg/into-mods.check @@ -0,0 +1,16 @@ +-- Error: tests/neg/into-mods.scala:10:11 ------------------------------------------------------------------------------ +10 | into def foo = 22 // error + | ^ + | values cannot be into +-- [E156] Syntax Error: tests/neg/into-mods.scala:7:12 ----------------------------------------------------------------- +7 |into object M // error + |^^^^^^^^^^^^^ + |Modifier into is not allowed for this definition +-- [E156] Syntax Error: tests/neg/into-mods.scala:12:2 ----------------------------------------------------------------- +12 | into type T = Int // error + | ^^^^ + | Modifier into is not allowed for this definition +-- Error: tests/neg/into-mods.scala:11:11 ------------------------------------------------------------------------------ +11 | into val x = 33 // error + | ^^^^^^^^^^^^^^^ + | modifier(s) `into` incompatible with value definition diff --git a/tests/neg/into-mods.scala b/tests/neg/into-mods.scala new file mode 100644 index 000000000000..7b426471c0e2 --- /dev/null +++ b/tests/neg/into-mods.scala @@ -0,0 +1,14 @@ +import language.experimental.into + +into class Test + +into trait T + +into object M // error + +object Test: + into def foo = 22 // error + into val x = 33 // error + into type T = Int // error + into opaque type U = Int // ok + diff --git a/tests/neg/into-override.check b/tests/neg/into-override.check index 812470494a8b..bd5ef554e218 100644 --- a/tests/neg/into-override.check +++ b/tests/neg/into-override.check @@ -1,21 +1,30 @@ --- [E164] Declaration Error: tests/neg/into-override.scala:16:15 ------------------------------------------------------- -16 | override def f(x: into X) = super.f(x) // error - | ^ - | error overriding method f in trait A of type (x: X): Unit; - | method f of type (x: into X): Unit has different occurrences of `into` modifiers +-- [E120] Naming Error: tests/neg/into-override.scala:16:6 ------------------------------------------------------------- +16 |trait C[X] extends A[X]: // error + | ^ + | Name clash between defined and inherited member: + | def f(x: X): Unit in trait A at line 11 and + | override def f(x: Conversion.into[X]): Unit in trait C at line 17 + | have the same type after erasure. 
| - | longer explanation available when compiling with `-explain` --- [E164] Declaration Error: tests/neg/into-override.scala:18:6 -------------------------------------------------------- -18 |class D[X] extends B[X], C[X] // error + | Consider adding a @targetName annotation to one of the conflicting definitions + | for disambiguation. +-- [E120] Naming Error: tests/neg/into-override.scala:19:6 ------------------------------------------------------------- +19 |class D[X] extends B[X], C[X] // error | ^ - | error overriding method f in trait B of type (x: X): Unit; - | method f in trait C of type (x: into X): Unit has different occurrences of `into` modifiers + | Name clash between inherited members: + | override def f(x: X): Unit in trait B at line 14 and + | override def f(x: Conversion.into[X]): Unit in trait C at line 17 + | have the same type after erasure. | - | longer explanation available when compiling with `-explain` --- [E164] Declaration Error: tests/neg/into-override.scala:21:15 ------------------------------------------------------- -21 | override def f(x: X) = super.f(x) // error - | ^ - | error overriding method f in trait C of type (x: into X): Unit; - | method f of type (x: X): Unit has different occurrences of `into` modifiers + | Consider adding a @targetName annotation to one of the conflicting definitions + | for disambiguation. +-- [E120] Naming Error: tests/neg/into-override.scala:21:6 ------------------------------------------------------------- +21 |trait E[X] extends C[X]: // error + | ^ + | Name clash between defined and inherited member: + | override def f(x: Conversion.into[X]): Unit in trait C at line 17 and + | override def f(x: X): Unit in trait E at line 22 + | have the same type after erasure. | - | longer explanation available when compiling with `-explain` + | Consider adding a @targetName annotation to one of the conflicting definitions + | for disambiguation. diff --git a/tests/neg/into-override.scala b/tests/neg/into-override.scala index 645ae8756003..6a25ddbeb7f0 100644 --- a/tests/neg/into-override.scala +++ b/tests/neg/into-override.scala @@ -1,6 +1,7 @@ //> using options -Xfatal-warnings import language.experimental.into +import Conversion.into class Text(val str: String) @@ -12,12 +13,12 @@ trait A[X]: trait B[X] extends A[X]: override def f(x: X) = super.f(x) -trait C[X] extends A[X]: - override def f(x: into X) = super.f(x) // error +trait C[X] extends A[X]: // error + override def f(x: into[X]) = super.f(x) class D[X] extends B[X], C[X] // error -trait E[X] extends C[X]: - override def f(x: X) = super.f(x) // error +trait E[X] extends C[X]: // error + override def f(x: X) = super.f(x) + -def f = new D[Text].f("abc") diff --git a/tests/neg/into-syntax.check b/tests/neg/into-syntax.check index ad1f95db93be..8c54ad75af17 100644 --- a/tests/neg/into-syntax.check +++ b/tests/neg/into-syntax.check @@ -41,21 +41,15 @@ | Missing return type | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/into-syntax.scala:7:17 ------------------------------------------------------------ +-- Error: tests/neg/into-syntax.scala:7:17 ----------------------------------------------------------------------------- 7 | def f1(x: List[into Int]) = () // error // error | ^^^^ - | Not found: type into - did you mean into.type? 
- | - | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/into-syntax.scala:21:18 ----------------------------------------------------------- + | Type argument into does not have the same kind as its bound +-- [E056] Syntax Error: tests/neg/into-syntax.scala:21:18 -------------------------------------------------------------- 21 | def f11(x: ((y: into Int) => into Int => Int)*) = () // error // error | ^^^^ - | Not found: type into - did you mean into.type? - | - | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/neg/into-syntax.scala:27:22 ----------------------------------------------------------- + | Missing type parameter for into +-- Error: tests/neg/into-syntax.scala:27:22 ---------------------------------------------------------------------------- 27 | def f17(x: into (y: into Int, z: into Int) => into Int) = () // error // error // error | ^^^^ - | Not found: type into - did you mean into.type? - | - | longer explanation available when compiling with `-explain` + | Type argument into does not have the same kind as its bound diff --git a/tests/neg/into-syntax.scala b/tests/neg/into-syntax.scala deleted file mode 100644 index 8f48a603adf6..000000000000 --- a/tests/neg/into-syntax.scala +++ /dev/null @@ -1,27 +0,0 @@ -//> using options -feature - -import language.experimental.into - - -object x1: - def f1(x: List[into Int]) = () // error // error -object x3: - def f3(x: ((into Int))) = () // ok -object x4: - def f4(x: into Int*) = () // error -object x5: - def f5(x: ((into Int))*) = () // ok - -object x6: - def f6(x: (into Int)*) = () // ok - def f7(x: (Int => into Int)*) = () // ok - def f8(x: (Int => (into Int))*) = () // ok - def f9(x: (y: Int) => into Int) = () // ok - def f10(x: ((y: Int) => into Int)*) = () // ok - def f11(x: ((y: into Int) => into Int => Int)*) = () // error // error - -object x7: - def f14(x: (into Int) => Int) = () // error - def f15(x: (into Int, into Int)) = () // error // error - def f16(x: (into Int, into Int) => Int) = () // error // error - def f17(x: into (y: into Int, z: into Int) => into Int) = () // error // error // error diff --git a/tests/new/test.scala b/tests/new/test.scala index dc1891f3525c..d350e15a8c9f 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -1,8 +1,15 @@ -type Person = (name: String, age: Int) -trait A: - type T +package foo + +package object bar: + opaque type O[X] >: X = X + +class Test: + import bar.O + + val x = "abc" + val y: O[String] = x + //val z: String = y + -class B: - type U =:= A { type T = U } diff --git a/tests/pos/into-bigint.scala b/tests/pos/into-bigint.scala index 409b5e79da2c..9ecac8c68dcd 100644 --- a/tests/pos/into-bigint.scala +++ b/tests/pos/into-bigint.scala @@ -1,13 +1,14 @@ import language.experimental.into +import Conversion.into class BigInt(x: Int): - def + (other: into BigInt): BigInt = ??? - def * (other: into BigInt): BigInt = ??? + def + (other: into[BigInt]): BigInt = ??? + def * (other: into[BigInt]): BigInt = ??? object BigInt: given Conversion[Int, BigInt] = BigInt(_) - extension (x: into BigInt) + extension (x: into[BigInt]) def + (other: BigInt): BigInt = ??? def * (other: BigInt): BigInt = ??? 
diff --git a/tests/pos/into-class.scala b/tests/pos/into-class.scala index 2638b9a0234a..989c932475cb 100644 --- a/tests/pos/into-class.scala +++ b/tests/pos/into-class.scala @@ -1,8 +1,9 @@ import language.experimental.into +import Conversion.into class Text(str: String) -case class C(x: into Text) +case class C(x: into[Text]) case class D(x: Text) diff --git a/tests/pos/into-expr.scala b/tests/pos/into-expr.scala new file mode 100644 index 000000000000..85b287668ee8 --- /dev/null +++ b/tests/pos/into-expr.scala @@ -0,0 +1,16 @@ + +//> using options -feature -Xfatal-warnings + +import language.experimental.into +import Conversion.into + +enum Expr: + case Neg(e: into[Expr]) + case Add(e1: into[Expr], e2: into[Expr]) + case Const(n: Int) +import Expr.* + +given Conversion[Int, Const] = Const(_) + +def Test = + Add(1, Neg(2)) diff --git a/tests/pos/into-sam.scala b/tests/pos/into-sam.scala index 7513b5c5b0de..c4b528fce215 100644 --- a/tests/pos/into-sam.scala +++ b/tests/pos/into-sam.scala @@ -2,16 +2,17 @@ //> using options -feature -Xfatal-warnings import language.experimental.into +import Conversion.into class Text(val str: String) given Conversion[String, Text] = Text(_) object Test: - def f(x: Int)(y: into Text): Unit = () + def f(x: Int)(y: into[Text]): Unit = () val _: Text => Unit = f(3) trait ConvArg: - def apply(x: into Text): Unit + def apply(x: into[Text]): Unit val x: ConvArg = f(3)(_) diff --git a/tests/pos/into-separate/Test_2.scala b/tests/pos/into-separate/Test_2.scala new file mode 100644 index 000000000000..afcd46522229 --- /dev/null +++ b/tests/pos/into-separate/Test_2.scala @@ -0,0 +1,16 @@ +//> using options -feature +package test +import language.experimental.into + +object Test: + given Conversion[Int, C] = C(_) + + def f(x: T) = () + f(1) // ok + + given stringToKeyword: Conversion[String, Keyword] = Keyword(_) + + val dclKeywords = Set[Keyword]("def", "val") // ok + val keywords = dclKeywords + "if" + "then" + "else" // ok + + diff --git a/tests/pos/into-separate/classes_1.scala b/tests/pos/into-separate/classes_1.scala new file mode 100644 index 000000000000..b39bfe840e3e --- /dev/null +++ b/tests/pos/into-separate/classes_1.scala @@ -0,0 +1,8 @@ +package test + +import language.experimental.into + +into trait T +class C(x: Int) extends T + +into class Keyword(str: String) diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index 36075f0a2cee..6a867233b49b 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -50,7 +50,8 @@ val experimentalDefinitionInLibrary = Set( "scala.caps.use", //// New feature: into - "scala.annotation.into", + "scala.Conversion$.into", + "scala.Conversion$.underlying", "scala.annotation.internal.$into", //// New feature: Macro annotations diff --git a/tests/run/Parser.scala b/tests/run/Parser.scala index 48c3af73ecec..33b3379d2c76 100644 --- a/tests/run/Parser.scala +++ b/tests/run/Parser.scala @@ -1,4 +1,5 @@ import language.experimental.into +import Conversion.into type Input = List[String] @@ -12,7 +13,7 @@ def empty[T](x: T) = Parser(in => Success(x, in)) def fail(msg: String) = Parser(in => Failure(msg)) class ParserOps[T](p: Parser[T]): - def ~ [U](q: => into Parser[U]): Parser[(T, U)] = Parser(in => + def ~ [U](q: => into[Parser[U]]): Parser[(T, U)] = Parser(in => p.parse(in) match case Success(x, in1) => q.parse(in1) match @@ -20,7 +21,7 @@ class 
ParserOps[T](p: Parser[T]): case fail: Failure => fail case fail: Failure => fail ) - def | [U](q: => into Parser[T]): Parser[T] = Parser(in => + def | [U](q: => into[Parser[T]]): Parser[T] = Parser(in => p.parse(in) match case s: Success[_] => s case fail: Failure => q.parse(in) @@ -30,9 +31,9 @@ class ParserOps[T](p: Parser[T]): case Success(x, in1) => Success(f(x), in1) case fail: Failure => fail ) - def ~> [U](q: => into Parser[U]): Parser[U] = + def ~> [U](q: => into[Parser[U]]): Parser[U] = (p ~ q).map(_(1)) - def <~ [U](q: => into Parser[U]): Parser[T] = + def <~ [U](q: => into[Parser[U]]): Parser[T] = (p ~ q).map(_(0)) def parseAll(in: Input): ParseResult[T] = p.parse(in) match @@ -64,10 +65,10 @@ def token(p: String => Boolean, expected: String): Parser[String] = Parser { def token(str: String): Parser[String] = token(str == _, s"`$str`") -def opt[T](p: into Parser[T]): Parser[Option[T]] = +def opt[T](p: into[Parser[T]]): Parser[Option[T]] = p.map(Some(_)) | empty(None) -def rep[T](p: into Parser[T]): Parser[List[T]] = +def rep[T](p: into[Parser[T]]): Parser[List[T]] = (p ~ rep(p)).map(_ :: _) | empty(Nil) diff --git a/tests/run/convertible.scala b/tests/run/convertible.scala index 7a92964a1f31..affb0698ed70 100644 --- a/tests/run/convertible.scala +++ b/tests/run/convertible.scala @@ -1,6 +1,7 @@ //> using options -feature -Xfatal-warnings import language.experimental.into +import Conversion.into class Text(val str: String) @@ -8,20 +9,20 @@ given Conversion[String, Text] = Text(_) @main def Test = - def f(xxx: into Text, yyy: => into Text, zs: (into Text)*) = + def f(xxx: into[Text], yyy: => into[Text], zs: into[Text]*) = println(s"${xxx.str} ${yyy.str} ${zs.map(_.str).mkString(" ")}") f("abc", "def") // ok f("abc", "def", "xyz", "uvw") // ok f("abc", "def", "xyz", Text("uvw")) // ok - def g(x: () => into Text) = + def g(x: () => into[Text]) = println(x().str) g(() => "hi") trait C[X]: - def f(x: into X) = x + def f(x: into[X]) = x class D[X] extends C[X] diff --git a/tests/warn/convertible.scala b/tests/warn/convertible.scala index b701cac32cec..655d935adb59 100644 --- a/tests/warn/convertible.scala +++ b/tests/warn/convertible.scala @@ -1,6 +1,7 @@ //> using options -feature import language.experimental.into +import Conversion.into class Text(val str: String) @@ -15,10 +16,10 @@ object Test: f("abc", "def", "xyz", "uvw") // warn // warn // warn // warn f("abc", "def", "xyz", Text("uvw")) // warn // warn // warn - def g(x: into Text) = + def g(x: into[Text]) = println(x.str) - def g2(x: into Text) = + def g2(x: into[Text]) = println(x.str) def g3(x: Text) = @@ -26,19 +27,19 @@ object Test: g("abc") // OK val gg = g - gg("abc") // warn, eta expansion does not preserve into + gg("abc") // ok val c1 = if ??? then g else g2 - c1("abc") // warn, eta expansion does not preserve into + c1("abc") // ok, lub type = into[Text] => Unit val c2 = if ??? then g else g3 - c2("abc") // warn, eta expansion does not preserve into + c2("abc") // warn, lub type is Text => Unit val c3 = if ??? 
then g3 else g - c3("abc") // warn, eta expansion does not preserve into + c3("abc") // warn, lub type is Text => Unit def h1[X](x: X)(y: X): Unit = () - def h(x: into Text) = + def h(x: into[Text]) = val y = h1(x) - y("abc") // warn, eta expansion does not preserve into \ No newline at end of file + y("abc") // ok \ No newline at end of file diff --git a/tests/warn/into-as-mod.scala b/tests/warn/into-as-mod.scala new file mode 100644 index 000000000000..587bb375e4d1 --- /dev/null +++ b/tests/warn/into-as-mod.scala @@ -0,0 +1,29 @@ +//> using options -feature + +import language.experimental.into +import Conversion.into + +into trait T +class C(x: Int) extends T + +object K: + into opaque type A = C + given Conversion[Int, A] = C(_) + +object Test: + given Conversion[Int, C] = C(_) + + def f(x: T) = () + def g(x: C) = () + f(1) // ok + g(1) // warn + + import K.* + def h(x: A) = () + h(1) + + into class Keyword(str: String) + given stringToKeyword: Conversion[String, Keyword] = Keyword(_) + + val dclKeywords = Set[Keyword]("def", "val") // ok + val keywords = dclKeywords + "if" + "then" + "else" // ok