diff --git a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala
index 6f18b6ee..c5613545 100644
--- a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala
+++ b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala
@@ -6,15 +6,16 @@ import aqua.parser.{Ast, ParserError}
 import aqua.semantics.header.Picker.setImportPaths
 import aqua.semantics.header.{HeaderHandler, Picker}
 import aqua.semantics.{FileId, SemanticError, Semantics}
+
 import cats.arrow.FunctionK
 import cats.data.*
 import cats.syntax.either.*
 import cats.syntax.functor.*
 import cats.syntax.show.*
-import cats.{~>, Comonad, Monad, Monoid, Order, Show}
+import cats.{Comonad, Monad, Monoid, Order, Show, ~>}
 import scribe.Logging
 
-class AquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad, C: Monoid: Picker](
+class AquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad, C: Picker](
   headerHandler: HeaderHandler[S, C],
   semantics: Semantics[S, C]
 ) extends Logging {
@@ -38,11 +39,11 @@
       // Analyze the body, with prepared initial context
       _ = logger.trace("semantic processing...")
       processed <- semantics
-        .process(body, headerSem.initCtx)
+        .process(body, headerSem.init)
         .toCompileRes
       // Handle exports, declares - finalize the resulting context
       rc <- headerSem
-        .finCtx(processed)
+        .fin(processed)
         .toCompileRes
     } yield rc.setImportPaths(importPaths)
diff --git a/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala b/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala
index 67d4036e..5f7f6177 100644
--- a/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala
+++ b/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala
@@ -44,24 +44,14 @@
   private def getAquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad](
     config: AquaCompilerConf
   ): AquaCompiler[F, E, I, S, RawContext] = {
-    given Monoid[RawContext] = RawContext
-      .implicits(
-        RawContext.blank.copy(
-          parts = Chain
-            .fromSeq(config.constants ++ ConstantRaw.defaultConstants(config.relayVarName))
-            .map(const => RawContext.blank -> const)
-        )
-      )
-      .rawContextMonoid
-
-    val semantics = new RawSemantics[S]()
-
     given LocationsAlgebra[S, State[RawContext, *]] =
       DummyLocationsInterpreter()
 
-    new AquaCompiler[F, E, I, S, RawContext](
+    val constants = config.constants ++ ConstantRaw.defaultConstants(config.relayVarName)
+
+    new AquaCompiler(
       new HeaderHandler(),
-      semantics
+      new RawSemantics(constants)
     )
   }
diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala
index aa6d87c5..b0f715b7 100644
--- a/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala
@@ -20,41 +20,14 @@
   private def getLspAquaCompiler[F[_]: Monad, E, I: FileId, S[_]: Comonad](
     config: AquaCompilerConf
   ): AquaCompiler[F, E, I, S, LspContext[S]] = {
-    given Monoid[LspContext[S]] = LspContext
-      .implicits(
-        LspContext.blank.copy(raw =
-          RawContext.blank.copy(
-            parts = Chain
-              .fromSeq(config.constants ++ ConstantRaw.defaultConstants(config.relayVarName))
-              .map(const => RawContext.blank -> const)
-          )
-        )
-      )
-      .lspContextMonoid
-
-    given Monoid[HeaderSem[S, LspContext[S]]] with {
-      override def empty: HeaderSem[S, LspContext[S]] =
-        HeaderSem(Monoid[LspContext[S]].empty, (c, _) => validNec(c))
-
-      override def combine(
-        a: HeaderSem[S, LspContext[S]],
-        b: HeaderSem[S, LspContext[S]]
-      ): HeaderSem[S, LspContext[S]] = {
-        HeaderSem(
-          a.initCtx |+| b.initCtx,
-          (c, i) => a.finInitCtx(c, i).andThen(b.finInitCtx(_, i))
-        )
-      }
-    }
-
-    val semantics = new LspSemantics[S]()
-
     given LocationsAlgebra[S, State[LspContext[S], *]] =
       LocationsInterpreter[S, LspContext[S]]()
 
-    new AquaCompiler[F, E, I, S, LspContext[S]](
-      new HeaderHandler(),
-      semantics
+    val constants = config.constants ++ ConstantRaw.defaultConstants(config.relayVarName)
+
+    new AquaCompiler(
+      headerHandler = new HeaderHandler(),
+      semantics = new LspSemantics(constants)
     )
   }
diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala
index b3c0ed70..e531d9f7 100644
--- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala
@@ -3,10 +3,10 @@ package aqua.lsp
 import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token}
 import aqua.raw.{RawContext, RawPart}
 import aqua.semantics.header.Picker
+import aqua.semantics.rules.locations.LocationsState
 import aqua.semantics.rules.locations.{TokenLocation, VariableInfo}
 import aqua.semantics.{SemanticError, SemanticWarning}
 import aqua.types.{AbilityType, ArrowType, Type}
-import aqua.semantics.rules.locations.LocationsState
 
 import cats.syntax.monoid.*
 import cats.{Monoid, Semigroup}
@@ -32,8 +32,10 @@ object LspContext {
 
   def blank[S[_]]: LspContext[S] = LspContext[S](raw = RawContext())
 
-  given [S[_]]: Semigroup[LspContext[S]] =
-    (x: LspContext[S], y: LspContext[S]) =>
+  given [S[_]]: Monoid[LspContext[S]] with {
+    override def empty = blank[S]
+
+    override def combine(x: LspContext[S], y: LspContext[S]) =
       LspContext[S](
         raw = x.raw |+| y.raw,
        abDefinitions = x.abDefinitions ++ y.abDefinitions,
@@ -45,21 +47,6 @@
         warnings = x.warnings ++ y.warnings,
         importPaths = x.importPaths ++ y.importPaths
       )
-
-  trait Implicits[S[_]] {
-    val lspContextMonoid: Monoid[LspContext[S]]
-  }
-
-  def implicits[S[_]](init: LspContext[S]): Implicits[S] = new Implicits[S] {
-
-    override val lspContextMonoid: Monoid[LspContext[S]] = new Monoid[LspContext[S]] {
-      override def empty: LspContext[S] = init
-
-      override def combine(x: LspContext[S], y: LspContext[S]): LspContext[S] = {
-        Semigroup[LspContext[S]].combine(x, y)
-      }
-    }
-  }
+  }
 
   given [S[_]]: Picker[LspContext[S]] with {
@@ -85,13 +72,11 @@
     override def addPart(ctx: LspContext[S], part: (LspContext[S], RawPart)): LspContext[S] =
       ctx.copy(raw = ctx.raw.addPart(part._1.raw -> part._2))
 
-    override def setInit(ctx: LspContext[S], ctxInit: Option[LspContext[S]]): LspContext[S] =
-      ctx.copy(raw = ctx.raw.setInit(ctxInit.map(_.raw)))
-
-    override def all(ctx: LspContext[S]): Set[String] =
-      ctx.raw.all
     override def module(ctx: LspContext[S]): Option[String] = ctx.raw.module
-    override def declares(ctx: LspContext[S]): Set[String] = ctx.raw.declares
+
+    override def declaredNames(ctx: LspContext[S]): Set[String] = ctx.raw.declaredNames
+
+    override def allNames(ctx: LspContext[S]): Set[String] = ctx.raw.allNames
 
     override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] =
       ctx.copy(
        )
      )
 
-    override def setImportPaths(ctx: LspContext[S], importPaths: Map[String, String]): LspContext[S] =
+    override def setImportPaths(
+      ctx: LspContext[S],
+      importPaths: Map[String, String]
+    ): LspContext[S] =
       ctx.copy(importPaths = importPaths)
 
     override def setModule(
       ctx: LspContext[S],
-      name: Option[String],
+      name: String
+    ): LspContext[S] =
+      ctx.copy(raw = ctx.raw.setModule(name))
+
+    override def setDeclares(
+      ctx: LspContext[S],
       declares: Set[String]
     ): LspContext[S] =
-      ctx.copy(raw = ctx.raw.setOptModule(name, declares))
+      ctx.copy(raw = ctx.raw.setDeclares(declares))
 
     override def setExports(
       ctx: LspContext[S],
@@ -154,22 +147,20 @@
     override def pickHeader(ctx: LspContext[S]): LspContext[S] =
       ctx.copy(raw = ctx.raw.pickHeader)
 
-    override def pickDeclared(
-      ctx: LspContext[S]
-    )(using Semigroup[LspContext[S]]): LspContext[S] =
+    override def pickDeclared(ctx: LspContext[S]): LspContext[S] =
       ctx.copy(raw = ctx.raw.pickDeclared)
   }
 
   /*
   NOTE: This instance is used to generate LocationsAlgebra[S, State[LspContext[S], *]]
-  to reuse the code from the body semantics in the header semantics 
+  to reuse the code from the body semantics in the header semantics
   */
   given [S[_]]: Lens[LspContext[S], LocationsState[S]] = {
-    val get: LspContext[S] => LocationsState[S] = 
+    val get: LspContext[S] => LocationsState[S] =
       ctx => LocationsState(ctx.variables)
     val replace: LocationsState[S] => LspContext[S] => LspContext[S] =
       locs => ctx => ctx.copy(variables = locs.variables)
-    
+
     Lens(get)(replace)
   }
 }
diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala
index 92a0bc5c..f988996c 100644
--- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala
@@ -3,6 +3,8 @@
 import aqua.parser.Ast
 import aqua.parser.head.{ImportExpr, ImportFromExpr, UseExpr, UseFromExpr}
 import aqua.parser.lexer.{LiteralToken, Token}
+import aqua.raw.ConstantRaw
+import aqua.semantics.header.Picker.*
 import aqua.semantics.rules.locations.LocationsState
 import aqua.semantics.{CompilerState, RawSemantics, SemanticError, SemanticWarning, Semantics}
@@ -18,7 +20,9 @@ import cats.syntax.reducible.*
 
 import monocle.Lens
 import monocle.macros.GenLens
 
-class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
+class LspSemantics[S[_]](
+  constants: List[ConstantRaw] = Nil
+) extends Semantics[S, LspContext[S]] {
 
   private def getImportTokens(ast: Ast[S]): List[LiteralToken[S]] =
     ast.head.collect {
@@ -38,11 +42,12 @@
     init: LspContext[S]
   ): ProcessResult = {
 
-    val rawState = CompilerState.init[S](init.raw)
+    val withConstants = init.addFreeParts(constants)
+    val rawState = CompilerState.init[S](withConstants.raw)
 
     val initState = rawState.copy(
       locations = rawState.locations.copy(
-        variables = rawState.locations.variables ++ init.variables
+        variables = rawState.locations.variables ++ withConstants.variables
       )
     )
@@ -55,7 +60,8 @@
       new LocationsInterpreter[S, CompilerState[S]]()
 
     RawSemantics
-      .interpret(ast, initState, init.raw)
+      .interpret(ast, withConstants.raw)
+      .run(initState)
       .map { case (state, ctx) =>
         LspContext(
           raw = ctx,
diff --git a/model/raw/src/main/scala/aqua/raw/Raw.scala b/model/raw/src/main/scala/aqua/raw/Raw.scala
index fdb6084d..f6c5c9d8 100644
--- a/model/raw/src/main/scala/aqua/raw/Raw.scala
+++ b/model/raw/src/main/scala/aqua/raw/Raw.scala
@@ -1,6 +1,8 @@
 package aqua.raw
 
+import aqua.raw.RawPart.contextPart
 import aqua.raw.ops.{FuncOp, RawTag}
+
 import cats.Semigroup
 import cats.syntax.semigroup.*
@@ -13,25 +15,17 @@ object Raw {
 
   case class Empty(log: String) extends Raw
 
-  given Semigroup[Raw] with
-
-    import RawPart.RPSMonoid
-    import RawPart.contextPart
+  given Semigroup[Raw] with {
 
     override def combine(x: Raw, y: Raw): Raw =
       (x, y) match {
-        case (l: FuncOp, r: FuncOp) =>
-          FuncOp(l.tree |+| r.tree)
+        case (l: FuncOp, r: FuncOp) => FuncOp(l.tree |+| r.tree)
 
         case (l: Empty, r: Empty) => Empty(l.log + " |+| " + r.log)
         case (_: Empty, r) => r
         case (l, _: Empty) => l
 
-        case (l, r) =>
-          RPSMonoid.combine(
-            contextPart(l),
-            contextPart(r)
-          )
-
+        case (l, r) => contextPart(l) |+| contextPart(r)
       }
+  }
 }
diff --git a/model/raw/src/main/scala/aqua/raw/RawContext.scala b/model/raw/src/main/scala/aqua/raw/RawContext.scala
index 6f4a5f2b..857b6377 100644
--- a/model/raw/src/main/scala/aqua/raw/RawContext.scala
+++ b/model/raw/src/main/scala/aqua/raw/RawContext.scala
@@ -15,8 +15,6 @@ import scala.collection.immutable.SortedMap
 /**
  * RawContext is essentially a model of the source code – the first one we get to from the AST.
  *
- * @param init
- *   Initial context – collected imports, needed for re-exporting in AquaContext later
 * @param module
 *   This file's module name
 * @param declares
@@ -29,7 +27,6 @@ import scala.collection.immutable.SortedMap
 *   Abilities (e.g. used contexts) available in the scope
 */
 case class RawContext(
-  init: Option[RawContext] = None,
   module: Option[String] = None,
   declares: Set[String] = Set.empty,
   exports: Map[String, Option[String]] = Map.empty,
@@ -89,16 +86,11 @@ case class RawContext(
   lazy val allDefinedAbilities: Map[String, AbilityType] =
     all(_.definedAbilities)
 
-  def `type`(name: String): Option[StructType] =
-    NonEmptyMap
-      .fromMap(
-        SortedMap.from(
-          collectPartsMap {
-            case rp if declares(rp.name) || module.isEmpty => rp.rawPartType
-          }
-        )
-      )
-      .map(StructType(name, _))
+  lazy val allNames: Set[String] =
+    parts.map { case (_, p) => p.name }.toList.toSet
+
+  lazy val declaredNames: Set[String] =
+    allNames.filter(declares.contains)
 
   override def toString: String =
     s"""|module: ${module.getOrElse("unnamed")}
@@ -113,29 +105,17 @@ object RawContext {
 
   val blank: RawContext = RawContext()
 
-  given Semigroup[RawContext] =
-    (x: RawContext, y: RawContext) =>
+  given Monoid[RawContext] with {
+
+    override def empty: RawContext = blank
+
+    override def combine(x: RawContext, y: RawContext) =
       RawContext(
-        x.init.flatMap(xi => y.init.map(xi |+| _)) orElse x.init orElse y.init,
         x.module orElse y.module,
         x.declares ++ y.declares,
         x.exports ++ y.exports,
         x.parts ++ y.parts,
         x.abilities ++ y.abilities
       )
-
-  trait Implicits {
-    val rawContextMonoid: Monoid[RawContext]
-  }
-
-  def implicits(init: RawContext): Implicits = new Implicits {
-
-    override val rawContextMonoid: Monoid[RawContext] = new Monoid[RawContext] {
-      override def empty: RawContext = init
-
-      override def combine(x: RawContext, y: RawContext): RawContext =
-        Semigroup[RawContext].combine(x, y)
-    }
-  }
+  }
 }
diff --git a/model/raw/src/main/scala/aqua/raw/RawPart.scala b/model/raw/src/main/scala/aqua/raw/RawPart.scala
index a072a057..5505eb7f 100644
--- a/model/raw/src/main/scala/aqua/raw/RawPart.scala
+++ b/model/raw/src/main/scala/aqua/raw/RawPart.scala
@@ -1,8 +1,9 @@
 package aqua.raw
 
+import aqua.types.Type
+
 import cats.Monoid
 import cats.data.Chain
-import aqua.types.Type
 
 trait RawPart extends Raw {
   def name: String
@@ -16,13 +17,11 @@ object RawPart {
 
   case class Parts(parts: Chain[RawPart]) extends Raw
 
-  implicit object RPSMonoid extends Monoid[Parts] {
+  given Monoid[Parts] with {
 
     override def empty: Parts = Parts(Chain.empty)
 
     override def combine(x: Parts, y: Parts): Parts =
-      Parts(
-        x.parts ++ y.parts
-      )
+      Parts(x.parts ++ y.parts)
   }
 
   def contextPart(raw: Raw): Parts = raw match {
diff --git a/model/src/main/scala/aqua/model/AquaContext.scala b/model/src/main/scala/aqua/model/AquaContext.scala
index 9be94379..84ccb107 100644
--- a/model/src/main/scala/aqua/model/AquaContext.scala
+++ b/model/src/main/scala/aqua/model/AquaContext.scala
@@ -184,19 +184,15 @@ object AquaContext extends Logging {
       .foldLeft[(AquaContext, Cache)] {
         // Laziness unefficiency happens here
         logger.trace(s"raw: ${rawContext.module}")
-        val (i, c) =
-          rawContext.init
-            .map(fromRawContext(_, cache))
-            .getOrElse(blank -> cache)
 
         val (abs, absCache) =
-          rawContext.abilities.foldLeft[(Map[String, AquaContext], Cache)]((Map.empty, c)) {
+          rawContext.abilities.foldLeft[(Map[String, AquaContext], Cache)]((Map.empty, cache)) {
             case ((acc, cAcc), (k, v)) =>
               val (abCtx, abCache) = fromRawContext(v, cAcc)
               (acc + (k -> abCtx), abCache)
           }
 
-        (i |+| blank.copy(abilities = abs)) -> absCache
+        blank.copy(abilities = abs) -> absCache
       } {
         case ((ctx, ctxCache), (partContext, c: ConstantRaw)) =>
           logger.trace("Adding constant " + c.name)
diff --git a/semantics/src/main/scala/aqua/semantics/CompilerState.scala b/semantics/src/main/scala/aqua/semantics/CompilerState.scala
index ef7783d5..df913305 100644
--- a/semantics/src/main/scala/aqua/semantics/CompilerState.scala
+++ b/semantics/src/main/scala/aqua/semantics/CompilerState.scala
@@ -2,8 +2,7 @@ package aqua.semantics
 
 import aqua.mangler.ManglerState
 import aqua.parser.lexer.Token
-import aqua.raw.Raw
-import aqua.raw.RawContext
+import aqua.raw.{Raw, RawContext}
 import aqua.semantics.rules.abilities.AbilitiesState
 import aqua.semantics.rules.definitions.DefinitionsState
 import aqua.semantics.rules.locations.LocationsState
@@ -33,7 +32,6 @@ case class CompilerState[S[_]](
 }
 
 object CompilerState {
-  type St[S[_]] = State[CompilerState[S], Raw]
 
   def init[F[_]](ctx: RawContext): CompilerState[F] =
     CompilerState(
@@ -60,28 +58,4 @@ object CompilerState {
   given [S[_]]: Lens[CompilerState[S], DefinitionsState[S]] =
     GenLens[CompilerState[S]](_.definitions)
 
-  given [S[_]]: Monoid[St[S]] with {
-    override def empty: St[S] = State.pure(Raw.Empty("compiler state monoid empty"))
-
-    override def combine(x: St[S], y: St[S]): St[S] = for {
-      a <- x.get
-      b <- y.get
-      _ <- State.set(
-        CompilerState[S](
-          a.report |+| b.report,
-          a.mangler |+| b.mangler,
-          a.names |+| b.names,
-          a.abilities |+| b.abilities,
-          a.types |+| b.types,
-          locations = a.locations |+| b.locations
-        )
-      )
-      am <- x
-      ym <- y
-    } yield {
-      // println(s"MONOID COMBINE $am $ym")
-      am |+| ym
-    }
-  }
-
 }
diff --git a/semantics/src/main/scala/aqua/semantics/RawSemantics.scala b/semantics/src/main/scala/aqua/semantics/RawSemantics.scala
index 922823db..8787f570 100644
--- a/semantics/src/main/scala/aqua/semantics/RawSemantics.scala
+++ b/semantics/src/main/scala/aqua/semantics/RawSemantics.scala
@@ -4,7 +4,7 @@ import aqua.errors.Errors.internalError
 import aqua.parser.lexer.{LiteralToken, Token}
 import aqua.parser.{Ast, Expr}
 import aqua.raw.ops.*
-import aqua.raw.{Raw, RawContext, RawPart}
+import aqua.raw.{ConstantRaw, Raw, RawContext, RawPart}
 import aqua.semantics.header.Picker
 import aqua.semantics.header.Picker.*
 import aqua.semantics.rules.abilities.{AbilitiesAlgebra, AbilitiesInterpreter, AbilitiesState}
@@ -28,8 +28,8 @@ import cats.syntax.traverse.*
 import cats.{Eval, Monad}
 
 import scribe.Logging
 
-class RawSemantics[S[_]](using
-  Picker[RawContext]
+class RawSemantics[S[_]](
+  constants: List[ConstantRaw] = Nil
 ) extends Semantics[S, RawContext] {
 
   override def process(
@@ -40,8 +40,11 @@
     given LocationsAlgebra[S, State[CompilerState[S], *]] =
       new DummyLocationsInterpreter[S, CompilerState[S]]()
 
+    val withConstants = init.addFreeParts(constants)
+
     RawSemantics
-      .interpret(ast, CompilerState.init(init), init)
+      .interpret(ast, withConstants)
+      .run(CompilerState.init(withConstants))
       .map { case (state, ctx) =>
         EitherT(
           Writer
@@ -315,48 +318,22 @@ object RawSemantics extends Logging {
       .map(_.raw)
   }
 
-  private def astToState[S[_]](ast: Ast[S])(using
-    locations: LocationsAlgebra[S, Interpreter[S, *]]
-  ): Interpreter[S, Raw] =
-    transpile[S](ast)
-
   // If there are any errors, they're inside CompilerState[S]
   def interpret[S[_]](
     ast: Ast[S],
-    initState: CompilerState[S],
     init: RawContext
   )(using
     LocationsAlgebra[S, Interpreter[S, *]]
-  ): Eval[(CompilerState[S], RawContext)] =
-    astToState[S](ast)
-      .run(initState)
-      .map {
-        case (state, _: Raw.Empty) =>
-          // No `parts`, but has `init`
-          (
-            state,
-            RawContext.blank.copy(
-              init = Some(init.copy(module = init.module.map(_ + "|init")))
-                .filter(_ != RawContext.blank)
-            )
-          )
+  ): Interpreter[S, RawContext] =
+    transpile(ast).map {
+      case raw: (Raw.Empty | RawPart | RawPart.Parts) =>
+        val parts = raw match {
+          case rps: RawPart.Parts => rps.parts.toList
+          case rp: RawPart => List(rp)
+          case _: Raw.Empty => List.empty
+        }
 
-        case (state, part: (RawPart | RawPart.Parts)) =>
-          state -> RawPart
-            .contextPart(part)
-            .parts
-            .foldLeft(
-              RawContext.blank.copy(
-                init = Some(init.copy(module = init.module.map(_ + "|init")))
-                  .filter(_ != RawContext.blank)
-              )
-            ) { case (ctx, p) =>
-              ctx.copy(parts = ctx.parts :+ (ctx -> p))
-            }
-
-        case (_, m) =>
-          internalError(
-            s"Unexpected Raw ($m)"
-          )
-      }
+        init.addParts(parts)
+      case m => internalError(s"Unexpected Raw ($m)")
+    }
 }
diff --git a/semantics/src/main/scala/aqua/semantics/header/ExportSem.scala b/semantics/src/main/scala/aqua/semantics/header/ExportSem.scala
index 2684f1bc..a5e44fed 100644
--- a/semantics/src/main/scala/aqua/semantics/header/ExportSem.scala
+++ b/semantics/src/main/scala/aqua/semantics/header/ExportSem.scala
@@ -21,7 +21,6 @@ import cats.syntax.validated.*
 import cats.{Comonad, Monoid}
 
 class ExportSem[S[_]: Comonad, C](expr: ExportExpr[S])(using
-  acm: Monoid[C],
   picker: Picker[C],
   locations: LocationsAlgebra[S, State[C, *]]
 ) {
@@ -56,7 +55,7 @@
     ).validNec
   }
 
-  private def finSem(ctx: C, initCtx: C): ValidatedNec[SemanticError[S], C] = {
+  private def finSem(ctx: C): ValidatedNec[SemanticError[S], C] = {
     val pubs = expr.pubs
       .map(
         _.bimap(
@@ -65,23 +64,21 @@
         ).merge
       )
 
-    val tokens = pubs.toList.flatMap {
-      case ((token, name), (renameToken, _)) =>
-        renameToken.map(name -> _).toList :+ (name, token)
+    val tokens = pubs.toList.flatMap { case ((token, name), (renameToken, _)) =>
+      renameToken.map(name -> _).toList :+ (name, token)
     }
 
-    val ctxWithExportLocations = ctx.addOccurences(tokens)
-    val sumCtx = initCtx |+| ctxWithExportLocations
+    val resCtx = ctx.addOccurences(tokens)
 
     pubs.map { case ((token, name), (_, rename)) =>
-      sumCtx
+      resCtx
        .pick(name, rename, declared = false)
        .as(Map(name -> rename))
        .toValid(
          error(
            token,
            s"Files has no $name declaration or import, " +
-              s"cannot export, available functions: ${sumCtx.funcNames.mkString(", ")}"
+              s"cannot export, available functions: ${resCtx.funcNames.mkString(", ")}"
          )
        )
        .ensure(
@@ -89,11 +86,11 @@
          error(
            token,
            s"Can not export '$name' as it is an ability"
          )
-        )(_ => !sumCtx.isAbility(name))
-        .toValidatedNec <* exportFuncChecks(sumCtx, token, name)
+        )(_ => !resCtx.isAbility(name))
+        .toValidatedNec <* exportFuncChecks(resCtx, token, name)
     }
-      .prepend(validNec(sumCtx.exports))
+      .prepend(validNec(resCtx.exports))
      .combineAll
-      .map(sumCtx.setExports)
+      .map(resCtx.setExports)
   }
 }
diff --git a/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala b/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala
index 21e74076..832d66e5 100644
--- a/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala
+++ b/semantics/src/main/scala/aqua/semantics/header/HeaderHandler.scala
@@ -4,8 +4,8 @@ import aqua.parser.Ast
 import aqua.parser.head.*
 import aqua.parser.lexer.{Ability, Token}
 import aqua.semantics.header.Picker.*
-import aqua.semantics.{HeaderError, SemanticError}
 import aqua.semantics.rules.locations.LocationsAlgebra
+import aqua.semantics.{HeaderError, SemanticError}
 
 import cats.data.*
 import cats.data.Validated.*
@@ -20,7 +20,7 @@ class HeaderHandler[S[_]: Comonad, C](using
   acm: Monoid[C],
   headMonoid: Monoid[HeaderSem[S, C]],
   picker: Picker[C],
-  // NOTE: This typeclass is here to reuse 
+  // NOTE: This typeclass is here to reuse
   // the code from the body semantics
   locations: LocationsAlgebra[S, State[C, *]]
 ) {
@@ -56,7 +56,7 @@
        .toValidNec(
          error(
            token,
-            s"Imported file `declares ${ctx.declares.mkString(", ")}`, no $name declared. Try adding `declares $name` to that file."
+            s"Imported file `declares ${ctx.declaredNames.mkString(", ")}`, no $name declared. Try adding `declares $name` to that file."
          )
        )
     }
@@ -87,38 +87,32 @@
       case f @ ImportExpr(_) =>
         // Import everything from a file
-        resolve(f).map(fc => HeaderSem(fc, (c, _) => validNec(c)))
+        resolve(f).map(HeaderSem.fromInit)
 
       case f @ ImportFromExpr(_, _) =>
         // Import, map declarations
         resolve(f)
           .andThen(getFrom(f, _))
-          .map { ctx =>
-            HeaderSem(ctx, (c, _) => validNec(c))
-          }
+          .map(HeaderSem.fromInit)
 
       case f @ UseExpr(_, asModule) =>
         // Import, move into a module scope
         resolve(f)
           .andThen(toModule(_, f.token, asModule))
-          .map { fc =>
-            HeaderSem(fc, (c, _) => validNec(c))
-          }
+          .map(HeaderSem.fromInit)
 
       case f @ UseFromExpr(_, _, asModule) =>
         // Import, cherry-pick declarations, move to a module scope
         resolve(f)
           .andThen(getFrom(f, _))
           .andThen(toModule(_, f.token, Some(asModule)))
-          .map { fc =>
-            HeaderSem(fc, (c, _) => validNec(c))
-          }
+          .map(HeaderSem.fromInit)
 
       case ee: ExportExpr[S] =>
         ExportSem(ee).headerSem
 
       case f: FilenameExpr[S] =>
-        resolve(f).map(fc => HeaderSem(fc, (c, _) => validNec(c)))
+        resolve(f).map(HeaderSem.fromInit)
     }
 
     val (module, other) =
diff --git a/semantics/src/main/scala/aqua/semantics/header/HeaderSem.scala b/semantics/src/main/scala/aqua/semantics/header/HeaderSem.scala
index 1343b45a..f5a1217d 100644
--- a/semantics/src/main/scala/aqua/semantics/header/HeaderSem.scala
+++ b/semantics/src/main/scala/aqua/semantics/header/HeaderSem.scala
@@ -2,34 +2,44 @@ package aqua.semantics.header
 
 import aqua.raw.RawContext
 import aqua.semantics.SemanticError
-import cats.{Comonad, Monoid}
+
 import cats.data.*
-import cats.syntax.monoid.*
 import cats.data.Validated.validNec
+import cats.syntax.monoid.*
+import cats.syntax.validated.*
+import cats.{Comonad, Monoid}
 
+/**
+ * Semantics for handling a header expression
+ * (e.g. `aqua Name declares *`, `export`, `use` etc.)
+ *
+ * @param init Initial context that will be combined with others and passed to body semantics
+ * @param fin Finalization function to which context after body semantics will be passed
+ */
 case class HeaderSem[S[_], C](
-  initCtx: C,
-  finInitCtx: (C, C) => ValidatedNec[SemanticError[S], C]
-) {
-
-  def finCtx: C => ValidatedNec[SemanticError[S], C] =
-    finInitCtx(_, initCtx)
-}
+  init: C,
+  fin: C => ValidatedNec[SemanticError[S], C]
+)
 
 object HeaderSem {
-  given [S[_]: Comonad](using
-    rc: Monoid[RawContext]
-  ): Monoid[HeaderSem[S, RawContext]] with {
-    override def empty: HeaderSem[S, RawContext] = HeaderSem(rc.empty, (c, _) => validNec(c))
+  def fromInit[S[_], C](init: C): HeaderSem[S, C] =
+    HeaderSem(init, c => c.validNec)
+
+  given [S[_]: Comonad, C](using
+    rc: Monoid[C]
+  ): Monoid[HeaderSem[S, C]] with {
+
+    override def empty: HeaderSem[S, C] =
+      HeaderSem.fromInit(rc.empty)
 
     override def combine(
-      a: HeaderSem[S, RawContext],
-      b: HeaderSem[S, RawContext]
-    ): HeaderSem[S, RawContext] =
+      a: HeaderSem[S, C],
+      b: HeaderSem[S, C]
+    ): HeaderSem[S, C] =
       HeaderSem(
-        a.initCtx |+| b.initCtx,
-        (c, i) => a.finInitCtx(c, i).andThen(b.finInitCtx(_, i))
+        a.init |+| b.init,
+        c => a.fin(c) |+| b.fin(c)
       )
   }
 }
diff --git a/semantics/src/main/scala/aqua/semantics/header/ModuleSem.scala b/semantics/src/main/scala/aqua/semantics/header/ModuleSem.scala
index 2bb0fb82..d7756a94 100644
--- a/semantics/src/main/scala/aqua/semantics/header/ModuleSem.scala
+++ b/semantics/src/main/scala/aqua/semantics/header/ModuleSem.scala
@@ -16,46 +16,37 @@ import cats.syntax.validated.*
 import cats.{Comonad, Monoid}
 
 class ModuleSem[S[_]: Comonad, C: Picker](expr: ModuleExpr[S])(using
-  acm: Monoid[C],
   locations: LocationsAlgebra[S, State[C, *]]
 ) {
 
   import expr.*
 
   def headerSem: Res[S, C] = {
-    val shouldDeclare = declareNames.map(_.value).toSet ++ declareCustom.map(_.value)
-
     lazy val sem = HeaderSem(
       // Save module header info
-      acm.empty.setModule(
-        name.value,
-        shouldDeclare
-      ),
-      (ctx, initCtx) =>
-        val sumCtx = ctx |+| initCtx
+      Picker[C].blank.setModule(name.value),
+      ctx =>
        // When file is handled, check that all the declarations exists
-        if (declareAll.nonEmpty)
-          val allDeclared = ctx.all ++ initCtx.all
-          sumCtx.setModule(name.value, declares = allDeclared).validNec
+        if (declareAll.nonEmpty) ctx.setDeclares(ctx.allNames).validNec
        else {
+          val declares = declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
+          val names = declares.map { case (name, _) => name }.toSet
+          val res = ctx.setDeclares(names).addOccurences(declares)
+
          // summarize contexts to allow redeclaration of imports
-          (
-            declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
-          ).map { case (n, t) =>
-            sumCtx
-              .pick(n, None, sumCtx.module.nonEmpty)
+          declares.map { case (n, t) =>
+            res
+              .pick(n, None, ctx.module.nonEmpty)
              .toValidNec(
                error(
                  t,
                  s"`$n` is expected to be declared, but declaration is not found in the file"
                )
-              ).void
-          }.combineAll.as {
-            val tokens = declareNames.map(n => n.value -> n) ++ declareCustom.map(a => a.value -> a)
-            val ctxWithDeclaresLoc = sumCtx.addOccurences(tokens)
-            // TODO: why module name and declares is lost? where is it lost?
-            ctxWithDeclaresLoc.setModule(name.value, declares = shouldDeclare)
-          }
+              )
+              .void
+            // TODO: Should not it be possible to make `.combineAll` the final result?
+            // Seems like `.pick` does not return much information
+          }.combineAll.as(res)
        }
     )
diff --git a/semantics/src/main/scala/aqua/semantics/header/Picker.scala b/semantics/src/main/scala/aqua/semantics/header/Picker.scala
index 4444206a..79305af0 100644
--- a/semantics/src/main/scala/aqua/semantics/header/Picker.scala
+++ b/semantics/src/main/scala/aqua/semantics/header/Picker.scala
@@ -2,30 +2,31 @@ package aqua.semantics.header
 
 import aqua.raw.{RawContext, RawPart}
 import aqua.types.{AbilityType, ArrowType, Type}
+
 import cats.Semigroup
+import cats.syntax.foldable.*
 import cats.syntax.semigroup.*
 
 // Able to pick info from different contexts
 trait Picker[A] {
-
-  def all(ctx: A): Set[String]
   def funcNames(ctx: A): Set[String]
   def definedAbilityNames(ctx: A): Set[String]
   def blank: A
   def pick(ctx: A, name: String, rename: Option[String], declared: Boolean): Option[A]
-  def pickDeclared(ctx: A)(implicit semi: Semigroup[A]): A
+  def pickDeclared(ctx: A): A
   def pickHeader(ctx: A): A
   def module(ctx: A): Option[String]
+  def allNames(ctx: A): Set[String]
+  def declaredNames(ctx: A): Set[String]
   def exports(ctx: A): Map[String, Option[String]]
   def isAbility(ctx: A, name: String): Boolean
   def funcReturnAbilityOrArrow(ctx: A, name: String): Boolean
   def funcAcceptAbility(ctx: A, name: String): Boolean
-  def declares(ctx: A): Set[String]
   def setAbility(ctx: A, name: String, ctxAb: A): A
   def setImportPaths(ctx: A, importPaths: Map[String, String]): A
-  def setModule(ctx: A, name: Option[String], declares: Set[String]): A
+  def setModule(ctx: A, name: String): A
+  def setDeclares(ctx: A, declares: Set[String]): A
   def setExports(ctx: A, exports: Map[String, Option[String]]): A
-  def setInit(ctx: A, ctxInit: Option[A]): A
   def addPart(ctx: A, part: (A, RawPart)): A
 }
@@ -34,15 +35,17 @@ object Picker {
 
   extension [A: Picker](p: A) {
     def blank: A = Picker[A].blank
-    def all: Set[String] = Picker[A].all(p)
+
     def funcNames: Set[String] = Picker[A].funcNames(p)
     def definedAbilityNames: Set[String] = Picker[A].definedAbilityNames(p)
 
     def pick(name: String, rename: Option[String], declared: Boolean): Option[A] =
       Picker[A].pick(p, name, rename, declared)
-    def pickDeclared(implicit semi: Semigroup[A]): A = Picker[A].pickDeclared(p)
+    def pickDeclared: A = Picker[A].pickDeclared(p)
     def pickHeader: A = Picker[A].pickHeader(p)
     def module: Option[String] = Picker[A].module(p)
+    def declaredNames: Set[String] = Picker[A].declaredNames(p)
+    def allNames: Set[String] = Picker[A].allNames(p)
     def exports: Map[String, Option[String]] = Picker[A].exports(p)
 
     def isAbility(name: String): Boolean = Picker[A].isAbility(p, name)
@@ -50,17 +53,23 @@ object Picker {
     def funcReturnAbilityOrArrow(name: String): Boolean =
       Picker[A].funcReturnAbilityOrArrow(p, name)
     def funcAcceptAbility(name: String): Boolean = Picker[A].funcAcceptAbility(p, name)
-    def declares: Set[String] = Picker[A].declares(p)
     def setAbility(name: String, ctx: A): A = Picker[A].setAbility(p, name, ctx)
-    def setImportPaths(importPaths: Map[String, String]): A = Picker[A].setImportPaths(p, importPaths)
-    def setInit(ctx: Option[A]): A = Picker[A].setInit(p, ctx)
+
+    def setImportPaths(importPaths: Map[String, String]): A =
+      Picker[A].setImportPaths(p, importPaths)
 
     def addPart(part: (A, RawPart)): A = Picker[A].addPart(p, part)
 
-    def setModule(name: String, declares: Set[String]): A =
-      Picker[A].setModule(p, Some(name), declares)
+    def addParts(parts: List[RawPart]): A =
+      parts.foldLeft(p) { case (ctx, part) => ctx.addPart(ctx -> part) }
 
-    def setOptModule(name: Option[String], declares: Set[String]): A =
-      Picker[A].setModule(p, name, declares)
+    def addFreeParts(parts: List[RawPart]): A =
+      parts.foldLeft(p) { case (ctx, part) => ctx.addPart(blank -> part) }
+
+    def setModule(name: String): A =
+      Picker[A].setModule(p, name)
+
+    def setDeclares(declares: Set[String]): A =
+      Picker[A].setDeclares(p, declares)
 
     def setExports(exports: Map[String, Option[String]]): A = Picker[A].setExports(p, exports)
@@ -108,13 +117,11 @@ object Picker {
     override def addPart(ctx: RawContext, part: (RawContext, RawPart)): RawContext =
       ctx.copy(parts = ctx.parts :+ part)
 
-    override def setInit(ctx: RawContext, ctxInit: Option[RawContext]): RawContext =
-      ctx.copy(init = ctxInit)
-
-    override def all(ctx: RawContext): Set[String] =
-      ctx.`type`("").map(_.fields.toNel.map(_._1).toList.toSet).getOrElse(Set.empty)
     override def module(ctx: RawContext): Option[String] = ctx.module
-    override def declares(ctx: RawContext): Set[String] = ctx.declares
+
+    override def declaredNames(ctx: RawContext): Set[String] = ctx.declaredNames
+
+    override def allNames(ctx: RawContext): Set[String] = ctx.allNames
 
     override def setAbility(ctx: RawContext, name: String, ctxAb: RawContext): RawContext =
       ctx.copy(abilities = Map(name -> ctxAb))
@@ -123,12 +130,11 @@ object Picker {
     override def setImportPaths(ctx: RawContext, importPaths: Map[String, String]): RawContext =
       ctx
 
-    override def setModule(
-      ctx: RawContext,
-      name: Option[String],
-      declares: Set[String]
-    ): RawContext =
-      ctx.copy(module = name, declares = declares)
+    override def setModule(ctx: RawContext, name: String): RawContext =
+      ctx.copy(module = Some(name))
+
+    override def setDeclares(ctx: RawContext, declares: Set[String]): RawContext =
+      ctx.copy(declares = declares)
 
     override def setExports(ctx: RawContext, exports: Map[String, Option[String]]): RawContext =
       ctx.copy(exports = exports)
@@ -151,14 +157,12 @@ object Picker {
     override def pickHeader(ctx: RawContext): RawContext =
       RawContext.blank.copy(module = ctx.module, declares = ctx.declares, exports = ctx.exports)
 
-    override def pickDeclared(ctx: RawContext)(implicit semi: Semigroup[RawContext]): RawContext =
+    override def pickDeclared(ctx: RawContext): RawContext =
       if (ctx.module.isEmpty) ctx
       else
         ctx.declares.toList
           .flatMap(n => pick(ctx, n, None, ctx.module.nonEmpty))
-          .foldLeft(pickHeader(ctx))(
-            _ |+| _
-          )
+          .foldLeft(pickHeader(ctx))(_ |+| _)
   }
 }
diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala
index 37d04654..01dbfe0a 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala
@@ -1,15 +1,15 @@
 package aqua.semantics.rules.abilities
 
-import aqua.raw.{RawContext, ServiceRaw}
-import aqua.raw.value.ValueRaw
+import aqua.parser.lexer.Token.name
 import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken}
+import aqua.raw.value.ValueRaw
+import aqua.raw.{RawContext, ServiceRaw}
 import aqua.types.ArrowType
 
 import cats.Monoid
+import cats.data.NonEmptyList
 import cats.syntax.foldable.*
 import cats.syntax.functor.*
-import cats.data.NonEmptyList
-import aqua.parser.lexer.Token.name
 
 case class AbilitiesState[S[_]](
   stack: List[AbilitiesState.Frame[S]] = Nil,
@@ -60,19 +60,6 @@ object AbilitiesState {
       )
   }
 
-  given [S[_]]: Monoid[AbilitiesState[S]] with {
-    override def empty: AbilitiesState[S] = AbilitiesState()
-
-    override def combine(x: AbilitiesState[S], y: AbilitiesState[S]): AbilitiesState[S] =
-      AbilitiesState(
-        Nil,
-        x.services ++ y.services,
-        x.abilities ++ y.abilities,
-        x.rootServiceIds ++ y.rootServiceIds,
-        x.definitions ++ y.definitions
-      )
-  }
-
   def init[S[_]](context: RawContext): AbilitiesState[S] =
     AbilitiesState(
       services = context.allServices.keySet,
diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala
index 6fc3e521..1875967a 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesState.scala
@@ -49,18 +49,6 @@ object NamesState {
       copy[S](arrows = arrows.updated(n.value, at))
   }
 
-  implicit def namesStateMonoid[S[_]]: Monoid[NamesState[S]] = new Monoid[NamesState[S]] {
-    override def empty: NamesState[S] = NamesState[S]()
-
-    override def combine(x: NamesState[S], y: NamesState[S]): NamesState[S] =
-      NamesState(
-        stack = Nil,
-        rootArrows = x.rootArrows ++ y.rootArrows,
-        definitions = x.definitions ++ y.definitions,
-        constants = x.constants ++ y.constants
-      )
-  }
-
   def init[S[_]](context: RawContext): NamesState[S] =
     NamesState(
       rootArrows = context.allFuncs.map { case (s, fc) =>
diff --git a/semantics/src/main/scala/aqua/semantics/rules/report/ReportState.scala b/semantics/src/main/scala/aqua/semantics/rules/report/ReportState.scala
index a56d9075..e52b8299 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/report/ReportState.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/report/ReportState.scala
@@ -17,16 +17,3 @@ final case class ReportState[S[_]](
   def reportWarning(token: Token[S], hints: List[String]): ReportState[S] =
     copy(warnings = warnings.append(SemanticWarning(token, hints)))
 }
-
-object ReportState {
-
-  given [S[_]]: Monoid[ReportState[S]] with {
-    override val empty: ReportState[S] = ReportState()
-
-    override def combine(x: ReportState[S], y: ReportState[S]): ReportState[S] =
-      ReportState(
-        errors = x.errors ++ y.errors,
-        warnings = x.warnings ++ y.warnings
-      )
-  }
-}
diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala
index 418116b1..4ab1d359 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala
@@ -36,15 +36,6 @@ object TypesState {
     retVals: Option[List[ValueRaw]]
   )
 
-  given [S[_]]: Monoid[TypesState[S]] with {
-    override def empty: TypesState[S] = TypesState()
-
-    override def combine(x: TypesState[S], y: TypesState[S]): TypesState[S] =
-      TypesState(
-        strict = x.strict ++ y.strict,
-      )
-  }
-
   def init[S[_]](context: RawContext): TypesState[S] =
     TypesState(strict = context.allTypes)
 }
diff --git a/semantics/src/test/scala/aqua/semantics/HeaderSpec.scala b/semantics/src/test/scala/aqua/semantics/HeaderSpec.scala
index 4228adf0..85bdea73 100644
--- a/semantics/src/test/scala/aqua/semantics/HeaderSpec.scala
+++ b/semantics/src/test/scala/aqua/semantics/HeaderSpec.scala
@@ -9,10 +9,10 @@ import aqua.raw.arrow.{ArrowRaw, FuncRaw}
 import aqua.raw.ops.RawTag
 import aqua.raw.value.VarRaw
 import aqua.semantics.header.{HeaderHandler, HeaderSem}
+import aqua.semantics.rules.locations.{DummyLocationsInterpreter, LocationsAlgebra}
 import aqua.types.{AbilityType, ArrowType, NilType, ProductType, ScalarType}
-import aqua.semantics.rules.locations.{LocationsAlgebra, DummyLocationsInterpreter}
 
-import cats.data.{State, Chain, NonEmptyList, NonEmptyMap, Validated}
+import cats.data.{Chain, NonEmptyList, NonEmptyMap, State, Validated}
 import cats.free.Cofree
 import cats.syntax.applicative.*
 import cats.{Eval, Id, Monoid}
@@ -22,9 +22,7 @@ import org.scalatest.matchers.should.Matchers
 class HeaderSpec extends AnyFlatSpec with Matchers with Inside {
 
-  given Monoid[RawContext] = RawContext.implicits(RawContext.blank).rawContextMonoid
-
-  given LocationsAlgebra[Id, State[RawContext, *]] = 
+  given LocationsAlgebra[Id, State[RawContext, *]] =
     DummyLocationsInterpreter[Id, RawContext]()
 
   val handler = new HeaderHandler[Id, RawContext]()
@@ -73,7 +71,7 @@ class HeaderSpec extends AnyFlatSpec with Matchers with Inside {
 
     val initCtx = funcCtx(funcName, arrowType)
 
-    val result = handler.sem(Map.empty, ast).andThen(_.finCtx(initCtx))
+    val result = handler.sem(Map.empty, ast).andThen(_.fin(initCtx))
 
     inside(result) { case Validated.Invalid(errors) =>
       atLeast(1, errors.toChain.toList) shouldBe a[HeaderError[Id]]
@@ -89,7 +87,7 @@
 
     val initCtx = funcCtx(funcName, arrowType)
 
-    val result = handler.sem(Map.empty, ast).andThen(_.finCtx(initCtx))
+    val result = handler.sem(Map.empty, ast).andThen(_.fin(initCtx))
 
     inside(result) { case Validated.Invalid(errors) =>
       atLeast(1, errors.toChain.toList) shouldBe a[HeaderError[Id]]
diff --git a/utils/mangler/src/main/scala/aqua/mangler/ManglerState.scala b/utils/mangler/src/main/scala/aqua/mangler/ManglerState.scala
index 1b0f17cd..689c56c6 100644
--- a/utils/mangler/src/main/scala/aqua/mangler/ManglerState.scala
+++ b/utils/mangler/src/main/scala/aqua/mangler/ManglerState.scala
@@ -9,32 +9,31 @@ case class ManglerState(namesNumbers: Map[String, Int] = Map.empty) {
 
   // find unique names that have not yet been used
   def findNewNames(introduce: Set[String]): (ManglerState, Map[String, String]) = {
-    introduce.foldLeft(this, Map.empty[String, String]) {
-      case ((state, newNames), name) =>
-        val namesNumbers = state.namesNumbers
-        if (!namesNumbers.contains(name)) {
-          val newState = state.copy(
-            namesNumbers = namesNumbers
-              .updated(name, 0)
-          )
+    introduce.foldLeft(this, Map.empty[String, String]) { case ((state, newNames), name) =>
+      val namesNumbers = state.namesNumbers
+      if (!namesNumbers.contains(name)) {
+        val newState = state.copy(
+          namesNumbers = namesNumbers
+            .updated(name, 0)
+        )
 
-          (newState, newNames)
-        } else {
-          val (newNumber, newName) = LazyList
-            .from(namesNumbers.getOrElse(name, 0))
-            .map(n => n -> genName(name, n))
-            .dropWhile { case (_, newName) =>
-              namesNumbers.contains(newName)
-            }
-            .head
-          val newState = copy(
-            namesNumbers = namesNumbers
-              .updated(name, newNumber + 1)
-              .updated(newName, 0)
-          )
+        (newState, newNames)
+      } else {
+        val (newNumber, newName) = LazyList
+          .from(namesNumbers.getOrElse(name, 0))
+          .map(n => n -> genName(name, n))
+          .dropWhile { case (_, newName) =>
+            namesNumbers.contains(newName)
+          }
+          .head
+        val newState = copy(
+          namesNumbers = namesNumbers
+            .updated(name, newNumber + 1)
+            .updated(newName, 0)
+        )
 
-          (newState, newNames + (name -> newName))
-        }
+        (newState, newNames + (name -> newName))
+      }
     }
   }
@@ -51,13 +50,3 @@ case class ManglerState(namesNumbers: Map[String, Int] = Map.empty) {
     (newState, newNames(name))
   }
 }
-
-object ManglerState {
-
-  given Monoid[ManglerState] with {
-    override val empty: ManglerState = ManglerState()
-
-    override def combine(x: ManglerState, y: ManglerState): ManglerState =
-      ManglerState(namesNumbers = x.namesNumbers ++ y.namesNumbers)
-  }
-}
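Note for readers of this patch: a minimal sketch (not part of the diff) of how the reshaped HeaderSem is meant to be combined and applied. It assumes the new HeaderSem(init, fin) shape and the Monoid instances introduced above; `a`, `b` and `processed` are hypothetical values.

    // Sketch only — not part of the patch. Mirrors the Monoid[HeaderSem[S, C]] added above:
    // `init` contexts merge via the context Monoid, and both finalizers run on the same
    // finished body context, with their ValidatedNec results combined.
    import aqua.semantics.SemanticError
    import aqua.semantics.header.HeaderSem
    import cats.Monoid
    import cats.data.ValidatedNec
    import cats.syntax.monoid.*

    def combineHeaderSem[S[_], C: Monoid](
      a: HeaderSem[S, C],
      b: HeaderSem[S, C]
    ): HeaderSem[S, C] =
      HeaderSem(
        a.init |+| b.init,
        c => a.fin(c) |+| b.fin(c)
      )

    def finalizeHeader[S[_], C](
      sem: HeaderSem[S, C],
      processed: C
    ): ValidatedNec[SemanticError[S], C] =
      sem.fin(processed) // previously sem.finCtx(processed), which mixed initCtx back in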
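Similarly, a rough sketch (again, not part of the diff) of how constants now reach the body semantics through the new Picker helpers instead of a custom Monoid empty element: `addParts` attaches parts with the accumulated context as their part-context, while `addFreeParts` attaches them against a blank context, which is how RawSemantics and LspSemantics inject default constants. `initCtx` and `consts` are hypothetical placeholders.

    // Sketch only — not part of the patch; `initCtx` and `consts` are placeholders
    // (e.g. config.constants ++ ConstantRaw.defaultConstants(config.relayVarName)).
    import aqua.raw.{ConstantRaw, RawContext}
    import aqua.semantics.header.Picker.*

    def withDefaultConstants(initCtx: RawContext, consts: List[ConstantRaw]): RawContext =
      // Parts are paired with RawContext.blank, so they carry no dependency on initCtx;
      // this replaces the old trick of hiding constants inside Monoid[RawContext].empty.
      initCtx.addFreeParts(consts)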