diff --git a/.gitignore b/.gitignore index d52c9365..0ed698b6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,8 @@ .idea .bsp +.metals +.vscode +.bloop +metals.sbt target project/target \ No newline at end of file diff --git a/.scalafmt.conf b/.scalafmt.conf index 04974b20..ecd39b1d 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -58,4 +58,5 @@ rewrite { rules = [ SortImports ] -} \ No newline at end of file +} +#runner.dialect = scala3 diff --git a/README.md b/README.md index b9559642..17cb757d 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Please refer to [Aqua Book](https://doc.fluence.dev/aqua-book/) to learn how to ## Compiler CLI -To build the Aqua compiler, clone the repo & run `sbt assembly`, +To build the Aqua compiler, clone the repo & run `sbt cli/assembly`, or simply download the latest JAR file from the [releases](https://github.com/fluencelabs/aqua/releases) page. It requires `java` to run Aqua compiler from the command line: diff --git a/build.sbt b/build.sbt index 71eee15e..85996637 100644 --- a/build.sbt +++ b/build.sbt @@ -17,7 +17,7 @@ val declineV = "2.1.0" name := "aqua-hll" val commons = Seq( - baseAquaVersion := "0.1.13", + baseAquaVersion := "0.1.14", version := baseAquaVersion.value + "-" + sys.env.getOrElse("BUILD_NUMBER", "SNAPSHOT"), scalaVersion := dottyVersion, libraryDependencies ++= Seq( diff --git a/cli/src/main/scala/aqua/ErrorRendering.scala b/cli/src/main/scala/aqua/ErrorRendering.scala index 58233edd..3089a5f6 100644 --- a/cli/src/main/scala/aqua/ErrorRendering.scala +++ b/cli/src/main/scala/aqua/ErrorRendering.scala @@ -5,7 +5,7 @@ import aqua.files.FileModuleId import aqua.io.AquaFileError import aqua.parser.lift.FileSpan import aqua.parser.{BlockIndentError, FuncReturnError, LexerError} -import aqua.semantics.{RulesViolated, WrongAST} +import aqua.semantics.{HeaderError, RulesViolated, WrongAST} import cats.Show object ErrorRendering { @@ -61,6 +61,11 @@ object ErrorRendering { .focus(2) .map(_.toConsoleStr(message, Console.CYAN)) .getOrElse("(Dup error, but offset is beyond the script)") + "\n" + case HeaderError(token, message) => + token.unit._1 + .focus(2) + .map(_.toConsoleStr(message, Console.CYAN)) + .getOrElse("(Dup error, but offset is beyond the script)") + "\n" case WrongAST(ast) => s"Semantic error" diff --git a/cli/src/main/scala/aqua/files/FileModuleId.scala b/cli/src/main/scala/aqua/files/FileModuleId.scala index b20c0c13..8947e01c 100644 --- a/cli/src/main/scala/aqua/files/FileModuleId.scala +++ b/cli/src/main/scala/aqua/files/FileModuleId.scala @@ -1,11 +1,18 @@ package aqua.files import fs2.io.file.Path +import cats.Order case class FileModuleId private (file: Path) object FileModuleId { + implicit object FileModuleIdOrder extends Order[FileModuleId] { + + override def compare(x: FileModuleId, y: FileModuleId): Int = + x.file.toString.compareTo(y.file.toString) + } + def apply(file: Path): FileModuleId = new FileModuleId(file.absolute.normalize) } diff --git a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala index 64a50eca..034f733d 100644 --- a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala +++ b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala @@ -6,19 +6,22 @@ import aqua.model.AquaContext import aqua.model.transform.TransformConfig import aqua.model.transform.res.AquaRes import aqua.parser.lift.LiftParser +import aqua.parser.Ast import aqua.semantics.Semantics +import aqua.semantics.header.HeaderSem import 
cats.data.Validated.{validNec, Invalid, Valid} -import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec} +import cats.data.{Chain, NonEmptyChain, NonEmptyMap, Validated, ValidatedNec} import cats.syntax.applicative.* import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.traverse.* -import cats.{Comonad, Monad} +import cats.syntax.monoid.* +import cats.{Comonad, Monad, Monoid, Order} import scribe.Logging object AquaCompiler extends Logging { - def compile[F[_]: Monad, E, I, S[_]: Comonad]( + def compile[F[_]: Monad, E, I: Order, S[_]: Comonad]( sources: AquaSources[F, E, I], liftI: (I, String) => LiftParser[S], backend: Backend, @@ -26,27 +29,56 @@ object AquaCompiler extends Logging { ): F[ValidatedNec[AquaError[I, E, S], Chain[AquaCompiled[I]]]] = { import config.aquaContextMonoid type Err = AquaError[I, E, S] + type Ctx = NonEmptyMap[I, AquaContext] + type ValidatedCtx = ValidatedNec[Err, Ctx] + new AquaParser[F, E, I, S](sources, liftI) - .resolve[ValidatedNec[Err, AquaContext]] { ast => context => - context.andThen { ctx => - Semantics - .process(ast, ctx) - .leftMap(_.map[Err](CompileError(_))) - } - } - .map { - case Valid(modules) => - Linker.link[I, AquaError[I, E, S], ValidatedNec[Err, AquaContext]]( - modules, - cycle => CycleError[I, E, S](cycle.map(_.id)) - ) match { - case Valid(filesWithContext) => + .resolve[ValidatedCtx](mod => + context => + // Context with prepared imports + context.andThen(ctx => + // To manage imports, exports run HeaderSem + HeaderSem + .sem( + mod.imports.view + .mapValues(ctx(_)) + .collect { case (fn, Some(fc)) => fn -> fc } + .toMap, + mod.body.head + ) + .andThen { headerSem => + // Analyze the body, with prepared initial context + Semantics + .process( + mod.body, + headerSem.initCtx + ) + // Handle exports, declares – finalize the resulting context + .andThen(headerSem.finCtx) + .map(rc => NonEmptyMap.one(mod.id, rc)) + } + // The whole chain returns a semantics error finally + .leftMap(_.map[Err](CompileError(_))) + ) + ) + .map( + _.andThen(modules => + Linker + .link[I, AquaError[I, E, S], ValidatedCtx]( + modules, + cycle => CycleError[I, E, S](cycle.map(_.id)), + // By default, provide an empty context for this module's id + i => validNec(NonEmptyMap.one(i, Monoid.empty[AquaContext])) + ) + .andThen { filesWithContext => filesWithContext .foldLeft[ValidatedNec[Err, Chain[AquaProcessed[I]]]]( validNec(Chain.nil) ) { case (acc, (i, Valid(context))) => - acc combine validNec(Chain.one(AquaProcessed(i, context))) + acc combine validNec( + Chain.fromSeq(context.toNel.toList.map { case (i, c) => AquaProcessed(i, c) }) + ) case (acc, (_, Invalid(errs))) => acc combine Invalid(errs) } @@ -56,13 +88,12 @@ object AquaCompiler extends Logging { AquaCompiled(ap.id, compiled) } ) - case i @ Invalid(_) => i - } - case i @ Invalid(_) => i - } + } + ) + ) } - def compileTo[F[_]: Monad, E, I, S[_]: Comonad, T]( + def compileTo[F[_]: Monad, E, I: Order, S[_]: Comonad, T]( sources: AquaSources[F, E, I], liftI: (I, String) => LiftParser[S], backend: Backend, diff --git a/compiler/src/main/scala/aqua/compiler/AquaParser.scala b/compiler/src/main/scala/aqua/compiler/AquaParser.scala index f36a053f..b9a11db5 100644 --- a/compiler/src/main/scala/aqua/compiler/AquaParser.scala +++ b/compiler/src/main/scala/aqua/compiler/AquaParser.scala @@ -2,7 +2,7 @@ package aqua.compiler import aqua.linker.{AquaModule, Modules} import aqua.parser.Ast -import aqua.parser.head.ImportExpr +import aqua.parser.head.{FilenameExpr, ImportExpr} import 
aqua.parser.lift.LiftParser import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec} import cats.syntax.applicative.* @@ -32,23 +32,37 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad]( ) // Resolve imports (not parse, just resolve) of the given file - def resolveImports(id: I, ast: Ast[S]): F[ValidatedNec[Err, Map[I, Err]]] = + def resolveImports(id: I, ast: Body): F[ValidatedNec[Err, AquaModule[I, Err, Body]]] = ast.head.tailForced .map(_.head) - .collect { case ImportExpr(filename) => + .collect { case fe: FilenameExpr[F] => sources - .resolveImport(id, filename.value.drop(1).dropRight(1)) + .resolveImport(id, fe.fileValue) .map( _.bimap( - _.map(ResolveImportsErr(id, filename, _)), - importId => Chain.one[(I, Err)](importId -> ImportErr(filename)) + _.map[Err](ResolveImportsErr(id, fe.filename, _)), + importId => + Chain.one[(I, (String, Err))](importId -> (fe.fileValue, ImportErr(fe.filename))) ) ) } .traverse(identity) .map( - _.foldLeft(Validated.validNec[Err, Chain[(I, Err)]](Chain.nil))(_ combine _) - .map(_.toList.toMap) + _.foldLeft(Validated.validNec[Err, Chain[(I, (String, Err))]](Chain.nil))(_ combine _).map { + collected => + AquaModule[I, Err, Body]( + id, + // How filenames correspond to the resolved IDs + collected.map { case (i, (fn, _)) => + fn -> i + }.toList.toMap[String, I], + // Resolved IDs to errors that point to the import in source code + collected.map { case (i, (_, err)) => + i -> err + }.toList.toMap[I, Err], + ast + ) + } ) // Parse sources, convert to modules @@ -56,7 +70,7 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad]( parseSources.flatMap { case Validated.Valid(srcs) => srcs.traverse { case (id, ast) => - resolveImports(id, ast).map(_.map(AquaModule(id, _, ast)).map(Chain.one)) + resolveImports(id, ast).map(_.map(Chain.one)) }.map( _.foldLeft(Validated.validNec[Err, Chain[AquaModule[I, Err, Body]]](Chain.empty))( _ combine _ @@ -66,7 +80,7 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad]( Validated.invalid[NonEmptyChain[Err], Chain[AquaModule[I, Err, Body]]](errs).pure[F] }.map(_.map(_.foldLeft(Modules[I, Err, Body]())(_.add(_, toExport = true)))) - def loadModule(imp: I): F[ValidatedNec[Err, AquaModule[I, Err, Ast[S]]]] = + def loadModule(imp: I): F[ValidatedNec[Err, AquaModule[I, Err, Body]]] = sources .load(imp) .map(_.leftMap(_.map[Err](SourcesErr(_))).andThen { src => @@ -75,7 +89,7 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad]( }) .flatMap { case Validated.Valid(ast) => - resolveImports(imp, ast).map(_.map(AquaModule(imp, _, ast))) + resolveImports(imp, ast) case Validated.Invalid(errs) => Validated.invalid[NonEmptyChain[Err], AquaModule[I, Err, Ast[S]]](errs).pure[F] } @@ -106,7 +120,9 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad]( case err => err.pure[F] } - def resolve[T](transpile: Ast[S] => T => T): F[ValidatedNec[Err, Modules[I, Err, T => T]]] = - resolveSources.map(_.map(_.map(transpile))) + def resolve[T]( + transpile: AquaModule[I, Err, Body] => T => T + ): F[ValidatedNec[Err, Modules[I, Err, T => T]]] = + resolveSources.map(_.map(_.mapModuleToBody(transpile))) } diff --git a/compiler/src/main/scala/aqua/compiler/AquaProcessed.scala b/compiler/src/main/scala/aqua/compiler/AquaProcessed.scala index 5cdd6e41..cfccba19 100644 --- a/compiler/src/main/scala/aqua/compiler/AquaProcessed.scala +++ b/compiler/src/main/scala/aqua/compiler/AquaProcessed.scala @@ -3,5 +3,5 @@ package aqua.compiler import aqua.model.AquaContext case class AquaProcessed[I](id: I, context: AquaContext) { - def hasOutput: Boolean = 
context.funcs.nonEmpty + def hasOutput: Boolean = context.funcs.nonEmpty || context.services.nonEmpty } diff --git a/linker/src/main/scala/aqua/linker/AquaModule.scala b/linker/src/main/scala/aqua/linker/AquaModule.scala index 9f58c831..fd38f996 100644 --- a/linker/src/main/scala/aqua/linker/AquaModule.scala +++ b/linker/src/main/scala/aqua/linker/AquaModule.scala @@ -1,8 +1,10 @@ package aqua.linker -case class AquaModule[I, E, T](id: I, dependsOn: Map[I, E], body: T) { +case class AquaModule[I, E, T](id: I, imports: Map[String, I], dependsOn: Map[I, E], body: T) { def map[TT](f: T => TT): AquaModule[I, E, TT] = copy(body = f(body)) + def mapWithId[TT](f: (I, T) => TT): AquaModule[I, E, TT] = copy(body = f(id, body)) + def mapErr[EE](f: E => EE): AquaModule[I, EE, T] = copy(dependsOn = dependsOn.view.mapValues(f).toMap) } diff --git a/linker/src/main/scala/aqua/linker/Linker.scala b/linker/src/main/scala/aqua/linker/Linker.scala index 516ee541..8171db06 100644 --- a/linker/src/main/scala/aqua/linker/Linker.scala +++ b/linker/src/main/scala/aqua/linker/Linker.scala @@ -2,7 +2,7 @@ package aqua.linker import cats.data.{NonEmptyChain, Validated, ValidatedNec} import cats.kernel.{Monoid, Semigroup} -import cats.syntax.monoid._ +import cats.syntax.semigroup._ import scribe.Logging import scala.annotation.tailrec @@ -50,9 +50,10 @@ object Linker extends Logging { } } - def link[I, E, T: Monoid]( + def link[I, E, T: Semigroup]( modules: Modules[I, E, T => T], - cycleError: List[AquaModule[I, E, T => T]] => E + cycleError: List[AquaModule[I, E, T => T]] => E, + empty: I => T ): ValidatedNec[E, Map[I, T]] = if (modules.dependsOn.nonEmpty) Validated.invalid(modules.dependsOn.values.reduce(_ ++ _)) else { @@ -60,7 +61,7 @@ object Linker extends Logging { Validated.fromEither( result - .map(_.view.filterKeys(modules.exports).mapValues(_.apply(Monoid[T].empty)).toMap) + .map(_.collect { case (i, f) if modules.exports(i) => i -> f(empty(i)) }) .left .map(NonEmptyChain.one) ) diff --git a/linker/src/main/scala/aqua/linker/Modules.scala b/linker/src/main/scala/aqua/linker/Modules.scala index 68e86372..5371b58d 100644 --- a/linker/src/main/scala/aqua/linker/Modules.scala +++ b/linker/src/main/scala/aqua/linker/Modules.scala @@ -28,6 +28,9 @@ case class Modules[I, E, T]( def map[TT](f: T => TT): Modules[I, E, TT] = copy(loaded = loaded.view.mapValues(_.map(f)).toMap) + def mapModuleToBody[TT](f: AquaModule[I, E, T] => TT): Modules[I, E, TT] = + copy(loaded = loaded.view.mapValues(v => v.map(_ => f(v))).toMap) + def mapErr[EE](f: E => EE): Modules[I, EE, T] = copy( loaded = loaded.view.mapValues(_.mapErr(f)).toMap, diff --git a/linker/src/test/scala/aqua/linker/LinkerSpec.scala b/linker/src/test/scala/aqua/linker/LinkerSpec.scala index 7e32d582..959f226c 100644 --- a/linker/src/test/scala/aqua/linker/LinkerSpec.scala +++ b/linker/src/test/scala/aqua/linker/LinkerSpec.scala @@ -15,6 +15,7 @@ class LinkerSpec extends AnyFlatSpec with Matchers { .add( AquaModule[String, String, String => String]( "mod1", + Map.empty, Map("mod2" -> "unresolved mod2 in mod1"), _ ++ " | mod1" ), @@ -24,17 +25,19 @@ class LinkerSpec extends AnyFlatSpec with Matchers { Linker.link[String, String, String]( withMod1, - cycle => cycle.map(_.id).mkString(" -> ") + cycle => cycle.map(_.id).mkString(" -> "), + _ => "" ) should be(Validated.invalidNec("unresolved mod2 in mod1")) val withMod2 = - withMod1.add(AquaModule("mod2", Map.empty, _ ++ " | mod2")) + withMod1.add(AquaModule("mod2", Map.empty, Map.empty, _ ++ " | mod2")) 
withMod2.isResolved should be(true) Linker.link[String, String, String]( withMod2, - cycle => cycle.map(_.id + "?").mkString(" -> ") + cycle => cycle.map(_.id + "?").mkString(" -> "), + _ => "" ) should be(Validated.validNec(Map("mod1" -> " | mod2 | mod1"))) } diff --git a/model/src/main/scala/aqua/model/AquaContext.scala b/model/src/main/scala/aqua/model/AquaContext.scala index f83f592f..fcb4c916 100644 --- a/model/src/main/scala/aqua/model/AquaContext.scala +++ b/model/src/main/scala/aqua/model/AquaContext.scala @@ -12,6 +12,9 @@ import scribe.Logging import scala.collection.immutable.SortedMap case class AquaContext( + module: Option[String], + declares: Set[String], + exports: Option[AquaContext], funcs: Map[String, FuncCallable], types: Map[String, Type], values: Map[String, ValueModel], @@ -23,6 +26,26 @@ case class AquaContext( private def prefixFirst[T](prefix: String, pair: (String, T)): (String, T) = (prefix + pair._1, pair._2) + def pick( + name: String, + rename: Option[String], + declared: Boolean = module.nonEmpty + ): Option[AquaContext] = + Option + .when(!declared || declares(name)) { + val targetName = rename.getOrElse(name) + def getter[T](g: AquaContext => Map[String, T]): Map[String, T] = + g(this).get(name).map(targetName -> _).map(Map(_)).getOrElse(Map.empty) + AquaContext.blank.copy( + funcs = getter(_.funcs), + types = getter(_.types), + values = getter(_.values), + abilities = getter(_.abilities), + services = getter(_.services) + ) + } + .filter(_.`type`(name).nonEmpty) + def allTypes(prefix: String = ""): Map[String, Type] = abilities .foldLeft(types) { case (ts, (k, v)) => @@ -73,7 +96,8 @@ object AquaContext extends Logging { implicit val aquaContextMonoid: Monoid[AquaContext] } - val blank: AquaContext = AquaContext(Map.empty, Map.empty, Map.empty, Map.empty, Map.empty) + val blank: AquaContext = + AquaContext(None, Set.empty, None, Map.empty, Map.empty, Map.empty, Map.empty, Map.empty) def implicits(init: AquaContext): Implicits = new Implicits { @@ -83,8 +107,15 @@ object AquaContext extends Logging { override def empty: AquaContext = init + // TODO is it the right way? 
override def combine(x: AquaContext, y: AquaContext): AquaContext = AquaContext( + x.module.orElse(y.module), + x.declares ++ y.declares, + x.exports + .flatMap(xe => y.exports.map(combine(xe, _))) + .orElse(x.exports) + .orElse(y.exports), x.funcs ++ y.funcs, x.types ++ y.types, x.values ++ y.values, @@ -96,6 +127,9 @@ object AquaContext extends Logging { def fromServiceModel(sm: ServiceModel, serviceId: ValueModel): AquaContext = AquaContext( + module = Some(sm.name), + declares = sm.`type`.fields.toNel.map(_._1).toList.toSet, + exports = None, funcs = sm.arrows.toSortedMap.map { case (fnName, arrowType) => val (args, call, ret) = ArgsCall.arrowToArgsCallRet(arrowType) fnName -> @@ -130,7 +164,7 @@ object AquaContext extends Logging { ) (ctx |+| add, exportContext |+| add) case ((ctx, exportContext), func: FuncModel) => - val fr = func.capture(ctx.funcs, ctx.values) + val fr = func.capture(ctx.allFuncs(), ctx.allValues()) val add = Monoid.empty[AquaContext].copy(funcs = ctx.funcs.updated(func.name, fr)) (ctx |+| add, exportContext |+| add) diff --git a/model/transform/src/main/scala/aqua/model/transform/res/AquaRes.scala b/model/transform/src/main/scala/aqua/model/transform/res/AquaRes.scala index f7a49a4b..452fcef1 100644 --- a/model/transform/src/main/scala/aqua/model/transform/res/AquaRes.scala +++ b/model/transform/src/main/scala/aqua/model/transform/res/AquaRes.scala @@ -10,10 +10,16 @@ case class AquaRes(funcs: Chain[FuncRes], services: Chain[ServiceRes]) { } object AquaRes { + private val blank = AquaRes(Chain.nil, Chain.nil) def fromContext(ctx: AquaContext, conf: TransformConfig): AquaRes = - AquaRes( - funcs = Chain.fromSeq(ctx.funcs.values.toSeq).map(Transform.fn(_, conf)), - services = Chain.fromSeq(ctx.services.values.toSeq).map(ServiceRes.fromModel(_)) - ) + ctx.exports + .map(ex => + AquaRes( + funcs = Chain.fromSeq(ex.funcs.values.toSeq).map(Transform.fn(_, conf)), + services = Chain.fromSeq(ex.services.values.toSeq).map(ServiceRes.fromModel(_)) + ) + ) + .getOrElse(blank) + } diff --git a/parser/src/main/scala/aqua/parser/Ast.scala b/parser/src/main/scala/aqua/parser/Ast.scala index d8cd79de..39a39def 100644 --- a/parser/src/main/scala/aqua/parser/Ast.scala +++ b/parser/src/main/scala/aqua/parser/Ast.scala @@ -9,26 +9,29 @@ import cats.free.Cofree import cats.parse.Parser0 as P0 import cats.{Comonad, Eval} -case class Ast[F[_]](head: Ast.Head[F], tree: Ast.Tree[F]) { +case class Ast[S[_]](head: Ast.Head[S], tree: Ast.Tree[S]) { - def cata[T](folder: (Expr[F], Chain[T]) => Eval[T]): Eval[T] = - Cofree.cata[Chain, Expr[F], T](tree)(folder) + def cata[T](folder: (Expr[S], Chain[T]) => Eval[T]): Eval[T] = + Cofree.cata[Chain, Expr[S], T](tree)(folder) + + def cataHead[T](folder: (HeaderExpr[S], Chain[T]) => Eval[T]): Eval[T] = + Cofree.cata[Chain, HeaderExpr[S], T](head)(folder) } object Ast { - type Tree[F[_]] = Cofree[Chain, Expr[F]] - type Head[F[_]] = Cofree[Chain, HeaderExpr[F]] + type Tree[S[_]] = Cofree[Chain, Expr[S]] + type Head[S[_]] = Cofree[Chain, HeaderExpr[S]] - def parser[F[_]: LiftParser: Comonad](): P0[ValidatedNec[ParserError[F], Ast[F]]] = - (HeadExpr.ast[F].with1 ~ RootExpr.ast[F]()).map { case (head, bodyMaybe) => + def parser[S[_]: LiftParser: Comonad](): P0[ValidatedNec[ParserError[S], Ast[S]]] = + (HeadExpr.ast[S].with1 ~ RootExpr.ast[S]()).map { case (head, bodyMaybe) => bodyMaybe.map(Ast(head, _)) } - def fromString[F[_]: LiftParser: Comonad](script: String): ValidatedNec[ParserError[F], Ast[F]] = - parser[F]() + def fromString[S[_]: LiftParser: 
Comonad](script: String): ValidatedNec[ParserError[S], Ast[S]] = + parser[S]() .parseAll(script) match { case Right(value) => value - case Left(e) => Validated.invalidNec(LexerError[F](e.wrapErr)) + case Left(e) => Validated.invalidNec(LexerError[S](e.wrapErr)) } } diff --git a/parser/src/main/scala/aqua/parser/expr/AbilityIdExpr.scala b/parser/src/main/scala/aqua/parser/expr/AbilityIdExpr.scala index 4eb63d45..4e30b743 100644 --- a/parser/src/main/scala/aqua/parser/expr/AbilityIdExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/AbilityIdExpr.scala @@ -13,7 +13,7 @@ case class AbilityIdExpr[F[_]](ability: Ability[F], id: Value[F]) object AbilityIdExpr extends Expr.Leaf { override def p[F[_]: LiftParser: Comonad]: P[AbilityIdExpr[F]] = - ((Ability.ab[F] <* ` `) ~ Value.`value`).map { case (ability, id) => + ((Ability.dotted[F] <* ` `) ~ Value.`value`).map { case (ability, id) => AbilityIdExpr(ability, id) } diff --git a/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala b/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala index 9b0e69e2..28e88485 100644 --- a/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala @@ -18,7 +18,7 @@ object CallArrowExpr extends Expr.Leaf { override def p[F[_]: LiftParser: Comonad]: P[CallArrowExpr[F]] = ((comma(Name.p[F]) <* ` <- `).backtrack.?.with1 ~ - ((Ability.ab[F] <* `.`).?.with1 ~ + ((Ability.dotted[F] <* `.`).?.with1 ~ Name.p[F] ~ comma0(Value.`value`[F].surroundedBy(`/s*`)).between(`(` <* `/s*`, `/s*` *> `)`))).map { case (variables, ((ability, funcName), args)) => diff --git a/parser/src/main/scala/aqua/parser/head/ExportExpr.scala b/parser/src/main/scala/aqua/parser/head/ExportExpr.scala new file mode 100644 index 00000000..637d15b3 --- /dev/null +++ b/parser/src/main/scala/aqua/parser/head/ExportExpr.scala @@ -0,0 +1,20 @@ +package aqua.parser.head + +import aqua.parser.lexer.Token.* +import aqua.parser.lexer.{Literal, Value, Token} +import aqua.parser.lift.LiftParser +import cats.Comonad +import cats.data.NonEmptyList +import cats.parse.Parser +import cats.syntax.either.* + +case class ExportExpr[F[_]](pubs: NonEmptyList[FromExpr.NameOrAbAs[F]]) extends HeaderExpr[F] { + override def token: Token[F] = + pubs.head.bimap(_._1, _._1).fold(identity, identity) +} + +object ExportExpr extends HeaderExpr.Leaf { + + override def p[F[_]: LiftParser: Comonad]: Parser[ExportExpr[F]] = + (`_export` *> ` `) *> comma(FromExpr.nameOrAbAs[F]).map(ExportExpr(_)) +} diff --git a/parser/src/main/scala/aqua/parser/head/FilenameExpr.scala b/parser/src/main/scala/aqua/parser/head/FilenameExpr.scala new file mode 100644 index 00000000..370268c7 --- /dev/null +++ b/parser/src/main/scala/aqua/parser/head/FilenameExpr.scala @@ -0,0 +1,11 @@ +package aqua.parser.head + +import aqua.parser.lexer.{Literal, Token} + +trait FilenameExpr[F[_]] extends HeaderExpr[F] { + def filename: Literal[F] + + override def token: Token[F] = filename + + def fileValue: String = filename.value.drop(1).dropRight(1) +} diff --git a/parser/src/main/scala/aqua/parser/head/FromExpr.scala b/parser/src/main/scala/aqua/parser/head/FromExpr.scala new file mode 100644 index 00000000..5feea1f4 --- /dev/null +++ b/parser/src/main/scala/aqua/parser/head/FromExpr.scala @@ -0,0 +1,22 @@ +package aqua.parser.head + +import aqua.parser.lexer.Token._ +import aqua.parser.lexer.{Ability, Name} +import aqua.parser.lift.LiftParser +import cats.Comonad +import cats.data.NonEmptyList +import cats.parse.Parser as P + +trait 
FromExpr[F[_]] { + def imports: NonEmptyList[FromExpr.NameOrAbAs[F]] +} + +object FromExpr { + type NameOrAbAs[F[_]] = Either[Name.As[F], Ability.As[F]] + + def nameOrAbAs[F[_]: LiftParser: Comonad]: P[NameOrAbAs[F]] = + Name.nameAs[F].map(Left(_)) | Ability.abAs[F].map(Right(_)) + + def importFrom[F[_]: LiftParser: Comonad]: P[NonEmptyList[NameOrAbAs[F]]] = + comma[NameOrAbAs[F]](nameOrAbAs[F]) <* ` ` <* `from` +} \ No newline at end of file diff --git a/parser/src/main/scala/aqua/parser/head/HeadExpr.scala b/parser/src/main/scala/aqua/parser/head/HeadExpr.scala index bf76481e..6794e3dc 100644 --- a/parser/src/main/scala/aqua/parser/head/HeadExpr.scala +++ b/parser/src/main/scala/aqua/parser/head/HeadExpr.scala @@ -3,25 +3,30 @@ package aqua.parser.head import aqua.parser.Ast import aqua.parser.lexer.Token.` \n+` import aqua.parser.lift.LiftParser +import aqua.parser.lift.LiftParser.* import cats.{Comonad, Eval} import cats.data.Chain import cats.free.Cofree import cats.parse.{Parser => P, Parser0 => P0} +import aqua.parser.lexer.Token -case class HeadExpr[F[_]]() extends HeaderExpr[F] +case class HeadExpr[S[_]](token: Token[S]) extends HeaderExpr[S] object HeadExpr { def headExprs: List[HeaderExpr.Companion] = - ImportExpr :: Nil + UseFromExpr :: UseExpr :: ImportFromExpr :: ImportExpr :: ExportExpr :: Nil - def ast[F[_]: LiftParser: Comonad]: P0[Ast.Head[F]] = - P.repSep0(P.oneOf(headExprs.map(_.ast[F])), ` \n+`) + def ast[S[_]: LiftParser: Comonad]: P0[Ast.Head[S]] = + (P.unit.lift0.map(Token.lift) ~ ((ModuleExpr.p[S] <* ` \n+`).? ~ + P.repSep0(P.oneOf(headExprs.map(_.ast[S].backtrack)), ` \n+`).map(Chain.fromSeq)) .surroundedBy(` \n+`.?) - .? - .map { - case Some(exprs) => Chain.fromSeq(exprs) - case None => Chain.empty[Ast.Head[F]] - } - .map(exprs => Cofree(HeadExpr[F](), Eval.now(exprs))) + .?).map { + case (p, Some((maybeMod, exprs))) => + Cofree( + maybeMod.getOrElse(HeadExpr[S](p)), + Eval.now(exprs) + ) + case (p, None) => Cofree(HeadExpr[S](p), Eval.now(Chain.nil)) + } } diff --git a/parser/src/main/scala/aqua/parser/head/HeaderExpr.scala b/parser/src/main/scala/aqua/parser/head/HeaderExpr.scala index 4ae436fc..cf00bba4 100644 --- a/parser/src/main/scala/aqua/parser/head/HeaderExpr.scala +++ b/parser/src/main/scala/aqua/parser/head/HeaderExpr.scala @@ -1,20 +1,23 @@ package aqua.parser.head import aqua.parser.Ast +import aqua.parser.lexer.Token import aqua.parser.lift.LiftParser import cats.{Comonad, Eval} import cats.data.Chain import cats.free.Cofree -import cats.parse.{Parser => P} +import cats.parse.Parser as P -trait HeaderExpr[F[_]] +trait HeaderExpr[S[_]] { + def token: Token[S] +} object HeaderExpr { trait Companion { - def p[F[_]: LiftParser: Comonad]: P[HeaderExpr[F]] + def p[S[_]: LiftParser: Comonad]: P[HeaderExpr[S]] - def ast[F[_]: LiftParser: Comonad]: P[Ast.Head[F]] + def ast[S[_]: LiftParser: Comonad]: P[Ast.Head[S]] } abstract class Leaf extends Companion { diff --git a/parser/src/main/scala/aqua/parser/head/ImportExpr.scala b/parser/src/main/scala/aqua/parser/head/ImportExpr.scala index 453f5cd4..e6896957 100644 --- a/parser/src/main/scala/aqua/parser/head/ImportExpr.scala +++ b/parser/src/main/scala/aqua/parser/head/ImportExpr.scala @@ -6,7 +6,7 @@ import aqua.parser.lift.LiftParser import cats.Comonad import cats.parse.Parser -case class ImportExpr[F[_]](filename: Literal[F]) extends HeaderExpr[F] +case class ImportExpr[F[_]](filename: Literal[F]) extends FilenameExpr[F] object ImportExpr extends HeaderExpr.Leaf { diff --git 
a/parser/src/main/scala/aqua/parser/head/ImportFromExpr.scala b/parser/src/main/scala/aqua/parser/head/ImportFromExpr.scala new file mode 100644 index 00000000..677fdb8c --- /dev/null +++ b/parser/src/main/scala/aqua/parser/head/ImportFromExpr.scala @@ -0,0 +1,21 @@ +package aqua.parser.head + +import aqua.parser.lexer.Token.* +import aqua.parser.lexer.{Literal, Value} +import aqua.parser.lift.LiftParser +import cats.Comonad +import cats.data.NonEmptyList +import cats.parse.Parser + +case class ImportFromExpr[F[_]]( + imports: NonEmptyList[FromExpr.NameOrAbAs[F]], + filename: Literal[F] +) extends FilenameExpr[F] with FromExpr[F] + +object ImportFromExpr extends HeaderExpr.Leaf { + + override def p[F[_]: LiftParser: Comonad]: Parser[HeaderExpr[F]] = + (`import` *> FromExpr.importFrom[F].surroundedBy(` `) ~ Value.string[F]).map { + case (imports, filename) => ImportFromExpr(imports, filename) + } +} diff --git a/parser/src/main/scala/aqua/parser/head/ModuleExpr.scala b/parser/src/main/scala/aqua/parser/head/ModuleExpr.scala new file mode 100644 index 00000000..2d6734d2 --- /dev/null +++ b/parser/src/main/scala/aqua/parser/head/ModuleExpr.scala @@ -0,0 +1,54 @@ +package aqua.parser.head + +import aqua.parser.lexer.Token.* +import aqua.parser.lexer.Token +import aqua.parser.lexer.{Ability, Literal, Name, Value} +import aqua.parser.lift.LiftParser +import aqua.parser.lift.LiftParser.* +import cats.Comonad +import cats.parse.Parser + +case class ModuleExpr[F[_]]( + name: Ability[F], + declareAll: Option[Token[F]], + declareNames: List[Name[F]], + declareCustom: List[Ability[F]] +) extends HeaderExpr[F] { + override def token: Token[F] = name +} + +object ModuleExpr extends HeaderExpr.Leaf { + + type NameOrAb[F[_]] = Either[Name[F], Ability[F]] + + def nameOrAb[F[_]: LiftParser: Comonad]: Parser[NameOrAb[F]] = + Name.p[F].map(Left(_)) | Ability.ab[F].map(Right(_)) + + def nameOrAbList[F[_]: LiftParser: Comonad]: Parser[List[NameOrAb[F]]] = + comma[NameOrAb[F]](nameOrAb[F]).map(_.toList) + + def nameOrAbListOrAll[F[_]: LiftParser: Comonad]: Parser[Either[List[NameOrAb[F]], Token[F]]] = + nameOrAbList[F].map(Left(_)) | `star`.lift.map(Token.lift(_)).map(Right(_)) + + override def p[F[_]: LiftParser: Comonad]: Parser[ModuleExpr[F]] = + (`module` *> ` ` *> Ability.ab[F] ~ + (` declares ` *> nameOrAbListOrAll[F]).?).map { + case (name, None) => + ModuleExpr(name, None, Nil, Nil) + case (name, Some(Left(exportMembers))) => + ModuleExpr( + name, + None, + exportMembers.collect { case Left(x) => x }, + exportMembers.collect { case Right(x) => x } + ) + case (name, Some(Right(point))) => + ModuleExpr( + name, + Some(point), + Nil, + Nil + ) + } + +} diff --git a/parser/src/main/scala/aqua/parser/head/UseExpr.scala b/parser/src/main/scala/aqua/parser/head/UseExpr.scala new file mode 100644 index 00000000..706f36d8 --- /dev/null +++ b/parser/src/main/scala/aqua/parser/head/UseExpr.scala @@ -0,0 +1,21 @@ +package aqua.parser.head + +import aqua.parser.lexer.Token.* +import aqua.parser.lexer.{Ability, Literal, Value} +import aqua.parser.lift.LiftParser +import cats.Comonad +import cats.parse.Parser + +case class UseExpr[F[_]]( + filename: Literal[F], + asModule: Option[Ability[F]] +) extends FilenameExpr[F] + +object UseExpr extends HeaderExpr.Leaf { + + override def p[F[_]: LiftParser: Comonad]: Parser[HeaderExpr[F]] = + (`use` *> Value + .string[F] ~ (` as ` *> Ability.ab[F]).?).map { case (filename, asModule) => + UseExpr(filename, asModule) + } +} diff --git 
a/parser/src/main/scala/aqua/parser/head/UseFromExpr.scala b/parser/src/main/scala/aqua/parser/head/UseFromExpr.scala new file mode 100644 index 00000000..816676dd --- /dev/null +++ b/parser/src/main/scala/aqua/parser/head/UseFromExpr.scala @@ -0,0 +1,23 @@ +package aqua.parser.head + +import aqua.parser.lexer.Token.* +import aqua.parser.lexer.{Ability, Literal, Name, Value} +import aqua.parser.lift.LiftParser +import cats.Comonad +import cats.data.NonEmptyList +import cats.parse.Parser + +case class UseFromExpr[F[_]]( + imports: NonEmptyList[FromExpr.NameOrAbAs[F]], + filename: Literal[F], + asModule: Ability[F] +) extends FilenameExpr[F] with FromExpr[F] + +object UseFromExpr extends HeaderExpr.Leaf { + + override def p[F[_]: LiftParser: Comonad]: Parser[HeaderExpr[F]] = + (`use` *> FromExpr.importFrom[F].surroundedBy(` `) ~ Value + .string[F] ~ (` as ` *> Ability.ab[F])).map { case ((imports, filename), asModule) => + UseFromExpr(imports, filename, asModule) + } +} diff --git a/parser/src/main/scala/aqua/parser/lexer/Ability.scala b/parser/src/main/scala/aqua/parser/lexer/Ability.scala index 2378973e..faf5cbaa 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Ability.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Ability.scala @@ -15,7 +15,14 @@ case class Ability[F[_]: Comonad](name: F[String]) extends Token[F] { } object Ability { + type As[F[_]] = (Ability[F], Option[Ability[F]]) def ab[F[_]: LiftParser: Comonad]: P[Ability[F]] = `Class`.lift.map(Ability(_)) + + def dotted[F[_]: LiftParser: Comonad]: P[Ability[F]] = + P.repSep(`Class`, `.`).map(_.toList.mkString(".")).lift.map(Ability(_)) + + def abAs[F[_]: LiftParser: Comonad]: P[As[F]] = + asOpt(ab[F]) } diff --git a/parser/src/main/scala/aqua/parser/lexer/Name.scala b/parser/src/main/scala/aqua/parser/lexer/Name.scala index 37e12fc9..87843cd6 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Name.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Name.scala @@ -16,6 +16,15 @@ case class Name[F[_]: Comonad](name: F[String]) extends Token[F] { object Name { + type As[F[_]] = (Name[F], Option[Name[F]]) + def p[F[_]: LiftParser: Comonad]: P[Name[F]] = `name`.lift.map(Name(_)) + + def dotted[F[_]: LiftParser: Comonad]: P[Name[F]] = + ((`Class`.repSep(`.`).map(_.toList.mkString(".")) ~ `.`).?.with1 ~ `name`).string.lift + .map(Name(_)) + + def nameAs[F[_]: LiftParser: Comonad]: P[As[F]] = + asOpt(p[F]) } diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala index 0a05840a..91cf38e6 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Token.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala @@ -25,8 +25,17 @@ object Token { val `const`: P[Unit] = P.string("const") val `data`: P[Unit] = P.string("data") val `import`: P[Unit] = P.string("import") + val `module`: P[Unit] = P.string("module") + val `declares`: P[Unit] = P.string("declares") + val ` declares ` : P[Unit] = `declares`.surroundedBy(` `) + val `declare`: P[Unit] = P.string("declare") + val `_export`: P[Unit] = P.string("export") + val `star`: P[Unit] = P.char('*') val `use`: P[Unit] = P.string("use") + val `from`: P[Unit] = P.string("from") + val ` from ` : P[Unit] = `from`.surroundedBy(` `) val `as`: P[Unit] = P.string("as") + val ` as ` : P[Unit] = `as`.surroundedBy(` `) val `alias`: P[Unit] = P.string("alias") val `service`: P[Unit] = P.string("service") val `func`: P[Unit] = P.string("func") @@ -64,7 +73,7 @@ object Token { val `.` : P[Unit] = P.char('.') val `"` : P[Unit] = P.char('"') 
val `*` : P[Unit] = P.char('*') - val exclamation : P[Unit] = P.char('!') + val exclamation: P[Unit] = P.char('!') val `[]` : P[Unit] = P.string("[]") val `⊤` : P[Unit] = P.char('⊤') val `⊥` : P[Unit] = P.char('⊥') @@ -92,4 +101,7 @@ object Token { def comma0[T](p: P[T]): P0[List[T]] = P.repSep0(p, `,` <* ` \n+`.rep0) + + def asOpt[T](p: P[T]): P[(T, Option[T])] = + p ~ (` as ` *> p).? } diff --git a/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala b/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala index 675d5d68..b83a557b 100644 --- a/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala +++ b/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala @@ -54,7 +54,12 @@ case class CustomTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken } object CustomTypeToken { - def ct[F[_]: LiftParser: Comonad]: P[CustomTypeToken[F]] = `Class`.lift.map(CustomTypeToken(_)) + + def ct[F[_]: LiftParser: Comonad]: P[CustomTypeToken[F]] = + `Class`.lift.map(CustomTypeToken(_)) + + def dotted[F[_]: LiftParser: Comonad]: P[CustomTypeToken[F]] = + `Class`.repSep(`.`).string.lift.map(CustomTypeToken(_)) } case class BasicTypeToken[F[_]: Comonad](scalarType: F[ScalarType]) extends DataTypeToken[F] { @@ -115,7 +120,7 @@ object DataTypeToken { P.defer(`arraytypedef`[F]) :: P.defer(StreamTypeToken.`streamtypedef`) :: P.defer( OptionTypeToken.`optiontypedef` ) :: BasicTypeToken - .`basictypedef`[F] :: CustomTypeToken.ct[F] :: Nil + .`basictypedef`[F] :: CustomTypeToken.dotted[F] :: Nil ) } diff --git a/parser/src/main/scala/aqua/parser/lexer/Value.scala b/parser/src/main/scala/aqua/parser/lexer/Value.scala index f383914e..5d0b14e5 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Value.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Value.scala @@ -24,7 +24,7 @@ case class Literal[F[_]: Comonad](valueToken: F[String], ts: LiteralType) extend object Value { def varLambda[F[_]: LiftParser: Comonad]: P[VarLambda[F]] = - (Name.p[F] ~ LambdaOp.ops[F].?).map { case (n, l) ⇒ + (Name.dotted[F] ~ LambdaOp.ops[F].?).map { case (n, l) ⇒ VarLambda(n, l.fold[List[LambdaOp[F]]](Nil)(_.toList)) } diff --git a/parser/src/main/scala/aqua/parser/lift/LiftParser.scala b/parser/src/main/scala/aqua/parser/lift/LiftParser.scala index 19efb77e..e17f9ca9 100644 --- a/parser/src/main/scala/aqua/parser/lift/LiftParser.scala +++ b/parser/src/main/scala/aqua/parser/lift/LiftParser.scala @@ -3,26 +3,26 @@ package aqua.parser.lift import cats.Id import cats.parse.{Parser, Parser0} -trait LiftParser[F[_]] { - def lift[T](p: Parser[T]): Parser[F[T]] +trait LiftParser[S[_]] { + def lift[T](p: Parser[T]): Parser[S[T]] - def lift0[T](p0: Parser0[T]): Parser0[F[T]] + def lift0[T](p0: Parser0[T]): Parser0[S[T]] - def wrapErr(e: Parser.Error): F[Parser.Error] + def wrapErr(e: Parser.Error): S[Parser.Error] } object LiftParser { - implicit class LiftErrorOps[F[_]: LiftParser, T](e: Parser.Error) { - def wrapErr: F[Parser.Error] = implicitly[LiftParser[F]].wrapErr(e) + implicit class LiftErrorOps[S[_]: LiftParser, T](e: Parser.Error) { + def wrapErr: S[Parser.Error] = implicitly[LiftParser[S]].wrapErr(e) } - implicit class LiftParserOps[F[_]: LiftParser, T](parser: Parser[T]) { - def lift: Parser[F[T]] = implicitly[LiftParser[F]].lift(parser) + implicit class LiftParserOps[S[_]: LiftParser, T](parser: Parser[T]) { + def lift: Parser[S[T]] = implicitly[LiftParser[S]].lift(parser) } - implicit class LiftParser0Ops[F[_]: LiftParser, T](parser0: Parser0[T]) { - def lift0: Parser0[F[T]] = 
implicitly[LiftParser[F]].lift0(parser0) + implicit class LiftParser0Ops[S[_]: LiftParser, T](parser0: Parser0[T]) { + def lift0: Parser0[S[T]] = implicitly[LiftParser[S]].lift0(parser0) } object Implicits { diff --git a/semantics/src/main/scala/aqua/semantics/SemanticError.scala b/semantics/src/main/scala/aqua/semantics/SemanticError.scala index 568da215..b57f3d84 100644 --- a/semantics/src/main/scala/aqua/semantics/SemanticError.scala +++ b/semantics/src/main/scala/aqua/semantics/SemanticError.scala @@ -3,6 +3,7 @@ package aqua.semantics import aqua.parser.Ast import aqua.parser.lexer.Token -sealed trait SemanticError[F[_]] -case class RulesViolated[F[_]](token: Token[F], message: String) extends SemanticError[F] -case class WrongAST[F[_]](ast: Ast[F]) extends SemanticError[F] +sealed trait SemanticError[S[_]] +case class RulesViolated[S[_]](token: Token[S], message: String) extends SemanticError[S] +case class HeaderError[S[_]](token: Token[S], message: String) extends SemanticError[S] +case class WrongAST[S[_]](ast: Ast[S]) extends SemanticError[S] diff --git a/semantics/src/main/scala/aqua/semantics/Semantics.scala b/semantics/src/main/scala/aqua/semantics/Semantics.scala index 83a91879..140da5bf 100644 --- a/semantics/src/main/scala/aqua/semantics/Semantics.scala +++ b/semantics/src/main/scala/aqua/semantics/Semantics.scala @@ -27,13 +27,13 @@ import scribe.Logging object Semantics extends Logging { - def folder[F[_], G[_]](implicit - A: AbilitiesAlgebra[F, G], - N: NamesAlgebra[F, G], - T: TypesAlgebra[F, G] - ): (Expr[F], Chain[Free[G, Model]]) => Eval[Free[G, Model]] = { case (expr, inners) => + def folder[S[_], G[_]](implicit + A: AbilitiesAlgebra[S, G], + N: NamesAlgebra[S, G], + T: TypesAlgebra[S, G] + ): (Expr[S], Chain[Free[G, Model]]) => Eval[Free[G, Model]] = { case (expr, inners) => Eval later ExprSem - .getProg[F, G](expr) + .getProg[S, G](expr) .apply( // TODO instead of foldRight, do slidingWindow for 2 elements, merge right associative ones // Then foldLeft just like now @@ -50,57 +50,58 @@ object Semantics extends Logging { ) } - type Alg0[F[_], A] = EitherK[AbilityOp[F, *], NameOp[F, *], A] - type Alg[F[_], A] = EitherK[TypeOp[F, *], Alg0[F, *], A] + type Alg0[S[_], A] = EitherK[AbilityOp[S, *], NameOp[S, *], A] + type Alg[S[_], A] = EitherK[TypeOp[S, *], Alg0[S, *], A] - def transpile[F[_]](ast: Ast[F]): Free[Alg[F, *], Model] = - ast.cata(folder[F, Alg[F, *]]).value + def transpile[S[_]](ast: Ast[S]): Free[Alg[S, *], Model] = + ast.cata(folder[S, Alg[S, *]]).value - def interpret[F[_]](free: Free[Alg[F, *], Model]): State[CompilerState[F], Model] = { + def interpret[S[_]](free: Free[Alg[S, *], Model]): State[CompilerState[S], Model] = { import monocle.syntax.all._ - implicit val re: ReportError[F, CompilerState[F]] = - (st: CompilerState[F], token: Token[F], hint: String) => + implicit val re: ReportError[S, CompilerState[S]] = + (st: CompilerState[S], token: Token[S], hint: String) => st.focus(_.errors).modify(_.append(RulesViolated(token, hint))) - implicit val ns: Lens[CompilerState[F], NamesState[F]] = GenLens[CompilerState[F]](_.names) + implicit val ns: Lens[CompilerState[S], NamesState[S]] = GenLens[CompilerState[S]](_.names) - val names = new NamesInterpreter[F, CompilerState[F]]() + val names = new NamesInterpreter[S, CompilerState[S]]() - implicit val as: Lens[CompilerState[F], AbilitiesState[F]] = - GenLens[CompilerState[F]](_.abilities) + implicit val as: Lens[CompilerState[S], AbilitiesState[S]] = + GenLens[CompilerState[S]](_.abilities) - val 
abilities = new AbilitiesInterpreter[F, CompilerState[F]]() + val abilities = new AbilitiesInterpreter[S, CompilerState[S]]() - implicit val ts: Lens[CompilerState[F], TypesState[F]] = GenLens[CompilerState[F]](_.types) + implicit val ts: Lens[CompilerState[S], TypesState[S]] = GenLens[CompilerState[S]](_.types) - val types = new TypesInterpreter[F, CompilerState[F]]() + val types = new TypesInterpreter[S, CompilerState[S]]() - val interpreter0: FunctionK[Alg0[F, *], State[CompilerState[F], *]] = abilities or names - val interpreter: FunctionK[Alg[F, *], State[CompilerState[F], *]] = types or interpreter0 + val interpreter0: FunctionK[Alg0[S, *], State[CompilerState[S], *]] = abilities or names + val interpreter: FunctionK[Alg[S, *], State[CompilerState[S], *]] = types or interpreter0 - free.foldMap[State[CompilerState[F], *]](interpreter) + free.foldMap[State[CompilerState[S], *]](interpreter) } - private def astToState[F[_]](ast: Ast[F]): State[CompilerState[F], Model] = - (transpile[F] _ andThen interpret[F])(ast) + private def astToState[S[_]](ast: Ast[S]): State[CompilerState[S], Model] = + (transpile[S] _ andThen interpret[S])(ast) - def process[F[_]](ast: Ast[F], init: AquaContext)(implicit + def process[S[_]](ast: Ast[S], init: AquaContext)(implicit aqum: Monoid[AquaContext] - ): ValidatedNec[SemanticError[F], AquaContext] = - astToState[F](ast) - .run(CompilerState.init[F](init)) + ): ValidatedNec[SemanticError[S], AquaContext] = + astToState[S](ast) + .run(CompilerState.init[S](init)) .map { case (state, gen: ScriptModel) => val ctx = AquaContext.fromScriptModel(gen, init) NonEmptyChain .fromChain(state.errors) - .fold[ValidatedNec[SemanticError[F], AquaContext]](Valid(ctx))(Invalid(_)) + .fold[ValidatedNec[SemanticError[S], AquaContext]](Valid(ctx))(Invalid(_)) case (state, _) => NonEmptyChain .fromChain(state.errors) .map(Invalid(_)) - .getOrElse(Validated.invalidNec[SemanticError[F], AquaContext](WrongAST(ast))) + .getOrElse(Validated.invalidNec[SemanticError[S], AquaContext](WrongAST(ast))) } + // TODO: return as Eval .value } diff --git a/semantics/src/main/scala/aqua/semantics/expr/CallArrowSem.scala b/semantics/src/main/scala/aqua/semantics/expr/CallArrowSem.scala index fe0f5b05..c6fc3b16 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/CallArrowSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/CallArrowSem.scala @@ -58,12 +58,12 @@ class CallArrowSem[F[_]](val expr: CallArrowExpr[F]) extends AnyVal { ability match { case Some(ab) => (A.getArrow(ab, funcName), A.getServiceId(ab)).mapN { - case (Some(at), Some(sid)) => - Option(at -> sid) // Here we assume that Ability is a Service that must be resolved + case (Some(at), Right(sid)) => + Some(callServiceTag(at, Option(sid))) + case (Some(at), Left(true)) => + Some(callServiceTag(at, None)) case _ => None - }.flatMap(_.map { case (arrowType, serviceId) => - callServiceTag(arrowType, Option(serviceId)) - }.traverse(identity)) + }.flatMap(_.traverse(identity)) case None => N.readArrow(funcName) .flatMap(_.map { arrowType => @@ -96,7 +96,7 @@ class CallArrowSem[F[_]](val expr: CallArrowExpr[F]) extends AnyVal { ) case None => CallArrowTag( - funcName = funcName.value, + funcName = ability.map(_.value + "." 
+ funcName.value).getOrElse(funcName.value), Call(argsResolved, maybeExport) ) }) diff --git a/semantics/src/main/scala/aqua/semantics/header/HeaderSem.scala b/semantics/src/main/scala/aqua/semantics/header/HeaderSem.scala new file mode 100644 index 00000000..e2ce0edb --- /dev/null +++ b/semantics/src/main/scala/aqua/semantics/header/HeaderSem.scala @@ -0,0 +1,201 @@ +package aqua.semantics.header + +import aqua.model.AquaContext +import cats.data.Validated.{invalidNec, validNec, Invalid, Valid} +import cats.{Comonad, Eval, Monoid} +import cats.data.{Chain, NonEmptyChain, NonEmptyMap, Validated, ValidatedNec} +import aqua.parser.Ast +import aqua.parser.head.* +import aqua.parser.lexer.{Ability, Token} +import aqua.semantics.{HeaderError, SemanticError} +import cats.syntax.foldable.* +import cats.syntax.semigroup.* +import cats.instances.list.* +import cats.instances.option.* +import cats.free.Cofree + +case class HeaderSem[S[_]]( + initCtx: AquaContext, + finCtx: AquaContext => ValidatedNec[SemanticError[S], AquaContext] +) + +object HeaderSem { + type Res[S[_]] = ValidatedNec[SemanticError[S], HeaderSem[S]] + type ResAC[S[_]] = ValidatedNec[SemanticError[S], AquaContext] + + private implicit def headerSemMonoid[S[_]](implicit + acm: Monoid[AquaContext] + ): Monoid[HeaderSem[S]] = + new Monoid[HeaderSem[S]] { + override def empty: HeaderSem[S] = HeaderSem(acm.empty, validNec(_)) + + override def combine(a: HeaderSem[S], b: HeaderSem[S]): HeaderSem[S] = + HeaderSem( + a.initCtx |+| b.initCtx, + a.finCtx.andThen(_.andThen(b.finCtx)) + ) + } + + // Helper: monoidal combine of all the childrens after parent res + private def combineAnd[S[_]](children: Chain[Res[S]])(parent: Res[S])(implicit + acm: Monoid[AquaContext] + ): Eval[Res[S]] = + Eval.later(parent |+| children.combineAll) + + // Error generator with token pointer + private def error[S[_], T](token: Token[S], msg: String): ValidatedNec[SemanticError[S], T] = + invalidNec(HeaderError(token, msg)) + + def sem[S[_]: Comonad](imports: Map[String, AquaContext], header: Ast.Head[S])(implicit + acm: Monoid[AquaContext] + ): Res[S] = { + // Resolve a filename from given imports or fail + def resolve(f: FilenameExpr[S]): ResAC[S] = + imports + .get(f.fileValue) + .fold[ResAC[S]]( + error(f.token, "Cannot resolve the import") + )(validNec) + + // Get part of the declared context (for import/use ... from ... expressions) + def getFrom(f: FromExpr[S], ctx: AquaContext): ResAC[S] = + f.imports + .map[ResAC[S]]( + _.fold[ResAC[S]]( + { case (n, rn) => + ctx + .pick(n.value, rn.map(_.value)) + .map(validNec) + .getOrElse(error(n, s"Imported file has no ${n.value} declaration")) + }, + { case (n, rn) => + ctx + .pick(n.value, rn.map(_.value)) + .map(validNec) + .getOrElse(error(n, s"Imported file has no ${n.value} declaration")) + } + ) + ) + .reduce + + // Convert an imported context into a module (ability) + def toModule(ctx: AquaContext, tkn: Token[S], rename: Option[Ability[S]]): ResAC[S] = + rename + .map(_.value) + .orElse(ctx.module) + .fold[ResAC[S]]( + error( + tkn, + "Used module has no `module` header. Please add `module` header or use ... 
as ModuleName, or switch to import" + ) + )(modName => validNec(acm.empty.copy(abilities = Map(modName -> ctx)))) + + // Handler for every header expression, will be combined later + val onExpr: PartialFunction[HeaderExpr[S], Res[S]] = { + // Module header, like `module A declares *` + case ModuleExpr(name, declareAll, declareNames, declareCustom) => + validNec( + HeaderSem[S]( + // Save module header info + acm.empty.copy( + module = Some(name.value), + declares = declareNames.map(_.value).toSet ++ declareCustom.map(_.value) + ), + ctx => + // When file is handled, check that all the declarations exists + if (declareAll.nonEmpty) + validNec( + ctx.copy(declares = + ctx.`type`("").map(_.fields.toNel.map(_._1).toList.toSet).getOrElse(Set.empty) + ) + ) + else + ( + declareNames.map(n => n.value -> n) ::: declareCustom.map(a => a.value -> a) + ).map[ValidatedNec[SemanticError[S], Int]] { case (n, t) => + ctx + .pick(n, None) + // We just validate, nothing more + .map(_ => validNec(1)) + .getOrElse( + error( + t, + s"`${n}` is expected to be declared, but declaration is not found in the file" + ) + ) + }.combineAll + .map(_ => ctx) + ) + ) + + case f @ ImportExpr(_) => + // Import everything from a file + resolve(f).map(fc => HeaderSem[S](fc, validNec(_))) + case f @ ImportFromExpr(_, _) => + // Import, map declarations + resolve(f) + .andThen(getFrom(f, _)) + .map(ctx => HeaderSem[S](ctx, validNec(_))) + + case f @ UseExpr(_, asModule) => + // Import, move into a module scope + resolve(f) + .andThen(toModule(_, f.token, asModule)) + .map(fc => HeaderSem[S](fc, validNec(_))) + + case f @ UseFromExpr(_, _, asModule) => + // Import, cherry-pick declarations, move to a module scope + resolve(f) + .andThen(getFrom(f, _)) + .andThen(toModule(_, f.token, Some(asModule))) + .map(fc => HeaderSem[S](fc, validNec(_))) + + case ExportExpr(pubs) => + // Save exports, finally handle them + validNec( + HeaderSem[S]( + // Nothing there + acm.empty, + ctx => + pubs + .map( + _.fold( + { case (n, rn) => + ctx + .pick(n.value, rn.map(_.value), declared = false) + .map(validNec) + .getOrElse( + error(n, s"File has no ${n.value} declaration or import, cannot export") + ) + }, + { case (n, rn) => + ctx + .pick(n.value, rn.map(_.value), declared = false) + .map(validNec) + .getOrElse( + error(n, s"File has no ${n.value} declaration or import, cannot export") + ) + } + ) + ) + .foldLeft[ResAC[S]](validNec(ctx.exports.getOrElse(acm.empty)))(_ |+| _) + .map(expCtx => ctx.copy(exports = Some(expCtx))) + ) + ) + + case HeadExpr(token) => + // Old file exports everything + validNec(HeaderSem[S](acm.empty, ctx => validNec(ctx.copy(exports = Some(ctx))))) + + case f: FilenameExpr[S] => + resolve(f).map(fc => HeaderSem[S](fc, validNec(_))) + } + + Cofree + .cata[Chain, HeaderExpr[S], Res[S]](header) { case (expr, children) => + onExpr.lift.apply(expr).fold(Eval.later(children.combineAll))(combineAnd(children)(_)) + } + .value + } + +} diff --git a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala index 8b8fe06c..40b3e987 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala @@ -37,7 +37,10 @@ class ValuesAlgebra[F[_], Alg[_]](implicit N: NamesAlgebra[F, Alg], T: TypesAlge case VarLambda(name, ops) => N.read(name).flatMap { case Some(t) => - T.resolveLambda(t, ops).map(Chain.fromSeq).map(VarModel(name.value, t, _)).map(Some(_)) + 
T.resolveLambda(t, ops) + .map(Chain.fromSeq) + .map(VarModel(name.value.replace('.', '_'), t, _)) + .map(Some(_)) case None => Free.pure(None) } diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala index 66cc461d..6d403591 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesAlgebra.scala @@ -28,7 +28,7 @@ class AbilitiesAlgebra[F[_], Alg[_]](implicit A: InjectK[AbilityOp[F, *], Alg]) def setServiceId(name: Ability[F], id: Value[F], vm: ValueModel): Free[Alg, Boolean] = Free.liftInject[Alg](SetServiceId[F](name, id, vm)) - def getServiceId(name: Ability[F]): Free[Alg, Option[ValueModel]] = + def getServiceId(name: Ability[F]): Free[Alg, Either[Boolean, ValueModel]] = Free.liftInject[Alg](GetServiceId[F](name)) def beginScope(token: Token[F]): Free[Alg, Unit] = diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala index 810e37b6..65fd1266 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala @@ -1,11 +1,11 @@ package aqua.semantics.rules.abilities -import aqua.model.{ServiceModel, ValueModel} +import aqua.model.{AquaContext, ServiceModel, ValueModel} import aqua.parser.lexer.Name import aqua.semantics.rules.{ReportError, StackInterpreter} import aqua.types.ArrowType import cats.data.{NonEmptyList, State} -import cats.syntax.functor._ +import cats.syntax.functor.* import cats.~> import monocle.Lens import monocle.macros.GenLens @@ -17,10 +17,12 @@ class AbilitiesInterpreter[F[_], X](implicit GenLens[AbilitiesState[F]](_.stack) ) with (AbilityOp[F, *] ~> State[X, *]) { - // TODO: resolve abilities as well private def getService(name: String): S[Option[ServiceModel]] = getState.map(_.services.get(name)) + private def getAbility(name: String): S[Option[AquaContext]] = + getState.map(_.abilities.get(name)) + override def apply[A](fa: AbilityOp[F, A]): State[X, A] = (fa match { case bs: BeginScope[F] => @@ -45,12 +47,25 @@ class AbilitiesInterpreter[F[_], X](implicit .fold( report( ga.arrow, - s"Service found, but arrow is undefined, available: ${arrows.value.keys.toNonEmptyList.toList + s"Service is found, but arrow is undefined, available: ${arrows.value.keys.toNonEmptyList.toList .mkString(", ")}" ).as(Option.empty[ArrowType]) )(a => State.pure(Some(a))) case None => - report(ga.name, "Ability with this name is undefined").as(Option.empty[ArrowType]) + getAbility(ga.name.value).flatMap { + case Some(abCtx) => + abCtx.funcs + .get(ga.arrow.value) + .fold( + report( + ga.arrow, + s"Ability is found, but arrow is undefined, available: ${abCtx.funcs.keys.toList + .mkString(", ")}" + ).as(Option.empty[ArrowType]) + )(a => State.pure(Some(a))) + case None => + report(ga.name, "Ability with this name is undefined").as(Option.empty[ArrowType]) + } } case s: SetServiceId[F] => @@ -68,24 +83,35 @@ class AbilitiesInterpreter[F[_], X](implicit } case s: GetServiceId[F] => - getState.flatMap(st => - st.stack - .flatMap(_.serviceIds.get(s.name.value).map(_._2)) - .headOption orElse st.rootServiceIds - .get( - s.name.value - ) - .map(_._2) orElse st.services.get(s.name.value).flatMap(_.defaultId) match { - case None => - report( 
- s.name, - s"Service ID unresolved, use `${s.name.value} id` expression to set it" - ) - .as(Option.empty[ValueModel]) + getService(s.name.value).flatMap { + case Some(_) => + getState.flatMap(st => + st.stack + .flatMap(_.serviceIds.get(s.name.value).map(_._2)) + .headOption orElse st.rootServiceIds + .get( + s.name.value + ) + .map(_._2) orElse st.services.get(s.name.value).flatMap(_.defaultId) match { + case None => + report( + s.name, + s"Service ID unresolved, use `${s.name.value} id` expression to set it" + ) + .as(Left[Boolean, ValueModel](false)) - case v => State.pure(v) - } - ) + case Some(v) => State.pure(Right(v)) + } + ) + case None => + getAbility(s.name.value).flatMap { + case Some(_) => State.pure(Left[Boolean, ValueModel](true)) + case None => + report(s.name, "Ability with this name is undefined").as( + Left[Boolean, ValueModel](false) + ) + } + } case da: DefineArrow[F] => mapStackHeadE( diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala index e079fa38..f4928a53 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesState.scala @@ -47,5 +47,8 @@ object AbilitiesState { } def init[F[_]](context: AquaContext): AbilitiesState[F] = - AbilitiesState(services = context.allServices(), abilities = context.abilities) + AbilitiesState( + services = context.allServices(), + abilities = context.abilities // TODO is it the right way to collect abilities? Why? + ) } diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilityOp.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilityOp.scala index 6e9ff4e8..8b166cd5 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilityOp.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilityOp.scala @@ -23,7 +23,7 @@ case class GetArrow[F[_]](name: Ability[F], arrow: Name[F]) extends AbilityOp[F, case class SetServiceId[F[_]](name: Ability[F], id: Value[F], vm: ValueModel) extends AbilityOp[F, Boolean] -case class GetServiceId[F[_]](name: Ability[F]) extends AbilityOp[F, Option[ValueModel]] +case class GetServiceId[F[_]](name: Ability[F]) extends AbilityOp[F, Either[Boolean, ValueModel]] case class BeginScope[F[_]](token: Token[F]) extends AbilityOp[F, Unit] diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala index cbf880d0..97254f3d 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala @@ -156,5 +156,6 @@ object TypesState { ) } - def init[F[_]](context: AquaContext): TypesState[F] = TypesState(strict = context.allTypes()) + def init[F[_]](context: AquaContext): TypesState[F] = + TypesState(strict = context.allTypes()) }
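
For reference, a minimal usage sketch (not part of the patch, mirroring the updated `LinkerSpec` hunk above) of the reworked `Linker.link`: it now takes an `empty: I => T` factory as a third argument, supplying the starting context for each exported module instead of relying on `Monoid[T].empty`.

```scala
import aqua.linker.{AquaModule, Linker, Modules}

// Two toy modules: mod1 depends on mod2; bodies are plain String => String transformers.
val modules =
  Modules[String, String, String => String]()
    .add(
      AquaModule[String, String, String => String](
        "mod1",
        Map.empty,                                // imports: filename -> resolved id
        Map("mod2" -> "unresolved mod2 in mod1"), // dependsOn: id -> error if unresolved
        _ ++ " | mod1"
      ),
      toExport = true
    )
    .add(AquaModule("mod2", Map.empty, Map.empty, _ ++ " | mod2"))

// The third argument provides the per-module empty value (here just an empty String).
val linked = Linker.link[String, String, String](
  modules,
  cycle => cycle.map(_.id).mkString(" -> "),
  _ => ""
)
// linked == Validated.validNec(Map("mod1" -> " | mod2 | mod1"))
```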