Mirror of https://github.com/fluencelabs/aqua.git, synced 2024-12-04 06:30:17 +00:00
feat(compiler): Allow redeclaring functions [LNG-357] (#1127)
* Allow dots in use ... as
* Add tests
* Refactor implicits
* Use QName in module name
* Fix test
* Refactor declares
* Add PName
* Propagate nested abilities
* Fix compilation
* Do not propagate context
* Use alignCombine
* Add test
* Add more tests
* Refactor test
* Remove unused function
* Refactor test
* Refactor, add comments
* Checkout aqua-src
Parent: f0ad76189a
Commit: 9c23a9d4ef
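Before the diff itself, a minimal sketch of what this change enables at the Aqua level, modeled on the sources that the new compiler test below generates. The file names (import1.aqua, import2.aqua, main.aqua) and module names (Imp1, Imp2) are illustrative only and are not taken from the commit:

    -- import1.aqua (illustrative)
    aqua Imp1 declares foo

    func foo() -> i32:
        <- 1

    -- import2.aqua (illustrative): re-declares the function it imports
    aqua Imp2 declares foo, Imp1.foo

    use "import1.aqua"

    func foo() -> i32:
        <- 2

    -- main.aqua (illustrative): both the module's own foo and the
    -- re-declared Imp1.foo are reachable through the dotted path
    aqua Main

    export main

    use "import2.aqua"

    func main() -> i32:
        a <- Imp2.foo()
        b <- Imp2.Imp1.foo()
        <- a + b

The `use ... as` form now also accepts dotted names (e.g. `use "import2.aqua" as Renamed.With.New.Name`), which the renamed variants of the tests below exercise.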
@@ -26,6 +26,8 @@ val commons = Seq(
    scalaVersion := scalaV,
    libraryDependencies ++= Seq(
      "com.outr" %%% "scribe" % scribeV,
      "dev.optics" %%% "monocle-core" % monocleV,
      "dev.optics" %%% "monocle-macro" % monocleV,
      "org.scalatest" %%% "scalatest" % scalaTestV % Test,
      "org.scalatestplus" %%% "scalacheck-1-17" % scalaTestScalaCheckV % Test
    ),
@@ -201,12 +203,6 @@ lazy val semantics = crossProject(JVMPlatform, JSPlatform)
  .withoutSuffixFor(JVMPlatform)
  .crossType(CrossType.Pure)
  .settings(commons)
  .settings(
    libraryDependencies ++= Seq(
      "dev.optics" %%% "monocle-core" % monocleV,
      "dev.optics" %%% "monocle-macro" % monocleV
    )
  )
  .dependsOn(raw, parser, errors, mangler)

lazy val compiler = crossProject(JVMPlatform, JSPlatform)
@@ -287,6 +283,7 @@ lazy val helpers = crossProject(JVMPlatform, JSPlatform)
      "org.typelevel" %%% "cats-free" % catsV
    )
  )
  .dependsOn(errors)

lazy val errors = crossProject(JVMPlatform, JSPlatform)
  .withoutSuffixFor(JVMPlatform)
@@ -29,6 +29,7 @@ import cats.syntax.show.*
import org.scalatest.Inside
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.annotation.tailrec

class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
  import ModelBuilder.*
@@ -497,10 +498,14 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
    }
  }

  val moduleNames = List("Test", "Imp", "Sub", "Path").inits
    .takeWhile(_.nonEmpty)
    .map(_.mkString("."))
    .toList
  def paths(parts: List[String]): List[String] =
    parts.inits
      .takeWhile(_.nonEmpty)
      .map(_.mkString("."))
      .toList

  val moduleNames = paths(List("Test", "Imp", "Sub", "Path"))
  val renames = paths(List("Renamed", "With", "New", "Name"))

  it should "import function with `use`" in {
    def test(name: String, rename: Option[String]) = {
@@ -536,13 +541,15 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
    }

    moduleNames.foreach { name =>
      val rename = "Imported"

      withClue(s"Testing $name") {
        test(name, None)
      }
      withClue(s"Testing $name as $rename") {
        test(name, rename.some)

      renames.foreach { rename =>
        withClue(s"Testing $name as $rename") {
          test(name, rename.some)
        }
      }
    }
  }
@@ -664,13 +671,15 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
    }

    moduleNames.foreach { name =>
      val rename = "Imported"

      withClue(s"Testing $name") {
        test(name, None)
      }
      withClue(s"Testing $name as $rename") {
        test(name, rename.some)

      renames.foreach { rename =>
        withClue(s"Testing $name as $rename") {
          test(name, rename.some)
        }
      }
    }
  }
@@ -733,13 +742,15 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
    }

    moduleNames.foreach { name =>
      val rename = "Imported"

      withClue(s"Testing $name") {
        test(name, None)
      }
      withClue(s"Testing $name as $rename") {
        test(name, rename.some)

      renames.foreach { rename =>
        withClue(s"Testing $name as $rename") {
          test(name, rename.some)
        }
      }
    }
  }
@@ -878,6 +889,161 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
      }
    }

  it should "import redeclared functions" in {

    final case class Imp(
      idx: Int,
      name: String,
      rename: Option[String] = None,
      use: Option[Imp] = None
    ) {
      def withUse(other: Imp): Imp = copy(use = Some(other))

      lazy val path: String = s"import$idx.aqua"

      lazy val declares: List[String] = use
        .map(u => u.declares.map(n => s"${u.access}.$n"))
        .getOrElse(Nil)
        .prepended("foo")

      lazy val code: String =
        s"""|aqua $name declares ${declares.mkString(", ")}
            |
            |${use.fold("")(_.usage)}
            |
            |func foo() -> i32:
            | <- $idx
            |""".stripMargin

      lazy val usage: String = s"use \"$path\"" + rename.fold("")(n => s" as $n")

      lazy val access: String = rename.getOrElse(name)
    }

    type NameRename = (String, Option[String])

    def test(imps: List[NameRename]) = {
      (imps.length > 0) should be(true)

      val top = imps.zipWithIndex.map { case ((name, rename), idx) =>
        Imp(idx + 1, name, rename)
      }.reduceRight { case (cur, prev) =>
        cur.withUse(prev)
      }

      val (calls, vars) = top.declares.zipWithIndex.map { case (decl, idx) =>
        val v = s"v$idx"
        val call = s"$v <- ${top.access}.$decl()"
        call -> v
      }.unzip

      val main =
        s"""|aqua Main
            |
            |export main
            |
            |${top.usage}
            |
            |func main() -> i32:
            | ${calls.mkString("\n ")}
            | <- ${vars.mkString(" + ")}
            |""".stripMargin

      val imports = List
        .unfold(top.some)(
          _.map(i => i -> i.use)
        )
        .map(i => i.path -> i.code)
        .toMap

      val src = Map(
        "main.aqua" -> main
      )

      val transformCfg = TransformConfig(relayVarName = None)

      insideRes(src, imports, transformCfg)(
        "main"
      ) { case main :: _ =>
        val l = imps.length
        val res = LiteralModel.number(l * (l + 1) / 2)
        val expected = XorRes.wrap(
          respCall(transformCfg, res, initPeer),
          errorCall(transformCfg, 0, initPeer)
        )

        main.body.equalsOrShowDiff(expected) should be(true)
      }
    }

    // Simple
    (1 to 10).foreach { i =>
      val names = (1 to i).map(n => s"Imp$n").toList
      withClue(s"Testing ${names.mkString(" -> ")}") {
        test(names.map(_ -> none))
      }
    }

    extension [A](l: List[List[A]]) {
      def rotate: List[List[A]] =
        l.foldLeft(List.empty[List[A]]) { case (acc, next) =>
          if (acc.isEmpty) next.map(List(_))
          else
            for {
              elem <- next
              prev <- acc
            } yield elem +: prev
        }
    }

    // With subpaths
    (1 to 4).foreach { i =>
      (1 to i)
        .map(idx =>
          paths(
            List("Imp", "Sub", "Path")
              .map(p => s"$p$idx")
          )
        )
        .toList
        .rotate
        .foreach(names =>
          withClue(s"Testing ${names.mkString(" -> ")}") {
            test(names.map(_ -> none))
          }
        )
    }

    // With renames
    (1 to 3).foreach { i =>
      (1 to i)
        .map(idx =>
          for {
            name <- paths(
              List("Imp", "Sub", "Path")
                .map(p => s"$p$idx")
            )
            rename <- None :: paths(
              List("Rename", "To", "Other")
                .map(p => s"$p$idx")
            ).map(_.some)
          } yield name -> rename
        )
        .toList
        .rotate
        .foreach(names =>
          val message = names.map { case (n, r) =>
            s"$n${r.fold("")(n => s" as $n")}"
          }.mkString(" -> ")

          withClue(s"Testing $message") {
            test(names)
          }
        )
    }

  }

  it should "not generate error propagation in `if` with `noXor = true`" in {
    val src = Map(
      "index.aqua" ->
|
@ -1,5 +1,6 @@
|
||||
package aqua.lsp
|
||||
|
||||
import aqua.helpers.data.PName
|
||||
import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token}
|
||||
import aqua.raw.{RawContext, RawPart}
|
||||
import aqua.semantics.header.Picker
|
||||
@ -94,15 +95,12 @@ object LspContext {
|
||||
): LspContext[S] =
|
||||
ctx.copy(importPaths = importPaths)
|
||||
|
||||
override def setModule(
|
||||
ctx: LspContext[S],
|
||||
name: String
|
||||
): LspContext[S] =
|
||||
override def setModule(ctx: LspContext[S], name: Option[String]): LspContext[S] =
|
||||
ctx.copy(raw = ctx.raw.setModule(name))
|
||||
|
||||
override def setDeclares(
|
||||
ctx: LspContext[S],
|
||||
declares: Set[String]
|
||||
declares: Set[PName]
|
||||
): LspContext[S] =
|
||||
ctx.copy(raw = ctx.raw.setDeclares(declares))
|
||||
|
||||
@ -145,6 +143,9 @@ object LspContext {
|
||||
)
|
||||
)
|
||||
|
||||
override def pick(ctx: LspContext[S], name: PName, declared: Boolean): Option[LspContext[S]] =
|
||||
ctx.raw.pick(name, declared).map(rc => ctx.copy(raw = rc))
|
||||
|
||||
override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ctx.raw.pickHeader)
|
||||
|
||||
override def pickDeclared(ctx: LspContext[S]): LspContext[S] =
|
||||
|
@ -1,5 +1,6 @@
|
||||
package aqua.raw
|
||||
|
||||
import aqua.helpers.data.PName
|
||||
import aqua.raw.arrow.FuncRaw
|
||||
import aqua.raw.value.ValueRaw
|
||||
import aqua.types.{AbilityType, StructType, Type}
|
||||
@ -8,8 +9,11 @@ import cats.Monoid
|
||||
import cats.Semigroup
|
||||
import cats.data.Chain
|
||||
import cats.data.NonEmptyMap
|
||||
import cats.syntax.align.*
|
||||
import cats.syntax.monoid.*
|
||||
import cats.syntax.option.*
|
||||
import monocle.Lens
|
||||
import monocle.macros.GenLens
|
||||
import scala.collection.immutable.SortedMap
|
||||
|
||||
/**
|
||||
@ -28,7 +32,7 @@ import scala.collection.immutable.SortedMap
|
||||
*/
|
||||
case class RawContext(
|
||||
module: Option[String] = None,
|
||||
declares: Set[String] = Set.empty,
|
||||
declares: Set[PName] = Set.empty,
|
||||
exports: Map[String, Option[String]] = Map.empty,
|
||||
parts: Chain[(RawContext, RawPart)] = Chain.empty,
|
||||
abilities: Map[String, RawContext] = Map.empty
|
||||
@ -51,6 +55,9 @@ case class RawContext(
|
||||
}
|
||||
.map(prefixFirst(prefix, _))
|
||||
|
||||
lazy val allAbilities: Map[String, RawContext] =
|
||||
all(_.abilities)
|
||||
|
||||
lazy val services: Map[String, ServiceRaw] = collectPartsMap { case srv: ServiceRaw => srv }
|
||||
|
||||
lazy val allServices: Map[String, ServiceRaw] =
|
||||
@ -87,10 +94,11 @@ case class RawContext(
|
||||
all(_.definedAbilities)
|
||||
|
||||
lazy val allNames: Set[String] =
|
||||
// TODO: How about names in abilities?
|
||||
parts.map { case (_, p) => p.name }.toList.toSet
|
||||
|
||||
lazy val declaredNames: Set[String] =
|
||||
allNames.intersect(declares)
|
||||
declares.map(_.value).toSet
|
||||
|
||||
override def toString: String =
|
||||
s"""|module: ${module.getOrElse("unnamed")}
|
||||
@ -105,6 +113,18 @@ case class RawContext(
|
||||
object RawContext {
|
||||
val blank: RawContext = RawContext()
|
||||
|
||||
val partsLens: Lens[RawContext, Chain[(RawContext, RawPart)]] =
|
||||
GenLens[RawContext](_.parts)
|
||||
|
||||
val abilitiesLens: Lens[RawContext, Map[String, RawContext]] =
|
||||
GenLens[RawContext](_.abilities)
|
||||
|
||||
def fromParts(parts: Chain[(RawContext, RawPart)]): RawContext =
|
||||
partsLens.set(parts)(blank)
|
||||
|
||||
def fromAbilities(abilities: Map[String, RawContext]): RawContext =
|
||||
abilitiesLens.set(abilities)(blank)
|
||||
|
||||
given Monoid[RawContext] with {
|
||||
|
||||
override def empty: RawContext = blank
|
||||
@ -115,7 +135,8 @@ object RawContext {
|
||||
x.declares ++ y.declares,
|
||||
x.exports ++ y.exports,
|
||||
x.parts ++ y.parts,
|
||||
x.abilities ++ y.abilities
|
||||
// This combines abilities (which are RawContexts too) recursively
|
||||
x.abilities.alignCombine(y.abilities)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -3,21 +3,18 @@ package aqua.parser
|
||||
import aqua.parser.Ast.Tree
|
||||
import aqua.parser.lexer.Token
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.expr.func.ReturnExpr
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.lift.{LiftParser, Span}
|
||||
import aqua.parser.Ast.Tree
|
||||
import aqua.parser.ListToTreeConverter
|
||||
|
||||
import cats.Show
|
||||
import cats.data.Chain.:==
|
||||
import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
|
||||
import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
|
||||
import cats.free.Cofree
|
||||
import cats.Show
|
||||
import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
|
||||
import cats.parse.{Parser as P, Parser0 as P0}
|
||||
import cats.syntax.comonad.*
|
||||
import cats.{~>, Comonad, Eval}
|
||||
import cats.{Comonad, Eval, ~>}
|
||||
import scribe.Logging
|
||||
|
||||
abstract class Expr[F[_]](val companion: Expr.Companion, val token: Token[F]) {
|
||||
|
@ -2,9 +2,10 @@ package aqua.parser
|
||||
|
||||
import aqua.parser.expr.RootExpr
|
||||
import aqua.parser.head.Header
|
||||
import aqua.parser.lift.LiftParser.LiftErrorOps
|
||||
import aqua.parser.lift.Span.S
|
||||
import aqua.parser.lift.{LiftParser, Span}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.data.{Validated, ValidatedNec}
|
||||
import cats.free.Cofree
|
||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
||||
import aqua.parser.lexer.Token._
|
||||
import aqua.parser.lexer.{NamedTypeToken, TypeToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
import cats.~>
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan}
|
||||
|
||||
case class AliasExpr[F[_]](name: NamedTypeToken[F], target: TypeToken[F])
|
||||
extends Expr[F](AliasExpr, name) {
|
||||
|
@ -5,7 +5,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, Name}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
|
@ -1,16 +1,17 @@
|
||||
package aqua.parser.expr
|
||||
|
||||
import aqua.parser.Expr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.*
|
||||
import aqua.parser.lexer.PrefixToken
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.VarToken
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser as P
|
||||
import cats.~>
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lexer.PrefixToken
|
||||
import aqua.parser.lexer.VarToken
|
||||
|
||||
case class ConstantExpr[F[_]](
|
||||
name: Name[F],
|
||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
||||
import aqua.parser.lexer.NamedTypeToken
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
import cats.~>
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
case class DataStructExpr[F[_]](name: NamedTypeToken[F]) extends Expr[F](DataStructExpr, name) {
|
||||
override def mapK[K[_]: Comonad](fk: F ~> K): DataStructExpr[K] = copy(name.mapK(fk))
|
||||
|
@ -5,7 +5,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{BasicTypeToken, Name, StreamTypeToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
|
@ -1,17 +1,18 @@
|
||||
package aqua.parser.expr
|
||||
|
||||
import aqua.parser.expr.func.ArrowExpr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.Name
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.{Ast, Expr}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.data.{Validated, ValidatedNec}
|
||||
import cats.free.Cofree
|
||||
import cats.parse.Parser
|
||||
import cats.~>
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
case class FuncExpr[F[_]](
|
||||
name: Name[F]
|
||||
|
@ -3,8 +3,10 @@ package aqua.parser.expr
|
||||
import aqua.parser.Ast.Tree
|
||||
import aqua.parser.lexer.Token
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.{LiftParser, Span}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.{Expr, ParserError}
|
||||
|
||||
import cats.data.{Chain, NonEmptyChain, NonEmptyList, Validated, ValidatedNec}
|
||||
@ -21,7 +23,6 @@ case class RootExpr[F[_]](point: Token[F]) extends Expr[F](RootExpr, point) {
|
||||
}
|
||||
|
||||
object RootExpr extends Expr.Companion {
|
||||
import Span.*
|
||||
|
||||
def validChildren: List[Expr.Lexem] =
|
||||
ServiceExpr :: AliasExpr :: DataStructExpr :: AbilityExpr :: ConstantExpr :: FuncExpr :: Nil
|
||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
import cats.~>
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
case class ServiceExpr[F[_]](name: NamedTypeToken[F], id: Option[ValueToken[F]])
|
||||
extends Expr[F](ServiceExpr, name) {
|
||||
|
@ -3,8 +3,9 @@ package aqua.parser.expr.func
|
||||
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, NamedTypeToken, TypeToken, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.{ArrowReturnError, Ast, Expr, ParserError}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
import cats.~>
|
||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.AssignmentExpr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{CollectionToken, Name, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import cats.parse.Parser as P
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class AssignmentExpr[F[_]](
|
||||
variable: Name[F],
|
||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
||||
import aqua.parser.expr.func.CallArrowExpr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{CallArrowToken, Name, ValueToken, VarToken}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.lift.{LiftParser, Span}
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
import cats.data.NonEmptyList
|
||||
import cats.parse.{Parser as P, Parser0 as P0}
|
||||
import cats.{~>, Comonad}
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class CallArrowExpr[F[_]](
|
||||
variables: List[Name[F]],
|
||||
@ -27,7 +28,8 @@ case class CallArrowExpr[F[_]](
|
||||
object CallArrowExpr extends Expr.Leaf {
|
||||
|
||||
override val p: P[CallArrowExpr[Span.S]] = {
|
||||
val variables: P0[Option[NonEmptyList[Name[Span.S]]]] = (comma(Name.variable) <* ` <- `).backtrack.?
|
||||
val variables: P0[Option[NonEmptyList[Name[Span.S]]]] =
|
||||
(comma(Name.variable) <* ` <- `).backtrack.?
|
||||
|
||||
// TODO: Restrict to function call only
|
||||
// or allow any expression?
|
||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.{CatchExpr, TryExpr}
|
||||
import aqua.parser.lexer.Name
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import cats.parse.Parser
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class CatchExpr[F[_]](name: Name[F]) extends Expr[F](CatchExpr, name) {
|
||||
def mapK[K[_]: Comonad](fk: F ~> K): CatchExpr[K] = copy(name.mapK(fk))
|
||||
|
@ -4,13 +4,15 @@ import aqua.parser.expr.func.ArrowExpr
|
||||
import aqua.parser.lexer.Name
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.{Ast, Expr, ParserError}
|
||||
|
||||
import cats.data.{Validated, ValidatedNec}
|
||||
import cats.free.Cofree
|
||||
import cats.parse.Parser
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class ClosureExpr[F[_]](
|
||||
name: Name[F],
|
||||
|
@ -6,10 +6,11 @@ import aqua.parser.lexer.Token
|
||||
import aqua.parser.lexer.Token.`co`
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import cats.parse.Parser
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class CoExpr[F[_]](point: Token[F]) extends Expr[F](CoExpr, point) {
|
||||
def mapK[K[_]: Comonad](fk: F ~> K): CoExpr[K] = copy(point.mapK(fk))
|
||||
|
@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{BasicTypeToken, Name, Token, TypeToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.{Comonad, ~>}
|
||||
|
@ -6,12 +6,13 @@ import aqua.parser.lexer.Token
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser
|
||||
import cats.{~>, Comonad}
|
||||
import cats.syntax.comonad.*
|
||||
import cats.syntax.functor.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class ElseOtherwiseExpr[F[_]](kind: ElseOtherwiseExpr.Kind, point: Token[F])
|
||||
extends Expr[F](ElseOtherwiseExpr, point) {
|
||||
|
@ -7,7 +7,7 @@ import aqua.parser.lexer.{Name, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.syntax.comonad.*
|
||||
|
@ -5,11 +5,12 @@ import aqua.parser.expr.func.{ForExpr, IfExpr}
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.types.LiteralType
|
||||
import cats.parse.Parser as P
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class IfExpr[F[_]](value: ValueToken[F]) extends Expr[F](IfExpr, value) {
|
||||
|
||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
||||
import aqua.parser.expr.*
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{PropertyToken, ValueToken}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.lift.{LiftParser, Span}
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import cats.parse.Parser
|
||||
import cats.{~>, Comonad}
|
||||
|
||||
import cats.data.NonEmptyList
|
||||
import cats.parse.Parser
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class JoinExpr[F[_]](values: NonEmptyList[ValueToken[F]])
|
||||
extends Expr[F](JoinExpr, values.head) {
|
||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.*
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.ValueToken
|
||||
import aqua.parser.lift.LiftParser
|
||||
import cats.parse.Parser as P
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class OnExpr[F[_]](peerId: ValueToken[F], via: List[ValueToken[F]])
|
||||
extends Expr[F](OnExpr, peerId) {
|
||||
|
@ -6,10 +6,11 @@ import aqua.parser.lexer.Token
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import cats.parse.Parser
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class ParExpr[F[_]](point: Token[F]) extends Expr[F](ParExpr, point) {
|
||||
|
||||
|
@ -6,11 +6,12 @@ import aqua.parser.lexer.Token.{`parseq`, *}
|
||||
import aqua.parser.lexer.{Name, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.syntax.comonad.*
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class ParSeqExpr[F[_]](
|
||||
item: Name[F],
|
||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.PushToStreamExpr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{Name, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.{Comonad, ~>}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
case class PushToStreamExpr[F[_]](
|
||||
stream: Name[F],
|
||||
|
@ -5,11 +5,12 @@ import aqua.parser.expr.func.ReturnExpr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.ValueToken
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.data.NonEmptyList
|
||||
import cats.parse.Parser
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class ReturnExpr[F[_]](values: NonEmptyList[ValueToken[F]])
|
||||
extends Expr[F](ReturnExpr, values.head) {
|
||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.ServiceIdExpr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import cats.parse.Parser as P
|
||||
import cats.{~>, Comonad}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.{Comonad, ~>}
|
||||
|
||||
case class ServiceIdExpr[F[_]](service: NamedTypeToken[F], id: ValueToken[F])
|
||||
extends Expr[F](ServiceIdExpr, service) {
|
||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
||||
import aqua.parser.expr.func.{IfExpr, TryExpr}
|
||||
import aqua.parser.lexer.Token
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.{LiftParser, Span}
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.parser.lift.{LiftParser, Span}
|
||||
|
||||
import cats.parse.Parser as P
|
||||
import cats.{Comonad, ~>}
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
case class TryExpr[F[_]](point: Token[F]) extends Expr[F](TryExpr, point) {
|
||||
|
||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{LiteralToken, Token, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.data.NonEmptyList
|
||||
|
@ -1,16 +1,16 @@
|
||||
package aqua.parser.head
|
||||
|
||||
import cats.Comonad
|
||||
import cats.data.NonEmptyList
|
||||
import cats.parse.Parser as P
|
||||
import cats.~>
|
||||
import cats.syntax.bifunctor.*
|
||||
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{Ability, Name}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.data.NonEmptyList
|
||||
import cats.parse.Parser as P
|
||||
import cats.syntax.bifunctor.*
|
||||
import cats.~>
|
||||
|
||||
trait FromExpr[F[_]] {
|
||||
def imports: NonEmptyList[FromExpr.NameOrAbAs[F]]
|
||||
|
@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.` \n+`
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.data.Chain
|
||||
import cats.free.Cofree
|
||||
|
@ -4,7 +4,7 @@ import aqua.parser.Ast
|
||||
import aqua.parser.lexer.Token
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Show
|
||||
import cats.data.Chain
|
||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token._
|
||||
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.data.NonEmptyList
|
||||
|
@ -1,14 +1,16 @@
|
||||
package aqua.parser.head
|
||||
|
||||
import aqua.parser.lexer.QName
|
||||
import aqua.parser.lexer.Token
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{Ability, LiteralToken, Name, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.data.NonEmptyList
|
||||
import cats.parse.Parser
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.comonad.*
|
||||
@ -18,10 +20,8 @@ import cats.~>
|
||||
|
||||
case class ModuleExpr[F[_]](
|
||||
word: ModuleExpr.Word[F],
|
||||
name: Ability[F],
|
||||
declareAll: Option[Token[F]],
|
||||
declareNames: List[Name[F]],
|
||||
declareCustom: List[Ability[F]]
|
||||
name: QName[F],
|
||||
declares: Option[ModuleExpr.Declares[F]]
|
||||
) extends HeaderExpr[F] {
|
||||
override def token: Token[F] = name
|
||||
|
||||
@ -29,14 +29,31 @@ case class ModuleExpr[F[_]](
|
||||
copy(
|
||||
word = word.mapK(fk),
|
||||
name = name.mapK(fk),
|
||||
declareAll = declareAll.map(_.mapK(fk)),
|
||||
declareNames = declareNames.map(_.mapK(fk)),
|
||||
declareCustom = declareCustom.map(_.mapK(fk))
|
||||
declares = declares.map(_.mapK(fk))
|
||||
)
|
||||
}
|
||||
|
||||
object ModuleExpr extends HeaderExpr.Companion {
|
||||
|
||||
enum Declares[F[_]] {
|
||||
case All(point: Token[F])
|
||||
case Names(names: NonEmptyList[QName[F]])
|
||||
|
||||
def mapK[K[_]: Comonad](fk: F ~> K): Declares[K] = this match {
|
||||
case All(point) => All(point.mapK(fk))
|
||||
case Names(names) => Names(names.map(_.mapK(fk)))
|
||||
}
|
||||
}
|
||||
|
||||
object Declares {
|
||||
|
||||
val p: Parser[Declares[Span.S]] =
|
||||
(`declares` ~ ` *`) *> (
|
||||
comma(QName.p).map(Names(_)) |
|
||||
`star`.lift.map(Token.lift).map(All(_))
|
||||
)
|
||||
}
|
||||
|
||||
final case class Word[F[_]: Comonad](
|
||||
token: F[Word.Kind]
|
||||
) extends Token[F] {
|
||||
@ -60,49 +77,21 @@ object ModuleExpr extends HeaderExpr.Companion {
|
||||
case Kind.Aqua => aqua
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type NameOrAb[F[_]] = Either[Name[F], Ability[F]]
|
||||
|
||||
private val nameOrAb: Parser[NameOrAb[Span.S]] =
|
||||
Name.p.map(Left(_)) | Ability.ab.map(Right(_))
|
||||
|
||||
private val nameOrAbList: Parser[List[NameOrAb[Span.S]]] =
|
||||
comma[NameOrAb[Span.S]](nameOrAb).map(_.toList)
|
||||
|
||||
private val nameOrAbListOrAll: Parser[Either[List[NameOrAb[Span.S]], Token[Span.S]]] =
|
||||
nameOrAbList.map(Left(_)) | (`star` <* ` *`).lift.map(Token.lift(_)).map(Right(_))
|
||||
|
||||
private val moduleWord: Parser[Word[Span.S]] =
|
||||
(`module`.as(Word.Kind.Module).lift.backtrack |
|
||||
val p = (`module`.as(Word.Kind.Module).lift.backtrack |
|
||||
`aqua-word`.as(Word.Kind.Aqua).lift).map(Word(_))
|
||||
}
|
||||
|
||||
override val p: Parser[ModuleExpr[Span.S]] =
|
||||
(
|
||||
(` *`.with1 *> moduleWord) ~
|
||||
(` ` *> Ability.dotted) ~
|
||||
(` declares ` *> nameOrAbListOrAll).backtrack
|
||||
(` *`.with1 *> Word.p) ~
|
||||
(` *` *> QName.p) ~
|
||||
(` *` *> Declares.p <* ` *`).backtrack
|
||||
.map(_.some)
|
||||
.orElse(` *`.as(none)) // Allow trailing spaces
|
||||
).map {
|
||||
case ((word, name), None) =>
|
||||
ModuleExpr(word, name, None, Nil, Nil)
|
||||
case ((word, name), Some(Left(exportMembers))) =>
|
||||
ModuleExpr(
|
||||
word,
|
||||
name,
|
||||
None,
|
||||
exportMembers.collect { case Left(x) => x },
|
||||
exportMembers.collect { case Right(x) => x }
|
||||
)
|
||||
case ((word, name), Some(Right(point))) =>
|
||||
ModuleExpr(
|
||||
word,
|
||||
name,
|
||||
Some(point),
|
||||
Nil,
|
||||
Nil
|
||||
)
|
||||
// Allow trailing spaces without `declares`
|
||||
.orElse(` *`.as(none))
|
||||
).map { case ((word, name), declares) =>
|
||||
ModuleExpr(word, name, declares)
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{Ability, LiteralToken, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser
|
||||
@ -25,7 +25,7 @@ case class UseExpr[F[_]](
|
||||
object UseExpr extends HeaderExpr.Companion {
|
||||
|
||||
override val p: Parser[HeaderExpr[Span.S]] =
|
||||
(`use` *> ` ` *> ValueToken.string ~ (` as ` *> Ability.ab).?).map {
|
||||
(`use` *> ` ` *> ValueToken.string ~ (` as ` *> Ability.dotted).?).map {
|
||||
case (filename, asModule) =>
|
||||
UseExpr(filename, asModule)
|
||||
}
|
||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.{Ability, LiteralToken, Name, ValueToken}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.data.NonEmptyList
|
||||
@ -27,9 +27,8 @@ case class UseFromExpr[F[_]](
|
||||
object UseFromExpr extends HeaderExpr.Companion {
|
||||
|
||||
override val p: Parser[UseFromExpr[Span.S]] =
|
||||
(`use` *> FromExpr.importFrom.surroundedBy(
|
||||
` `
|
||||
) ~ ValueToken.string ~ (` as ` *> Ability.ab)).map { case ((imports, filename), asModule) =>
|
||||
(`use` *> FromExpr.importFrom.surroundedBy(` `) ~
|
||||
ValueToken.string ~ (` as ` *> Ability.dotted)).map { case ((imports, filename), asModule) =>
|
||||
UseFromExpr(imports, filename, asModule)
|
||||
}
|
||||
}
|
||||
|
@ -3,13 +3,14 @@ package aqua.parser.lexer
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.Parser as P
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.comonad.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.~>
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
case class Ability[F[_]: Comonad](name: F[String]) extends Token[F] {
|
||||
override def as[T](v: T): F[T] = name.as(v)
|
||||
|
@ -1,11 +1,12 @@
|
||||
package aqua.parser.lexer
|
||||
|
||||
import aqua.parser.lift.LiftParser
|
||||
import cats.parse.{Parser => P}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import Token._
|
||||
import cats.Comonad
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import cats.parse.{Parser => P}
|
||||
|
||||
case class Arg[F[_]](name: Name[F], `type`: TypeToken[F])
|
||||
|
||||
|
@ -1,15 +1,16 @@
|
||||
package aqua.parser.lexer
|
||||
|
||||
import aqua.parser.lexer.Token._
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser._
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.{Parser => P}
|
||||
import cats.syntax.functor._
|
||||
import cats.syntax.comonad._
|
||||
import cats.syntax.comonad.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.~>
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
|
||||
case class Name[F[_]: Comonad](name: F[String]) extends Token[F] {
|
||||
override def as[T](v: T): F[T] = name.as(v)
|
||||
|
@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.data.{NonEmptyList, NonEmptyMap}
|
||||
|
parser/src/main/scala/aqua/parser/lexer/QName.scala (new file, 55 lines)
@@ -0,0 +1,55 @@
package aqua.parser.lexer

import aqua.helpers.data.PName
import aqua.parser.lexer.Token.*
import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser.*
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{given, *}

import cats.Comonad
import cats.arrow.FunctionK
import cats.data.NonEmptyList
import cats.parse.{Parser => P}
import cats.syntax.comonad.*
import cats.syntax.functor.*

/**
 * Qualified name. Name with parts separated by `.`
 * e.g. `Some.Imported.Module.foo`
 *
 * @param name Name as a whole
 * @param parts Parts of the name
 */
final case class QName[F[_]: Comonad](
  name: F[String],
  parts: NonEmptyList[F[String]]
) extends Token[F] {

  def value: String = name.extract
  override def as[T](v: T): F[T] = name.as(v)

  override def mapK[K[_]: Comonad](fk: FunctionK[F, K]): QName[K] =
    copy(fk(name), parts.map(p => fk(p)))

  def toPName: PName = PName(parts.map(_.extract))
}

object QName {

  final case class As[F[_]: Comonad](
    name: QName[F],
    rename: Option[QName[F]]
  )

  val p: P[QName[Span.S]] =
    anyName.lift
      .repSep(`.`)
      .withString
      .lift
      .map(span => {
        val name = span.fmap { case (_, name) => name }
        val parts = span.fmap { case (parts, _) => parts }.extract
        QName(name, parts)
      })
}
@ -39,7 +39,6 @@ object Token {
|
||||
val `module`: P[Unit] = P.string("module")
|
||||
val `aqua-word`: P[Unit] = P.string("aqua")
|
||||
val `declares`: P[Unit] = P.string("declares")
|
||||
val ` declares ` : P[Unit] = `declares`.surroundedBy(` `)
|
||||
val `declare`: P[Unit] = P.string("declare")
|
||||
val `_export`: P[Unit] = P.string("export")
|
||||
val `star`: P[Unit] = P.char('*')
|
||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.types.ScalarType
|
||||
|
||||
import cats.Comonad
|
||||
|
@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.arrow.FunctionK
|
||||
|
@ -13,25 +13,23 @@ trait LiftParser[S[_]] {
|
||||
|
||||
object LiftParser {
|
||||
|
||||
implicit class LiftErrorOps[S[_]: LiftParser, T](e: Parser.Error) {
|
||||
def wrapErr: S[Parser.Error] = implicitly[LiftParser[S]].wrapErr(e)
|
||||
def apply[S[_]](using lp: LiftParser[S]): LiftParser[S] = lp
|
||||
|
||||
extension [S[_]: LiftParser, T](e: Parser.Error) {
|
||||
def wrapErr: S[Parser.Error] = LiftParser[S].wrapErr(e)
|
||||
}
|
||||
|
||||
implicit class LiftParserOps[S[_]: LiftParser, T](parser: Parser[T]) {
|
||||
def lift: Parser[S[T]] = implicitly[LiftParser[S]].lift(parser)
|
||||
extension [S[_]: LiftParser, T](parser: Parser[T]) {
|
||||
def lift: Parser[S[T]] = LiftParser[S].lift(parser)
|
||||
}
|
||||
|
||||
implicit class LiftParser0Ops[S[_]: LiftParser, T](parser0: Parser0[T]) {
|
||||
def lift0: Parser0[S[T]] = implicitly[LiftParser[S]].lift0(parser0)
|
||||
extension [S[_]: LiftParser, T](parser0: Parser0[T]) {
|
||||
def lift0: Parser0[S[T]] = LiftParser[S].lift0(parser0)
|
||||
}
|
||||
|
||||
object Implicits {
|
||||
|
||||
implicit object idLiftParser extends LiftParser[Id] {
|
||||
override def lift[T](p: Parser[T]): Parser[Id[T]] = p
|
||||
override def lift0[T](p0: Parser0[T]): Parser0[Id[T]] = p0
|
||||
override def wrapErr(e: Parser.Error): Id[Parser.Error] = e
|
||||
}
|
||||
|
||||
given LiftParser[Id] with {
|
||||
override def lift[T](p: Parser[T]): Parser[Id[T]] = p
|
||||
override def lift0[T](p0: Parser0[T]): Parser0[Id[T]] = p0
|
||||
override def wrapErr(e: Parser.Error): Id[Parser.Error] = e
|
||||
}
|
||||
}
|
||||
|
@ -1,8 +1,7 @@
|
||||
package aqua.parser.lift
|
||||
|
||||
import cats.Comonad
|
||||
import cats.parse.{LocationMap, Parser0, Parser as P}
|
||||
|
||||
import cats.parse.{LocationMap, Parser as P, Parser0}
|
||||
import scala.language.implicitConversions
|
||||
|
||||
case class Span(startIndex: Int, endIndex: Int) {
|
||||
@ -131,7 +130,7 @@ object Span {
|
||||
|
||||
type S[T] = (Span, T)
|
||||
|
||||
implicit object spanComonad extends Comonad[S] {
|
||||
given Comonad[S] with {
|
||||
override def extract[A](x: S[A]): A = x._2
|
||||
|
||||
override def coflatMap[A, B](fa: S[A])(f: S[A] ⇒ B): S[B] = fa.copy(_2 = f(fa))
|
||||
@ -139,15 +138,7 @@ object Span {
|
||||
override def map[A, B](fa: S[A])(f: A ⇒ B): S[B] = fa.copy(_2 = f(fa._2))
|
||||
}
|
||||
|
||||
implicit class PToSpan[T](p: P[T]) {
|
||||
def lift: P[Span.S[T]] = Span.spanLiftParser.lift(p)
|
||||
}
|
||||
|
||||
implicit class P0ToSpan[T](p: Parser0[T]) {
|
||||
def lift0: Parser0[Span.S[T]] = Span.spanLiftParser.lift0(p)
|
||||
}
|
||||
|
||||
implicit object spanLiftParser extends LiftParser[S] {
|
||||
given LiftParser[S] with {
|
||||
|
||||
override def lift[T](p: P[T]): P[S[T]] =
|
||||
(P.index.with1 ~ p ~ P.index).map { case ((s, v), e) ⇒
|
||||
|
@ -11,9 +11,9 @@ import aqua.parser.lexer.InfixToken.Op as InfixOp
|
||||
import aqua.parser.lexer.PrefixToken.Op.*
|
||||
import aqua.parser.lexer.PrefixToken.Op as PrefixOp
|
||||
import aqua.parser.lexer.Token.LiftToken
|
||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
||||
import aqua.parser.lift.Span.{given, *}
|
||||
import aqua.types.LiteralType.{bool, number, signed, string, unsigned}
|
||||
import aqua.types.{LiteralType, ScalarType}
|
||||
|
||||
@ -40,6 +40,11 @@ object AquaSpec {
|
||||
|
||||
def toAb(str: String): Ability[Id] = Ability[Id](str)
|
||||
|
||||
def toQName(str: String): QName[Id] = QName[Id](
|
||||
str,
|
||||
NonEmptyList.fromListUnsafe(str.split("\\.").toList)
|
||||
)
|
||||
|
||||
def toVar(name: String): VarToken[Id] = VarToken[Id](toName(name))
|
||||
|
||||
def toVarOp(name: Option[String]): Option[VarToken[Id]] =
|
||||
|
@ -4,14 +4,14 @@ import aqua.AquaSpec
|
||||
import aqua.AquaSpec.*
|
||||
import aqua.parser.expr.func.{CallArrowExpr, CoExpr, ForExpr, JoinExpr, OnExpr}
|
||||
import aqua.parser.lexer.{CallArrowToken, Token}
|
||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
|
||||
import cats.data.{Chain, NonEmptyList}
|
||||
import cats.free.Cofree
|
||||
import cats.{Eval, Id}
|
||||
import org.scalatest.Inside
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
import org.scalatest.Inside
|
||||
|
||||
class CoExprSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec {
|
||||
|
||||
|
@ -4,14 +4,14 @@ import aqua.AquaSpec
|
||||
import aqua.AquaSpec.*
|
||||
import aqua.parser.expr.func.{CallArrowExpr, ForExpr, JoinExpr, OnExpr, ParExpr}
|
||||
import aqua.parser.lexer.{CallArrowToken, Token}
|
||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
import org.scalatest.Inside
|
||||
import cats.{Eval, Id}
|
||||
import cats.data.{Chain, NonEmptyList}
|
||||
import cats.free.Cofree
|
||||
import cats.{Eval, Id}
|
||||
import org.scalatest.Inside
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
|
||||
class ParExprSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec {
|
||||
|
||||
|
@ -3,8 +3,9 @@ package aqua.parser.head
|
||||
import aqua.AquaSpec
|
||||
import aqua.parser.expr.func.ServiceIdExpr
|
||||
import aqua.parser.lexer.{LiteralToken, Token}
|
||||
import aqua.parser.lift.LiftParser.Implicits.*
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.Id
|
||||
import cats.data.NonEmptyList
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
|
@ -3,7 +3,7 @@ package aqua.parser.head
|
||||
import aqua.AquaSpec
|
||||
import aqua.parser.expr.func.ServiceIdExpr
|
||||
import aqua.parser.lexer.{LiteralToken, Token}
|
||||
import aqua.parser.lift.LiftParser.Implicits.*
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.Id
|
||||
|
@ -3,10 +3,11 @@ package aqua.parser.head
|
||||
import aqua.AquaSpec
|
||||
import aqua.parser.expr.func.ServiceIdExpr
|
||||
import aqua.parser.lexer.{LiteralToken, Token}
|
||||
import aqua.parser.lift.LiftParser.Implicits.*
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.Id
|
||||
import cats.data.NonEmptyList
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
|
||||
@ -15,20 +16,17 @@ class ModuleSpec extends AnyFlatSpec with Matchers with AquaSpec {
|
||||
|
||||
val myModule = ModuleExpr(
|
||||
ModuleExpr.Word[Id](Id(ModuleExpr.Word.Kind.Aqua)),
|
||||
toAb("MyModule"),
|
||||
None,
|
||||
Nil,
|
||||
Nil
|
||||
toQName("MyModule"),
|
||||
None
|
||||
)
|
||||
|
||||
val declaresAll = myModule.copy(
|
||||
declareAll = Some(Token.lift[Id, Unit](()))
|
||||
declares = Some(ModuleExpr.Declares.All(Token.lift[Id, Unit](())))
|
||||
)
|
||||
|
||||
def declares(symbols: List[String]) =
|
||||
myModule.copy(
|
||||
declareNames = symbols.filter(_.headOption.exists(_.isLower)).map(toName),
|
||||
declareCustom = symbols.filter(_.headOption.exists(_.isUpper)).map(toAb)
|
||||
declares = Some(ModuleExpr.Declares.Names(NonEmptyList.fromListUnsafe(symbols.map(toQName))))
|
||||
)
|
||||
|
||||
def parseModuleExpr(expr: String): ModuleExpr[Id] =
|
||||
|
@ -3,8 +3,9 @@ package aqua.parser.head
|
||||
import aqua.AquaSpec
|
||||
import aqua.parser.expr.func.ServiceIdExpr
|
||||
import aqua.parser.lexer.{LiteralToken, Token}
|
||||
import aqua.parser.lift.LiftParser.Implicits.*
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.Id
|
||||
import cats.data.NonEmptyList
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
|
@ -1,7 +1,8 @@
|
||||
package aqua.parser.lexer
|
||||
|
||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.Id
|
||||
import cats.data.{NonEmptyList, NonEmptyMap}
|
||||
import org.scalatest.EitherValues
|
||||
|
@ -1,6 +1,6 @@
|
||||
package aqua.parser.lexer
|
||||
|
||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.types.ScalarType
|
||||
|
||||
import cats.Id
|
||||
|
@ -1,13 +1,13 @@
|
||||
package aqua.parser.lexer
|
||||
|
||||
import org.scalatest.EitherValues
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
import aqua.types.LiteralType
|
||||
|
||||
import cats.Id
|
||||
import cats.data.NonEmptyList
|
||||
import org.scalatest.EitherValues
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
|
||||
class ValueTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
||||
|
||||
|
@ -1,6 +1,7 @@
|
||||
package aqua.parser.lexer
|
||||
|
||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||
import aqua.parser.lift.LiftParser.given
|
||||
|
||||
import cats.Id
|
||||
import cats.data.NonEmptyList
|
||||
import org.scalatest.EitherValues
|
||||
|
@ -23,8 +23,8 @@ case class HeaderSem[S[_], C](
|
||||
|
||||
object HeaderSem {
|
||||
|
||||
def fromInit[S[_], C](init: C): HeaderSem[S, C] =
|
||||
HeaderSem(init, c => c.validNec)
|
||||
def fromInit[S[_], C: Monoid](init: C): HeaderSem[S, C] =
|
||||
HeaderSem(init, _ => Monoid[C].empty.validNec)
|
||||
|
||||
given [S[_]: Comonad, C](using
|
||||
rc: Monoid[C]
|
||||
|
@@ -1,5 +1,6 @@
package aqua.semantics.header

import aqua.helpers.data.PName
import aqua.parser.head.ModuleExpr
import aqua.semantics.header.HeaderHandler.{Res, error}
import aqua.semantics.header.Picker.*
@@ -19,40 +20,41 @@ class ModuleSem[S[_]: Comonad, C: Picker](expr: ModuleExpr[S])(using
  locations: LocationsAlgebra[S, State[C, *]]
) {

  import expr.*

  def headerSem: Res[S, C] = {
    lazy val sem = HeaderSem(
      // Save module header info
      Picker[C].blank.setModule(name.value),
      Picker[C].blank.setModule(expr.name.value.some),
      ctx =>
        // When file is handled, check that all the declarations exists
        if (declareAll.nonEmpty) ctx.setDeclares(ctx.allNames).validNec
        else {
          val declares = declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
          val names = declares.map { case (name, _) => name }.toSet
          val res = ctx.setDeclares(names).addOccurences(declares)
        expr.declares match {
          case None => ctx.validNec
          case Some(ModuleExpr.Declares.All(_)) =>
            val names = ctx.allNames.map(PName.simpleUnsafe).toSet
            ctx.setDeclares(names).validNec
          case Some(ModuleExpr.Declares.Names(declareNames)) =>
            val declares = declareNames.fproductLeft(_.value).toList
            val names = declareNames.map(_.toPName).toList.toSet
            val res = ctx.setDeclares(names).addOccurences(declares)

            // summarize contexts to allow redeclaration of imports
            declares.map { case (n, t) =>
              res
                .pick(n, None, ctx.module.nonEmpty)
                .toValidNec(
                  error(
                    t,
                    s"`$n` is expected to be declared, but declaration is not found in the file"
            // summarize contexts to allow redeclaration of imports
            declares.map { case (n, t) =>
              res
                .pick(t.toPName, ctx.module.nonEmpty)
                .toValidNec(
                  error(
                    t,
                    s"`$n` is expected to be declared, but declaration is not found in the file"
                  )
                )
                )
                .void
              // TODO: Should not it be possible to make `.combineAll` the final result?
              // Seems like `.pick` does not return much information
            }.combineAll.as(res)
                .void
              // TODO: Should not it be possible to make `.combineAll` the final result?
              // Seems like `.pick` does not return much information
            }.combineAll.as(res)
        }
    )

    word.value.fold(
    expr.word.value.fold(
      module = error(
        word,
        expr.word,
        "Keyword `module` is deprecated, use `aqua` instead"
      ).invalidNec,
      aqua = sem.validNec
|
||||
package aqua.semantics.header
|
||||
|
||||
import aqua.helpers.data.PName
|
||||
import aqua.raw.{RawContext, RawPart}
|
||||
import aqua.types.{AbilityType, ArrowType, Type}
|
||||
|
||||
@ -13,6 +14,7 @@ trait Picker[A] {
|
||||
def definedAbilityNames(ctx: A): Set[String]
|
||||
def blank: A
|
||||
def pick(ctx: A, name: String, rename: Option[String], declared: Boolean): Option[A]
|
||||
def pick(ctx: A, name: PName, declared: Boolean): Option[A]
|
||||
def pickDeclared(ctx: A): A
|
||||
def pickHeader(ctx: A): A
|
||||
def module(ctx: A): Option[String]
|
||||
@ -24,8 +26,8 @@ trait Picker[A] {
|
||||
def funcAcceptAbility(ctx: A, name: String): Boolean
|
||||
def setAbility(ctx: A, name: String, ctxAb: A): A
|
||||
def setImportPaths(ctx: A, importPaths: Map[String, String]): A
|
||||
def setModule(ctx: A, name: String): A
|
||||
def setDeclares(ctx: A, declares: Set[String]): A
|
||||
def setModule(ctx: A, name: Option[String]): A
|
||||
def setDeclares(ctx: A, declares: Set[PName]): A
|
||||
def setExports(ctx: A, exports: Map[String, Option[String]]): A
|
||||
def addPart(ctx: A, part: (A, RawPart)): A
|
||||
}
|
||||
@ -41,6 +43,10 @@ object Picker {
|
||||
|
||||
def pick(name: String, rename: Option[String], declared: Boolean): Option[A] =
|
||||
Picker[A].pick(p, name, rename, declared)
|
||||
|
||||
def pick(name: PName, declared: Boolean): Option[A] =
|
||||
Picker[A].pick(p, name, declared)
|
||||
|
||||
def pickDeclared: A = Picker[A].pickDeclared(p)
|
||||
def pickHeader: A = Picker[A].pickHeader(p)
|
||||
def module: Option[String] = Picker[A].module(p)
|
||||
@ -65,10 +71,10 @@ object Picker {
|
||||
def addFreeParts(parts: List[RawPart]): A =
|
||||
parts.foldLeft(p) { case (ctx, part) => ctx.addPart(blank -> part) }
|
||||
|
||||
def setModule(name: String): A =
|
||||
def setModule(name: Option[String]): A =
|
||||
Picker[A].setModule(p, name)
|
||||
|
||||
def setDeclares(declares: Set[String]): A =
|
||||
def setDeclares(declares: Set[PName]): A =
|
||||
Picker[A].setDeclares(p, declares)
|
||||
|
||||
def setExports(exports: Map[String, Option[String]]): A =
|
||||
@ -130,10 +136,10 @@ object Picker {
|
||||
override def setImportPaths(ctx: RawContext, importPaths: Map[String, String]): RawContext =
|
||||
ctx
|
||||
|
||||
override def setModule(ctx: RawContext, name: String): RawContext =
|
||||
ctx.copy(module = Some(name))
|
||||
override def setModule(ctx: RawContext, name: Option[String]): RawContext =
|
||||
ctx.copy(module = name)
|
||||
|
||||
override def setDeclares(ctx: RawContext, declares: Set[String]): RawContext =
|
||||
override def setDeclares(ctx: RawContext, declares: Set[PName]): RawContext =
|
||||
ctx.copy(declares = declares)
|
||||
|
||||
override def setExports(ctx: RawContext, exports: Map[String, Option[String]]): RawContext =
|
||||
@ -146,13 +152,39 @@ object Picker {
declared: Boolean
): Option[RawContext] =
Option
.when(!declared || ctx.declares(name)) {
RawContext.blank
.copy(parts = ctx.parts.filter(_._2.name == name).map { case (partContext, part) =>
(partContext, rename.fold(part)(part.rename))
})
.when(!declared || ctx.declaredNames(name)) {
RawContext.fromParts(
ctx.parts.collect {
case (partContext, part) if part.name == name =>
(partContext, rename.fold(part)(part.rename))
}
)
}
.filter(_.nonEmpty)
.map(
// Module and declares should not be lost when picking
// Because it affects later logic
_.setModule(ctx.module).setDeclares(Set(PName.simpleUnsafe(name)))
)

override def pick(
ctx: RawContext,
name: PName,
declared: Boolean
): Option[RawContext] =
name.simple.fold(
name.splits.collectFirstSome { case (ab, field) =>
for {
ability <- ctx.abilities.get(ab.value)
inner <- pick(ability, field, declared)
} yield RawContext
.fromAbilities(Map(ab.value -> inner))
// Module and declares should not be lost when picking
// Because it affects later logic
.setModule(ctx.module)
.setDeclares(Set(name))
}
)(pick(ctx, _, None, declared))

override def pickHeader(ctx: RawContext): RawContext =
RawContext.blank.copy(module = ctx.module, declares = ctx.declares, exports = ctx.exports)
@ -161,7 +193,7 @@ object Picker {
if (ctx.module.isEmpty) ctx
else
ctx.declares.toList
.flatMap(n => pick(ctx, n, None, ctx.module.nonEmpty))
.flatMap(n => pick(ctx, n, ctx.module.nonEmpty))
.foldLeft(pickHeader(ctx))(_ |+| _)
}
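Note: the `pick` overload taking a `PName` resolves a dotted name by trying each prefix/suffix split against nested abilities. The self-contained sketch below mirrors that lookup with a simplified `Scope` type standing in for `RawContext` (the names here are invented for illustration only).

object PickSketch {

  final case class Scope(
    names: Set[String],                      // plain definitions in this scope
    nested: Map[String, Scope] = Map.empty   // nested scopes (abilities/modules)
  )

  // All ways to split a dotted path into a non-empty prefix and non-empty rest,
  // analogous to PName.splits.
  def splits(parts: List[String]): List[(List[String], List[String])] =
    (1 until parts.length).toList.map(parts.splitAt)

  // A simple name is looked up directly; a dotted name succeeds if some
  // prefix names a nested scope in which the rest can be picked.
  def pick(scope: Scope, parts: List[String]): Boolean =
    parts match {
      case single :: Nil => scope.names.contains(single)
      case _ =>
        splits(parts).exists { case (prefix, rest) =>
          scope.nested.get(prefix.mkString(".")).exists(pick(_, rest))
        }
    }

  def main(args: Array[String]): Unit = {
    val root = Scope(
      names = Set("top"),
      nested = Map("Imp.Sub" -> Scope(names = Set("foo")))
    )
    println(pick(root, List("top")))               // true
    println(pick(root, List("Imp", "Sub", "foo"))) // true: split at prefix "Imp.Sub"
    println(pick(root, List("Imp", "missing")))    // false
  }
}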
@ -154,7 +154,12 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
.filterF(nv => T.resolveType(nv.typeName, mustBeDefined = false).map(_.isDefined))
.widen[ValueToken[S]]

callArrow.orElse(ability).orElse(namedValue).foldF(default)(valueToRaw)
callArrow
.orElse(ability)
.orElse(namedValue)
.foldF(default)(
valueToRaw
)

case dvt @ NamedValueToken(typeName, fields) =>
(for {
@ -13,8 +13,8 @@ import aqua.types.ArrowType
import cats.data.*
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.functor.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.option.*
import monocle.Lens
import monocle.macros.GenLens
@ -66,6 +66,6 @@ object AbilitiesState {
rootServiceIds = context.allServices.flatMap { case (name, service) =>
service.defaultId.map(name -> _)
},
abilities = context.abilities // TODO is it the right way to collect abilities? Why?
abilities = context.allAbilities
)
}
@ -2,6 +2,7 @@ package aqua.semantics

import aqua.parser.Ast
import aqua.parser.head.{ExportExpr, FromExpr, HeaderExpr, ModuleExpr}
import aqua.parser.lexer.QName
import aqua.parser.lexer.Token
import aqua.parser.lexer.{Ability, Name}
import aqua.raw.RawContext
@ -38,11 +39,9 @@ class HeaderSpec extends AnyFlatSpec with Matchers with Inside {
Token.lift(()),
Chain(
ModuleExpr(
word = ModuleExpr.Word[Id](Id(ModuleExpr.Word.Kind.Aqua)),
name = Ability[Id]("TestModule"),
declareAll = None,
declareNames = Nil,
declareCustom = Nil
word = ModuleExpr.Word(ModuleExpr.Word.Kind.Aqua),
name = QName("TestModule", NonEmptyList.one("TestModule")),
declares = None
),
ExportExpr(NonEmptyList.of(exp))
)
@ -28,7 +28,6 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {

val emptyCall = Call(Nil, Nil)

implicit val fileLift: LiftParser[Span.S] = Span.spanLiftParser
val parser = Parser.parse(Parser.spanParser)

val semantics = new RawSemantics[Span.S]()
39
utils/helpers/src/main/scala/aqua/helpers/data/PName.scala
Normal file
@ -0,0 +1,39 @@
package aqua.helpers.data

import aqua.errors.Errors.internalError

import cats.data.NonEmptyList
import cats.syntax.option.*

/**
* Short for PathName. Represents name with parts separated by `.`
*/
final case class PName(
parts: NonEmptyList[String]
) {

lazy val simple: Option[String] =
Option.when(parts.length == 1)(parts.head)

lazy val isSimple: Boolean = simple.isDefined

lazy val value: String = parts.toList.mkString(".")

lazy val splits: List[(PName, PName)] = {
val partsList = parts.toList
(1 until parts.length).toList.map(i =>
PName(NonEmptyList.fromListUnsafe(partsList.take(i))) ->
PName(NonEmptyList.fromListUnsafe(partsList.drop(i)))
)
}

override def toString(): String = value
}

object PName {

def simpleUnsafe(name: String): PName =
if (name.isEmpty || name.contains("."))
internalError(s"Invalid PName: $name")
else PName(NonEmptyList.one(name))
}
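Note: to make the behaviour of `splits` concrete, here is a tiny cats-free rendition over a plain List[String] (an illustration only, not part of the change) together with the pairs it produces for a four-part path.

object PNameSplitsSketch {

  // Same computation as PName.splits, expressed over a plain List[String]
  // with the parts joined back with "." for readability.
  def splits(parts: List[String]): List[(String, String)] =
    (1 until parts.length).toList.map { i =>
      parts.take(i).mkString(".") -> parts.drop(i).mkString(".")
    }

  def main(args: Array[String]): Unit =
    splits(List("Test", "Imp", "Sub", "Path")).foreach(println)
  // Prints:
  // (Test,Imp.Sub.Path)
  // (Test.Imp,Sub.Path)
  // (Test.Imp.Sub,Path)
}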