Mirror of https://github.com/fluencelabs/aqua.git (synced 2024-12-04 22:50:18 +00:00)
feat(compiler): Allow redeclaring functions [LNG-357] (#1127)
* Allow dots in use ... as
* Add tests
* Refactor implicits
* Use QName in module name
* Fix test
* Refactor declares
* Add PName
* Propagate nested abilities
* Fix compilation
* Do not propagate context
* Use alignCombine
* Add test
* Add more tests
* Refactor test
* Remove unused function
* Refactor test
* Refactor, add comments
* Checkout aqua-src
Parent: f0ad76189a
Commit: 9c23a9d4ef
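The change set is easiest to read against a small example of what it enables. Below is a sketch in Aqua (hypothetical file names) mirroring the sources that the new "import redeclared functions" spec further down generates: each module declares its own foo plus the foo it pulls in through use, and the importer reaches both through dotted paths.

    -- import2.aqua
    aqua Imp2 declares foo

    func foo() -> i32:
      <- 2

    -- import1.aqua: redeclares the imported function as Imp2.foo
    aqua Imp1 declares foo, Imp2.foo

    use "import2.aqua"

    func foo() -> i32:
      <- 1

    -- main.aqua
    aqua Main
    export main

    use "import1.aqua"

    func main() -> i32:
      v0 <- Imp1.foo()
      v1 <- Imp1.Imp2.foo()
      <- v0 + v1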
@@ -26,6 +26,8 @@ val commons = Seq(
   scalaVersion := scalaV,
   libraryDependencies ++= Seq(
     "com.outr" %%% "scribe" % scribeV,
+    "dev.optics" %%% "monocle-core" % monocleV,
+    "dev.optics" %%% "monocle-macro" % monocleV,
     "org.scalatest" %%% "scalatest" % scalaTestV % Test,
     "org.scalatestplus" %%% "scalacheck-1-17" % scalaTestScalaCheckV % Test
   ),
@@ -201,12 +203,6 @@ lazy val semantics = crossProject(JVMPlatform, JSPlatform)
   .withoutSuffixFor(JVMPlatform)
   .crossType(CrossType.Pure)
   .settings(commons)
-  .settings(
-    libraryDependencies ++= Seq(
-      "dev.optics" %%% "monocle-core" % monocleV,
-      "dev.optics" %%% "monocle-macro" % monocleV
-    )
-  )
   .dependsOn(raw, parser, errors, mangler)

 lazy val compiler = crossProject(JVMPlatform, JSPlatform)
@@ -287,6 +283,7 @@ lazy val helpers = crossProject(JVMPlatform, JSPlatform)
       "org.typelevel" %%% "cats-free" % catsV
     )
   )
+  .dependsOn(errors)

 lazy val errors = crossProject(JVMPlatform, JSPlatform)
   .withoutSuffixFor(JVMPlatform)
@@ -29,6 +29,7 @@ import cats.syntax.show.*
 import org.scalatest.Inside
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
+import scala.annotation.tailrec

 class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
   import ModelBuilder.*
@@ -497,11 +498,15 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
     }
   }

-  val moduleNames = List("Test", "Imp", "Sub", "Path").inits
+  def paths(parts: List[String]): List[String] =
+    parts.inits
       .takeWhile(_.nonEmpty)
       .map(_.mkString("."))
       .toList

+  val moduleNames = paths(List("Test", "Imp", "Sub", "Path"))
+  val renames = paths(List("Renamed", "With", "New", "Name"))
+
   it should "import function with `use`" in {
     def test(name: String, rename: Option[String]) = {
       val src = Map(
@@ -536,16 +541,18 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
     }

     moduleNames.foreach { name =>
-      val rename = "Imported"

       withClue(s"Testing $name") {
         test(name, None)
       }
+
+      renames.foreach { rename =>
         withClue(s"Testing $name as $rename") {
           test(name, rename.some)
         }
       }
     }
+  }

   it should "import service with `use`" in {
     def test(name: String, rename: Option[String]) = {
@@ -664,16 +671,18 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
     }

     moduleNames.foreach { name =>
-      val rename = "Imported"

       withClue(s"Testing $name") {
         test(name, None)
       }
+
+      renames.foreach { rename =>
         withClue(s"Testing $name as $rename") {
           test(name, rename.some)
         }
       }
     }
+  }

   it should "import ability (nested) with `use`" in {
     def test(name: String, rename: Option[String]) = {
@@ -733,16 +742,18 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
     }

     moduleNames.foreach { name =>
-      val rename = "Imported"

       withClue(s"Testing $name") {
         test(name, None)
       }
+
+      renames.foreach { rename =>
         withClue(s"Testing $name as $rename") {
           test(name, rename.some)
         }
       }
     }
+  }

   it should "import abilities in chain of imports" in {
     case class Imp(header: String, rename: Option[String] = None) {
@@ -878,6 +889,161 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
     }
   }

+  it should "import redeclared functions" in {
+
+    final case class Imp(
+      idx: Int,
+      name: String,
+      rename: Option[String] = None,
+      use: Option[Imp] = None
+    ) {
+      def withUse(other: Imp): Imp = copy(use = Some(other))
+
+      lazy val path: String = s"import$idx.aqua"
+
+      lazy val declares: List[String] = use
+        .map(u => u.declares.map(n => s"${u.access}.$n"))
+        .getOrElse(Nil)
+        .prepended("foo")
+
+      lazy val code: String =
+        s"""|aqua $name declares ${declares.mkString(", ")}
+            |
+            |${use.fold("")(_.usage)}
+            |
+            |func foo() -> i32:
+            | <- $idx
+            |""".stripMargin
+
+      lazy val usage: String = s"use \"$path\"" + rename.fold("")(n => s" as $n")
+
+      lazy val access: String = rename.getOrElse(name)
+    }
+
+    type NameRename = (String, Option[String])
+
+    def test(imps: List[NameRename]) = {
+      (imps.length > 0) should be(true)
+
+      val top = imps.zipWithIndex.map { case ((name, rename), idx) =>
+        Imp(idx + 1, name, rename)
+      }.reduceRight { case (cur, prev) =>
+        cur.withUse(prev)
+      }
+
+      val (calls, vars) = top.declares.zipWithIndex.map { case (decl, idx) =>
+        val v = s"v$idx"
+        val call = s"$v <- ${top.access}.$decl()"
+        call -> v
+      }.unzip
+
+      val main =
+        s"""|aqua Main
+            |
+            |export main
+            |
+            |${top.usage}
+            |
+            |func main() -> i32:
+            | ${calls.mkString("\n ")}
+            | <- ${vars.mkString(" + ")}
+            |""".stripMargin
+
+      val imports = List
+        .unfold(top.some)(
+          _.map(i => i -> i.use)
+        )
+        .map(i => i.path -> i.code)
+        .toMap
+
+      val src = Map(
+        "main.aqua" -> main
+      )
+
+      val transformCfg = TransformConfig(relayVarName = None)
+
+      insideRes(src, imports, transformCfg)(
+        "main"
+      ) { case main :: _ =>
+        val l = imps.length
+        val res = LiteralModel.number(l * (l + 1) / 2)
+        val expected = XorRes.wrap(
+          respCall(transformCfg, res, initPeer),
+          errorCall(transformCfg, 0, initPeer)
+        )

+        main.body.equalsOrShowDiff(expected) should be(true)
+      }
+    }
+
+    // Simple
+    (1 to 10).foreach { i =>
+      val names = (1 to i).map(n => s"Imp$n").toList
+      withClue(s"Testing ${names.mkString(" -> ")}") {
+        test(names.map(_ -> none))
+      }
+    }
+
+    extension [A](l: List[List[A]]) {
+      def rotate: List[List[A]] =
+        l.foldLeft(List.empty[List[A]]) { case (acc, next) =>
+          if (acc.isEmpty) next.map(List(_))
+          else
+            for {
+              elem <- next
+              prev <- acc
+            } yield elem +: prev
+        }
+    }
+
+    // With subpaths
+    (1 to 4).foreach { i =>
+      (1 to i)
+        .map(idx =>
+          paths(
+            List("Imp", "Sub", "Path")
+              .map(p => s"$p$idx")
+          )
+        )
+        .toList
+        .rotate
+        .foreach(names =>
+          withClue(s"Testing ${names.mkString(" -> ")}") {
+            test(names.map(_ -> none))
+          }
+        )
+    }
+
+    // With renames
+    (1 to 3).foreach { i =>
+      (1 to i)
+        .map(idx =>
+          for {
+            name <- paths(
+              List("Imp", "Sub", "Path")
+                .map(p => s"$p$idx")
+            )
+            rename <- None :: paths(
+              List("Rename", "To", "Other")
+                .map(p => s"$p$idx")
+            ).map(_.some)
+          } yield name -> rename
+        )
+        .toList
+        .rotate
+        .foreach(names =>
+          val message = names.map { case (n, r) =>
+            s"$n${r.fold("")(n => s" as $n")}"
+          }.mkString(" -> ")
+
+          withClue(s"Testing $message") {
+            test(names)
+          }
+        )
+    }
+  }
+
   it should "not generate error propagation in `if` with `noXor = true`" in {
     val src = Map(
       "index.aqua" ->
@@ -1,5 +1,6 @@
 package aqua.lsp

+import aqua.helpers.data.PName
 import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token}
 import aqua.raw.{RawContext, RawPart}
 import aqua.semantics.header.Picker
@@ -94,15 +95,12 @@ object LspContext {
   ): LspContext[S] =
     ctx.copy(importPaths = importPaths)

-  override def setModule(
-    ctx: LspContext[S],
-    name: String
-  ): LspContext[S] =
+  override def setModule(ctx: LspContext[S], name: Option[String]): LspContext[S] =
     ctx.copy(raw = ctx.raw.setModule(name))

   override def setDeclares(
     ctx: LspContext[S],
-    declares: Set[String]
+    declares: Set[PName]
   ): LspContext[S] =
     ctx.copy(raw = ctx.raw.setDeclares(declares))

@@ -145,6 +143,9 @@ object LspContext {
     )
   )

+  override def pick(ctx: LspContext[S], name: PName, declared: Boolean): Option[LspContext[S]] =
+    ctx.raw.pick(name, declared).map(rc => ctx.copy(raw = rc))
+
   override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ctx.raw.pickHeader)

   override def pickDeclared(ctx: LspContext[S]): LspContext[S] =
@@ -1,5 +1,6 @@
 package aqua.raw

+import aqua.helpers.data.PName
 import aqua.raw.arrow.FuncRaw
 import aqua.raw.value.ValueRaw
 import aqua.types.{AbilityType, StructType, Type}
@@ -8,8 +9,11 @@ import cats.Monoid
 import cats.Semigroup
 import cats.data.Chain
 import cats.data.NonEmptyMap
+import cats.syntax.align.*
 import cats.syntax.monoid.*
 import cats.syntax.option.*
+import monocle.Lens
+import monocle.macros.GenLens
 import scala.collection.immutable.SortedMap

 /**
@@ -28,7 +32,7 @@ import scala.collection.immutable.SortedMap
  */
 case class RawContext(
   module: Option[String] = None,
-  declares: Set[String] = Set.empty,
+  declares: Set[PName] = Set.empty,
   exports: Map[String, Option[String]] = Map.empty,
   parts: Chain[(RawContext, RawPart)] = Chain.empty,
   abilities: Map[String, RawContext] = Map.empty
@@ -51,6 +55,9 @@ case class RawContext(
       }
       .map(prefixFirst(prefix, _))

+  lazy val allAbilities: Map[String, RawContext] =
+    all(_.abilities)
+
   lazy val services: Map[String, ServiceRaw] = collectPartsMap { case srv: ServiceRaw => srv }

   lazy val allServices: Map[String, ServiceRaw] =
@@ -87,10 +94,11 @@ case class RawContext(
     all(_.definedAbilities)

   lazy val allNames: Set[String] =
+    // TODO: How about names in abilities?
     parts.map { case (_, p) => p.name }.toList.toSet

   lazy val declaredNames: Set[String] =
-    allNames.intersect(declares)
+    declares.map(_.value).toSet

   override def toString: String =
     s"""|module: ${module.getOrElse("unnamed")}
@@ -105,6 +113,18 @@ case class RawContext(
 object RawContext {
   val blank: RawContext = RawContext()

+  val partsLens: Lens[RawContext, Chain[(RawContext, RawPart)]] =
+    GenLens[RawContext](_.parts)
+
+  val abilitiesLens: Lens[RawContext, Map[String, RawContext]] =
+    GenLens[RawContext](_.abilities)
+
+  def fromParts(parts: Chain[(RawContext, RawPart)]): RawContext =
+    partsLens.set(parts)(blank)
+
+  def fromAbilities(abilities: Map[String, RawContext]): RawContext =
+    abilitiesLens.set(abilities)(blank)
+
   given Monoid[RawContext] with {

     override def empty: RawContext = blank
@@ -115,7 +135,8 @@ object RawContext {
         x.declares ++ y.declares,
         x.exports ++ y.exports,
         x.parts ++ y.parts,
-        x.abilities ++ y.abilities
+        // This combines abilities (which are RawContexts too) recursively
+        x.abilities.alignCombine(y.abilities)
       )
   }
 }
@@ -3,21 +3,18 @@ package aqua.parser
 import aqua.parser.Ast.Tree
 import aqua.parser.lexer.Token
 import aqua.parser.lexer.Token.*
-import aqua.parser.expr.func.ReturnExpr
 import aqua.parser.lift.LiftParser.*
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+import aqua.parser.lift.Span.{given, *}
 import aqua.parser.lift.{LiftParser, Span}
-import aqua.parser.Ast.Tree
-import aqua.parser.ListToTreeConverter

+import cats.Show
 import cats.data.Chain.:==
+import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
 import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
 import cats.free.Cofree
-import cats.Show
-import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
 import cats.parse.{Parser as P, Parser0 as P0}
 import cats.syntax.comonad.*
-import cats.{~>, Comonad, Eval}
+import cats.{Comonad, Eval, ~>}
 import scribe.Logging

 abstract class Expr[F[_]](val companion: Expr.Companion, val token: Token[F]) {
@@ -2,9 +2,10 @@ package aqua.parser

 import aqua.parser.expr.RootExpr
 import aqua.parser.head.Header
-import aqua.parser.lift.LiftParser.LiftErrorOps
-import aqua.parser.lift.Span.S
-import aqua.parser.lift.{LiftParser, Span}
+import aqua.parser.lift.LiftParser
+import aqua.parser.lift.LiftParser.*
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{given, *}

 import cats.data.{Validated, ValidatedNec}
 import cats.free.Cofree
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
|||||||
import aqua.parser.lexer.Token._
|
import aqua.parser.lexer.Token._
|
||||||
import aqua.parser.lexer.{NamedTypeToken, TypeToken}
|
import aqua.parser.lexer.{NamedTypeToken, TypeToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.~>
|
import cats.~>
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan}
|
|
||||||
|
|
||||||
case class AliasExpr[F[_]](name: NamedTypeToken[F], target: TypeToken[F])
|
case class AliasExpr[F[_]](name: NamedTypeToken[F], target: TypeToken[F])
|
||||||
extends Expr[F](AliasExpr, name) {
|
extends Expr[F](AliasExpr, name) {
|
||||||
|
@ -5,7 +5,7 @@ import aqua.parser.lexer.Token.*
|
|||||||
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, Name}
|
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, Name}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
|
@ -1,16 +1,17 @@
|
|||||||
package aqua.parser.expr
|
package aqua.parser.expr
|
||||||
|
|
||||||
import aqua.parser.Expr
|
import aqua.parser.Expr
|
||||||
import aqua.parser.lexer.Token.*
|
|
||||||
import aqua.parser.lexer.*
|
import aqua.parser.lexer.*
|
||||||
|
import aqua.parser.lexer.PrefixToken
|
||||||
|
import aqua.parser.lexer.Token.*
|
||||||
|
import aqua.parser.lexer.VarToken
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser as P
|
import cats.parse.Parser as P
|
||||||
import cats.~>
|
import cats.~>
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
import aqua.parser.lexer.PrefixToken
|
|
||||||
import aqua.parser.lexer.VarToken
|
|
||||||
|
|
||||||
case class ConstantExpr[F[_]](
|
case class ConstantExpr[F[_]](
|
||||||
name: Name[F],
|
name: Name[F],
|
||||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
|||||||
import aqua.parser.lexer.NamedTypeToken
|
import aqua.parser.lexer.NamedTypeToken
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.~>
|
import cats.~>
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class DataStructExpr[F[_]](name: NamedTypeToken[F]) extends Expr[F](DataStructExpr, name) {
|
case class DataStructExpr[F[_]](name: NamedTypeToken[F]) extends Expr[F](DataStructExpr, name) {
|
||||||
override def mapK[K[_]: Comonad](fk: F ~> K): DataStructExpr[K] = copy(name.mapK(fk))
|
override def mapK[K[_]: Comonad](fk: F ~> K): DataStructExpr[K] = copy(name.mapK(fk))
|
||||||
|
@ -5,7 +5,7 @@ import aqua.parser.lexer.Token.*
|
|||||||
import aqua.parser.lexer.{BasicTypeToken, Name, StreamTypeToken}
|
import aqua.parser.lexer.{BasicTypeToken, Name, StreamTypeToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
|
@ -1,17 +1,18 @@
|
|||||||
package aqua.parser.expr
|
package aqua.parser.expr
|
||||||
|
|
||||||
import aqua.parser.expr.func.ArrowExpr
|
import aqua.parser.expr.func.ArrowExpr
|
||||||
import aqua.parser.lexer.Token.*
|
|
||||||
import aqua.parser.lexer.Name
|
import aqua.parser.lexer.Name
|
||||||
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
import aqua.parser.{Ast, Expr}
|
import aqua.parser.{Ast, Expr}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.data.{Validated, ValidatedNec}
|
import cats.data.{Validated, ValidatedNec}
|
||||||
import cats.free.Cofree
|
import cats.free.Cofree
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.~>
|
import cats.~>
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class FuncExpr[F[_]](
|
case class FuncExpr[F[_]](
|
||||||
name: Name[F]
|
name: Name[F]
|
||||||
|
@ -3,8 +3,10 @@ package aqua.parser.expr
|
|||||||
import aqua.parser.Ast.Tree
|
import aqua.parser.Ast.Tree
|
||||||
import aqua.parser.lexer.Token
|
import aqua.parser.lexer.Token
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
import aqua.parser.lift.{LiftParser, Span}
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
import aqua.parser.{Expr, ParserError}
|
import aqua.parser.{Expr, ParserError}
|
||||||
|
|
||||||
import cats.data.{Chain, NonEmptyChain, NonEmptyList, Validated, ValidatedNec}
|
import cats.data.{Chain, NonEmptyChain, NonEmptyList, Validated, ValidatedNec}
|
||||||
@ -21,7 +23,6 @@ case class RootExpr[F[_]](point: Token[F]) extends Expr[F](RootExpr, point) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
object RootExpr extends Expr.Companion {
|
object RootExpr extends Expr.Companion {
|
||||||
import Span.*
|
|
||||||
|
|
||||||
def validChildren: List[Expr.Lexem] =
|
def validChildren: List[Expr.Lexem] =
|
||||||
ServiceExpr :: AliasExpr :: DataStructExpr :: AbilityExpr :: ConstantExpr :: FuncExpr :: Nil
|
ServiceExpr :: AliasExpr :: DataStructExpr :: AbilityExpr :: ConstantExpr :: FuncExpr :: Nil
|
||||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
|
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.~>
|
import cats.~>
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class ServiceExpr[F[_]](name: NamedTypeToken[F], id: Option[ValueToken[F]])
|
case class ServiceExpr[F[_]](name: NamedTypeToken[F], id: Option[ValueToken[F]])
|
||||||
extends Expr[F](ServiceExpr, name) {
|
extends Expr[F](ServiceExpr, name) {
|
||||||
|
@ -3,8 +3,9 @@ package aqua.parser.expr.func
|
|||||||
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, NamedTypeToken, TypeToken, ValueToken}
|
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, NamedTypeToken, TypeToken, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
import aqua.parser.{ArrowReturnError, Ast, Expr, ParserError}
|
import aqua.parser.{ArrowReturnError, Ast, Expr, ParserError}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.~>
|
import cats.~>
|
||||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.AssignmentExpr
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{CollectionToken, Name, ValueToken}
|
import aqua.parser.lexer.{CollectionToken, Name, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import cats.parse.Parser as P
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
|
import cats.parse.Parser as P
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class AssignmentExpr[F[_]](
|
case class AssignmentExpr[F[_]](
|
||||||
variable: Name[F],
|
variable: Name[F],
|
||||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
|||||||
import aqua.parser.expr.func.CallArrowExpr
|
import aqua.parser.expr.func.CallArrowExpr
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{CallArrowToken, Name, ValueToken, VarToken}
|
import aqua.parser.lexer.{CallArrowToken, Name, ValueToken, VarToken}
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
import aqua.parser.lift.{LiftParser, Span}
|
import aqua.parser.lift.{LiftParser, Span}
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
import cats.parse.{Parser as P, Parser0 as P0}
|
import cats.parse.{Parser as P, Parser0 as P0}
|
||||||
import cats.{~>, Comonad}
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class CallArrowExpr[F[_]](
|
case class CallArrowExpr[F[_]](
|
||||||
variables: List[Name[F]],
|
variables: List[Name[F]],
|
||||||
@ -27,7 +28,8 @@ case class CallArrowExpr[F[_]](
|
|||||||
object CallArrowExpr extends Expr.Leaf {
|
object CallArrowExpr extends Expr.Leaf {
|
||||||
|
|
||||||
override val p: P[CallArrowExpr[Span.S]] = {
|
override val p: P[CallArrowExpr[Span.S]] = {
|
||||||
val variables: P0[Option[NonEmptyList[Name[Span.S]]]] = (comma(Name.variable) <* ` <- `).backtrack.?
|
val variables: P0[Option[NonEmptyList[Name[Span.S]]]] =
|
||||||
|
(comma(Name.variable) <* ` <- `).backtrack.?
|
||||||
|
|
||||||
// TODO: Restrict to function call only
|
// TODO: Restrict to function call only
|
||||||
// or allow any expression?
|
// or allow any expression?
|
||||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.{CatchExpr, TryExpr}
|
|||||||
import aqua.parser.lexer.Name
|
import aqua.parser.lexer.Name
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import cats.parse.Parser
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
|
import cats.parse.Parser
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class CatchExpr[F[_]](name: Name[F]) extends Expr[F](CatchExpr, name) {
|
case class CatchExpr[F[_]](name: Name[F]) extends Expr[F](CatchExpr, name) {
|
||||||
def mapK[K[_]: Comonad](fk: F ~> K): CatchExpr[K] = copy(name.mapK(fk))
|
def mapK[K[_]: Comonad](fk: F ~> K): CatchExpr[K] = copy(name.mapK(fk))
|
||||||
|
@ -4,13 +4,15 @@ import aqua.parser.expr.func.ArrowExpr
|
|||||||
import aqua.parser.lexer.Name
|
import aqua.parser.lexer.Name
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.LiftParser.*
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
import aqua.parser.{Ast, Expr, ParserError}
|
import aqua.parser.{Ast, Expr, ParserError}
|
||||||
|
|
||||||
import cats.data.{Validated, ValidatedNec}
|
import cats.data.{Validated, ValidatedNec}
|
||||||
import cats.free.Cofree
|
import cats.free.Cofree
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.{~>, Comonad}
|
import cats.{Comonad, ~>}
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class ClosureExpr[F[_]](
|
case class ClosureExpr[F[_]](
|
||||||
name: Name[F],
|
name: Name[F],
|
||||||
|
@ -6,10 +6,11 @@ import aqua.parser.lexer.Token
|
|||||||
import aqua.parser.lexer.Token.`co`
|
import aqua.parser.lexer.Token.`co`
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
import cats.parse.Parser
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
|
import cats.parse.Parser
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class CoExpr[F[_]](point: Token[F]) extends Expr[F](CoExpr, point) {
|
case class CoExpr[F[_]](point: Token[F]) extends Expr[F](CoExpr, point) {
|
||||||
def mapK[K[_]: Comonad](fk: F ~> K): CoExpr[K] = copy(point.mapK(fk))
|
def mapK[K[_]: Comonad](fk: F ~> K): CoExpr[K] = copy(point.mapK(fk))
|
||||||
|
@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.*
|
|||||||
import aqua.parser.lexer.{BasicTypeToken, Name, Token, TypeToken}
|
import aqua.parser.lexer.{BasicTypeToken, Name, Token, TypeToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.parse.Parser as P
|
import cats.parse.Parser as P
|
||||||
import cats.{Comonad, ~>}
|
import cats.{Comonad, ~>}
|
||||||
|
@ -6,12 +6,13 @@ import aqua.parser.lexer.Token
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import cats.syntax.comonad.*
|
import cats.syntax.comonad.*
|
||||||
import cats.syntax.functor.*
|
import cats.syntax.functor.*
|
||||||
import aqua.parser.lift.Span
|
import cats.{Comonad, ~>}
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class ElseOtherwiseExpr[F[_]](kind: ElseOtherwiseExpr.Kind, point: Token[F])
|
case class ElseOtherwiseExpr[F[_]](kind: ElseOtherwiseExpr.Kind, point: Token[F])
|
||||||
extends Expr[F](ElseOtherwiseExpr, point) {
|
extends Expr[F](ElseOtherwiseExpr, point) {
|
||||||
|
@ -7,7 +7,7 @@ import aqua.parser.lexer.{Name, ValueToken}
|
|||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.parse.Parser as P
|
import cats.parse.Parser as P
|
||||||
import cats.syntax.comonad.*
|
import cats.syntax.comonad.*
|
||||||
|
@ -5,11 +5,12 @@ import aqua.parser.expr.func.{ForExpr, IfExpr}
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.types.LiteralType
|
|
||||||
import cats.parse.Parser as P
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
import aqua.types.LiteralType
|
||||||
|
|
||||||
|
import cats.parse.Parser as P
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class IfExpr[F[_]](value: ValueToken[F]) extends Expr[F](IfExpr, value) {
|
case class IfExpr[F[_]](value: ValueToken[F]) extends Expr[F](IfExpr, value) {
|
||||||
|
|
||||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
|||||||
import aqua.parser.expr.*
|
import aqua.parser.expr.*
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{PropertyToken, ValueToken}
|
import aqua.parser.lexer.{PropertyToken, ValueToken}
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
import aqua.parser.lift.{LiftParser, Span}
|
import aqua.parser.lift.{LiftParser, Span}
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
import cats.parse.Parser
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
|
import cats.parse.Parser
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class JoinExpr[F[_]](values: NonEmptyList[ValueToken[F]])
|
case class JoinExpr[F[_]](values: NonEmptyList[ValueToken[F]])
|
||||||
extends Expr[F](JoinExpr, values.head) {
|
extends Expr[F](JoinExpr, values.head) {
|
||||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.*
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.ValueToken
|
import aqua.parser.lexer.ValueToken
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import cats.parse.Parser as P
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
|
import cats.parse.Parser as P
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class OnExpr[F[_]](peerId: ValueToken[F], via: List[ValueToken[F]])
|
case class OnExpr[F[_]](peerId: ValueToken[F], via: List[ValueToken[F]])
|
||||||
extends Expr[F](OnExpr, peerId) {
|
extends Expr[F](OnExpr, peerId) {
|
||||||
|
@ -6,10 +6,11 @@ import aqua.parser.lexer.Token
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
import cats.parse.Parser
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
|
import cats.parse.Parser
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class ParExpr[F[_]](point: Token[F]) extends Expr[F](ParExpr, point) {
|
case class ParExpr[F[_]](point: Token[F]) extends Expr[F](ParExpr, point) {
|
||||||
|
|
||||||
|
@ -6,11 +6,12 @@ import aqua.parser.lexer.Token.{`parseq`, *}
|
|||||||
import aqua.parser.lexer.{Name, ValueToken}
|
import aqua.parser.lexer.{Name, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.parse.Parser as P
|
import cats.parse.Parser as P
|
||||||
import cats.syntax.comonad.*
|
import cats.syntax.comonad.*
|
||||||
import cats.{~>, Comonad}
|
import cats.{Comonad, ~>}
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class ParSeqExpr[F[_]](
|
case class ParSeqExpr[F[_]](
|
||||||
item: Name[F],
|
item: Name[F],
|
||||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.PushToStreamExpr
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{Name, ValueToken}
|
import aqua.parser.lexer.{Name, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.parse.Parser as P
|
import cats.parse.Parser as P
|
||||||
import cats.{Comonad, ~>}
|
import cats.{Comonad, ~>}
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class PushToStreamExpr[F[_]](
|
case class PushToStreamExpr[F[_]](
|
||||||
stream: Name[F],
|
stream: Name[F],
|
||||||
|
@ -5,11 +5,12 @@ import aqua.parser.expr.func.ReturnExpr
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.ValueToken
|
import aqua.parser.lexer.ValueToken
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.{~>, Comonad}
|
import cats.{Comonad, ~>}
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class ReturnExpr[F[_]](values: NonEmptyList[ValueToken[F]])
|
case class ReturnExpr[F[_]](values: NonEmptyList[ValueToken[F]])
|
||||||
extends Expr[F](ReturnExpr, values.head) {
|
extends Expr[F](ReturnExpr, values.head) {
|
||||||
|
@ -5,10 +5,11 @@ import aqua.parser.expr.func.ServiceIdExpr
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
|
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import cats.parse.Parser as P
|
|
||||||
import cats.{~>, Comonad}
|
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
|
import cats.parse.Parser as P
|
||||||
|
import cats.{Comonad, ~>}
|
||||||
|
|
||||||
case class ServiceIdExpr[F[_]](service: NamedTypeToken[F], id: ValueToken[F])
|
case class ServiceIdExpr[F[_]](service: NamedTypeToken[F], id: ValueToken[F])
|
||||||
extends Expr[F](ServiceIdExpr, service) {
|
extends Expr[F](ServiceIdExpr, service) {
|
||||||
|
@ -4,11 +4,12 @@ import aqua.parser.Expr
|
|||||||
import aqua.parser.expr.func.{IfExpr, TryExpr}
|
import aqua.parser.expr.func.{IfExpr, TryExpr}
|
||||||
import aqua.parser.lexer.Token
|
import aqua.parser.lexer.Token
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.{LiftParser, Span}
|
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
import aqua.parser.lift.{LiftParser, Span}
|
||||||
|
|
||||||
import cats.parse.Parser as P
|
import cats.parse.Parser as P
|
||||||
import cats.{Comonad, ~>}
|
import cats.{Comonad, ~>}
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class TryExpr[F[_]](point: Token[F]) extends Expr[F](TryExpr, point) {
|
case class TryExpr[F[_]](point: Token[F]) extends Expr[F](TryExpr, point) {
|
||||||
|
|
||||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
|||||||
import aqua.parser.lexer.{LiteralToken, Token, ValueToken}
|
import aqua.parser.lexer.{LiteralToken, Token, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
|
@ -1,16 +1,16 @@
|
|||||||
package aqua.parser.head
|
package aqua.parser.head
|
||||||
|
|
||||||
import cats.Comonad
|
|
||||||
import cats.data.NonEmptyList
|
|
||||||
import cats.parse.Parser as P
|
|
||||||
import cats.~>
|
|
||||||
import cats.syntax.bifunctor.*
|
|
||||||
|
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{Ability, Name}
|
import aqua.parser.lexer.{Ability, Name}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
|
import cats.Comonad
|
||||||
|
import cats.data.NonEmptyList
|
||||||
|
import cats.parse.Parser as P
|
||||||
|
import cats.syntax.bifunctor.*
|
||||||
|
import cats.~>
|
||||||
|
|
||||||
trait FromExpr[F[_]] {
|
trait FromExpr[F[_]] {
|
||||||
def imports: NonEmptyList[FromExpr.NameOrAbAs[F]]
|
def imports: NonEmptyList[FromExpr.NameOrAbAs[F]]
|
||||||
|
@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.` \n+`
|
|||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.data.Chain
|
import cats.data.Chain
|
||||||
import cats.free.Cofree
|
import cats.free.Cofree
|
||||||
|
@ -4,7 +4,7 @@ import aqua.parser.Ast
|
|||||||
import aqua.parser.lexer.Token
|
import aqua.parser.lexer.Token
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Show
|
import cats.Show
|
||||||
import cats.data.Chain
|
import cats.data.Chain
|
||||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token._
|
|||||||
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
|||||||
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
import aqua.parser.lexer.{LiteralToken, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
|
@ -1,14 +1,16 @@
|
|||||||
package aqua.parser.head
|
package aqua.parser.head
|
||||||
|
|
||||||
|
import aqua.parser.lexer.QName
|
||||||
import aqua.parser.lexer.Token
|
import aqua.parser.lexer.Token
|
||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lexer.{Ability, LiteralToken, Name, ValueToken}
|
import aqua.parser.lexer.{Ability, LiteralToken, Name, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
|
import cats.data.NonEmptyList
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
import cats.syntax.applicative.*
|
import cats.syntax.applicative.*
|
||||||
import cats.syntax.comonad.*
|
import cats.syntax.comonad.*
|
||||||
@ -18,10 +20,8 @@ import cats.~>
|
|||||||
|
|
||||||
case class ModuleExpr[F[_]](
|
case class ModuleExpr[F[_]](
|
||||||
word: ModuleExpr.Word[F],
|
word: ModuleExpr.Word[F],
|
||||||
name: Ability[F],
|
name: QName[F],
|
||||||
declareAll: Option[Token[F]],
|
declares: Option[ModuleExpr.Declares[F]]
|
||||||
declareNames: List[Name[F]],
|
|
||||||
declareCustom: List[Ability[F]]
|
|
||||||
) extends HeaderExpr[F] {
|
) extends HeaderExpr[F] {
|
||||||
override def token: Token[F] = name
|
override def token: Token[F] = name
|
||||||
|
|
||||||
@ -29,14 +29,31 @@ case class ModuleExpr[F[_]](
|
|||||||
copy(
|
copy(
|
||||||
word = word.mapK(fk),
|
word = word.mapK(fk),
|
||||||
name = name.mapK(fk),
|
name = name.mapK(fk),
|
||||||
declareAll = declareAll.map(_.mapK(fk)),
|
declares = declares.map(_.mapK(fk))
|
||||||
declareNames = declareNames.map(_.mapK(fk)),
|
|
||||||
declareCustom = declareCustom.map(_.mapK(fk))
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
object ModuleExpr extends HeaderExpr.Companion {
|
object ModuleExpr extends HeaderExpr.Companion {
|
||||||
|
|
||||||
|
enum Declares[F[_]] {
|
||||||
|
case All(point: Token[F])
|
||||||
|
case Names(names: NonEmptyList[QName[F]])
|
||||||
|
|
||||||
|
def mapK[K[_]: Comonad](fk: F ~> K): Declares[K] = this match {
|
||||||
|
case All(point) => All(point.mapK(fk))
|
||||||
|
case Names(names) => Names(names.map(_.mapK(fk)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
object Declares {
|
||||||
|
|
||||||
|
val p: Parser[Declares[Span.S]] =
|
||||||
|
(`declares` ~ ` *`) *> (
|
||||||
|
comma(QName.p).map(Names(_)) |
|
||||||
|
`star`.lift.map(Token.lift).map(All(_))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
final case class Word[F[_]: Comonad](
|
final case class Word[F[_]: Comonad](
|
||||||
token: F[Word.Kind]
|
token: F[Word.Kind]
|
||||||
) extends Token[F] {
|
) extends Token[F] {
|
||||||
@ -60,49 +77,21 @@ object ModuleExpr extends HeaderExpr.Companion {
|
|||||||
case Kind.Aqua => aqua
|
case Kind.Aqua => aqua
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
type NameOrAb[F[_]] = Either[Name[F], Ability[F]]
|
val p = (`module`.as(Word.Kind.Module).lift.backtrack |
|
||||||
|
|
||||||
private val nameOrAb: Parser[NameOrAb[Span.S]] =
|
|
||||||
Name.p.map(Left(_)) | Ability.ab.map(Right(_))
|
|
||||||
|
|
||||||
private val nameOrAbList: Parser[List[NameOrAb[Span.S]]] =
|
|
||||||
comma[NameOrAb[Span.S]](nameOrAb).map(_.toList)
|
|
||||||
|
|
||||||
private val nameOrAbListOrAll: Parser[Either[List[NameOrAb[Span.S]], Token[Span.S]]] =
|
|
||||||
nameOrAbList.map(Left(_)) | (`star` <* ` *`).lift.map(Token.lift(_)).map(Right(_))
|
|
||||||
|
|
||||||
private val moduleWord: Parser[Word[Span.S]] =
|
|
||||||
(`module`.as(Word.Kind.Module).lift.backtrack |
|
|
||||||
`aqua-word`.as(Word.Kind.Aqua).lift).map(Word(_))
|
`aqua-word`.as(Word.Kind.Aqua).lift).map(Word(_))
|
||||||
|
}
|
||||||
|
|
||||||
override val p: Parser[ModuleExpr[Span.S]] =
|
override val p: Parser[ModuleExpr[Span.S]] =
|
||||||
(
|
(
|
||||||
(` *`.with1 *> moduleWord) ~
|
(` *`.with1 *> Word.p) ~
|
||||||
(` ` *> Ability.dotted) ~
|
(` *` *> QName.p) ~
|
||||||
(` declares ` *> nameOrAbListOrAll).backtrack
|
(` *` *> Declares.p <* ` *`).backtrack
|
||||||
.map(_.some)
|
.map(_.some)
|
||||||
.orElse(` *`.as(none)) // Allow trailing spaces
|
// Allow trailing spaces without `declares`
|
||||||
).map {
|
.orElse(` *`.as(none))
|
||||||
case ((word, name), None) =>
|
).map { case ((word, name), declares) =>
|
||||||
ModuleExpr(word, name, None, Nil, Nil)
|
ModuleExpr(word, name, declares)
|
||||||
case ((word, name), Some(Left(exportMembers))) =>
|
|
||||||
ModuleExpr(
|
|
||||||
word,
|
|
||||||
name,
|
|
||||||
None,
|
|
||||||
exportMembers.collect { case Left(x) => x },
|
|
||||||
exportMembers.collect { case Right(x) => x }
|
|
||||||
)
|
|
||||||
case ((word, name), Some(Right(point))) =>
|
|
||||||
ModuleExpr(
|
|
||||||
word,
|
|
||||||
name,
|
|
||||||
Some(point),
|
|
||||||
Nil,
|
|
||||||
Nil
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
|||||||
import aqua.parser.lexer.{Ability, LiteralToken, ValueToken}
|
import aqua.parser.lexer.{Ability, LiteralToken, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
@ -25,7 +25,7 @@ case class UseExpr[F[_]](
|
|||||||
object UseExpr extends HeaderExpr.Companion {
|
object UseExpr extends HeaderExpr.Companion {
|
||||||
|
|
||||||
override val p: Parser[HeaderExpr[Span.S]] =
|
override val p: Parser[HeaderExpr[Span.S]] =
|
||||||
(`use` *> ` ` *> ValueToken.string ~ (` as ` *> Ability.ab).?).map {
|
(`use` *> ` ` *> ValueToken.string ~ (` as ` *> Ability.dotted).?).map {
|
||||||
case (filename, asModule) =>
|
case (filename, asModule) =>
|
||||||
UseExpr(filename, asModule)
|
UseExpr(filename, asModule)
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
|
|||||||
import aqua.parser.lexer.{Ability, LiteralToken, Name, ValueToken}
|
import aqua.parser.lexer.{Ability, LiteralToken, Name, ValueToken}
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.Span
|
import aqua.parser.lift.Span
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
@ -27,9 +27,8 @@ case class UseFromExpr[F[_]](
|
|||||||
object UseFromExpr extends HeaderExpr.Companion {
|
object UseFromExpr extends HeaderExpr.Companion {
|
||||||
|
|
||||||
override val p: Parser[UseFromExpr[Span.S]] =
|
override val p: Parser[UseFromExpr[Span.S]] =
|
||||||
(`use` *> FromExpr.importFrom.surroundedBy(
|
(`use` *> FromExpr.importFrom.surroundedBy(` `) ~
|
||||||
` `
|
ValueToken.string ~ (` as ` *> Ability.dotted)).map { case ((imports, filename), asModule) =>
|
||||||
) ~ ValueToken.string ~ (` as ` *> Ability.ab)).map { case ((imports, filename), asModule) =>
|
|
||||||
UseFromExpr(imports, filename, asModule)
|
UseFromExpr(imports, filename, asModule)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3,13 +3,14 @@ package aqua.parser.lexer
|
|||||||
import aqua.parser.lexer.Token.*
|
import aqua.parser.lexer.Token.*
|
||||||
import aqua.parser.lift.LiftParser
|
import aqua.parser.lift.LiftParser
|
||||||
import aqua.parser.lift.LiftParser.*
|
import aqua.parser.lift.LiftParser.*
|
||||||
|
import aqua.parser.lift.Span
|
||||||
|
import aqua.parser.lift.Span.{given, *}
|
||||||
|
|
||||||
import cats.Comonad
|
import cats.Comonad
|
||||||
import cats.parse.Parser as P
|
import cats.parse.Parser as P
|
||||||
import cats.syntax.functor.*
|
|
||||||
import cats.syntax.comonad.*
|
import cats.syntax.comonad.*
|
||||||
|
import cats.syntax.functor.*
|
||||||
import cats.~>
|
import cats.~>
|
||||||
import aqua.parser.lift.Span
|
|
||||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
|
|
||||||
|
|
||||||
case class Ability[F[_]: Comonad](name: F[String]) extends Token[F] {
|
case class Ability[F[_]: Comonad](name: F[String]) extends Token[F] {
|
||||||
override def as[T](v: T): F[T] = name.as(v)
|
override def as[T](v: T): F[T] = name.as(v)
|
||||||
|
@@ -1,11 +1,12 @@
 package aqua.parser.lexer

 import aqua.parser.lift.LiftParser
-import cats.parse.{Parser => P}
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{given, *}

 import Token._
 import cats.Comonad
-import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+import cats.parse.{Parser => P}

 case class Arg[F[_]](name: Name[F], `type`: TypeToken[F])

@@ -1,15 +1,16 @@
 package aqua.parser.lexer

-import aqua.parser.lexer.Token._
+import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
-import aqua.parser.lift.LiftParser._
+import aqua.parser.lift.LiftParser.*
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{given, *}

 import cats.Comonad
 import cats.parse.{Parser => P}
-import cats.syntax.functor._
-import cats.syntax.comonad._
+import cats.syntax.comonad.*
+import cats.syntax.functor.*
 import cats.~>
-import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}

 case class Name[F[_]: Comonad](name: F[String]) extends Token[F] {
   override def as[T](v: T): F[T] = name.as(v)
@@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser.*
 import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+import aqua.parser.lift.Span.{given, *}
 import aqua.types.LiteralType

 import cats.data.{NonEmptyList, NonEmptyMap}
parser/src/main/scala/aqua/parser/lexer/QName.scala (new file, 55 lines)
@@ -0,0 +1,55 @@
+package aqua.parser.lexer
+
+import aqua.helpers.data.PName
+import aqua.parser.lexer.Token.*
+import aqua.parser.lift.LiftParser
+import aqua.parser.lift.LiftParser.*
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{given, *}
+
+import cats.Comonad
+import cats.arrow.FunctionK
+import cats.data.NonEmptyList
+import cats.parse.{Parser => P}
+import cats.syntax.comonad.*
+import cats.syntax.functor.*
+
+/**
+ * Qualified name. Name with parts separated by `.`
+ * e.g. `Some.Imported.Module.foo`
+ *
+ * @param name Name as a whole
+ * @param parts Parts of the name
+ */
+final case class QName[F[_]: Comonad](
+  name: F[String],
+  parts: NonEmptyList[F[String]]
+) extends Token[F] {
+
+  def value: String = name.extract
+  override def as[T](v: T): F[T] = name.as(v)
+
+  override def mapK[K[_]: Comonad](fk: FunctionK[F, K]): QName[K] =
+    copy(fk(name), parts.map(p => fk(p)))
+
+  def toPName: PName = PName(parts.map(_.extract))
+}
+
+object QName {
+
+  final case class As[F[_]: Comonad](
+    name: QName[F],
+    rename: Option[QName[F]]
+  )
+
+  val p: P[QName[Span.S]] =
+    anyName.lift
+      .repSep(`.`)
+      .withString
+      .lift
+      .map(span => {
+        val name = span.fmap { case (_, name) => name }
+        val parts = span.fmap { case (parts, _) => parts }.extract
+        QName(name, parts)
+      })
+}
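As a rough illustration of how the new token is meant to be used (a sketch only, mirroring the `toQName` test helper added further down in this diff and assuming cats' `Id` comonad so that `extract` is the identity):

// Illustrative sketch, not part of the commit: build a QName over Id and
// project it to a PName (PName is defined in utils/helpers later in this diff).
import cats.Id
import cats.data.NonEmptyList

val qname: QName[Id] = QName[Id](
  "Some.Imported.Module.foo",
  NonEmptyList.of("Some", "Imported", "Module", "foo")
)

// `value` is the whole dotted name, `toPName` keeps only the parts.
assert(qname.value == "Some.Imported.Module.foo")
assert(qname.toPName.parts.toList == List("Some", "Imported", "Module", "foo"))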
@@ -39,7 +39,6 @@ object Token {
   val `module`: P[Unit] = P.string("module")
   val `aqua-word`: P[Unit] = P.string("aqua")
   val `declares`: P[Unit] = P.string("declares")
-  val ` declares ` : P[Unit] = `declares`.surroundedBy(` `)
   val `declare`: P[Unit] = P.string("declare")
   val `_export`: P[Unit] = P.string("export")
   val `star`: P[Unit] = P.char('*')
@@ -4,7 +4,7 @@ import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser.*
 import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
+import aqua.parser.lift.Span.{given, *}
 import aqua.types.ScalarType

 import cats.Comonad
@@ -6,7 +6,7 @@ import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser.*
 import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
+import aqua.parser.lift.Span.{given, *}
 import aqua.types.LiteralType

 import cats.arrow.FunctionK
@@ -13,25 +13,23 @@ trait LiftParser[S[_]] {

 object LiftParser {

-  implicit class LiftErrorOps[S[_]: LiftParser, T](e: Parser.Error) {
-    def wrapErr: S[Parser.Error] = implicitly[LiftParser[S]].wrapErr(e)
+  def apply[S[_]](using lp: LiftParser[S]): LiftParser[S] = lp
+
+  extension [S[_]: LiftParser, T](e: Parser.Error) {
+    def wrapErr: S[Parser.Error] = LiftParser[S].wrapErr(e)
   }

-  implicit class LiftParserOps[S[_]: LiftParser, T](parser: Parser[T]) {
-    def lift: Parser[S[T]] = implicitly[LiftParser[S]].lift(parser)
+  extension [S[_]: LiftParser, T](parser: Parser[T]) {
+    def lift: Parser[S[T]] = LiftParser[S].lift(parser)
   }

-  implicit class LiftParser0Ops[S[_]: LiftParser, T](parser0: Parser0[T]) {
-    def lift0: Parser0[S[T]] = implicitly[LiftParser[S]].lift0(parser0)
+  extension [S[_]: LiftParser, T](parser0: Parser0[T]) {
+    def lift0: Parser0[S[T]] = LiftParser[S].lift0(parser0)
   }

-  object Implicits {
+  given LiftParser[Id] with {

-    implicit object idLiftParser extends LiftParser[Id] {
     override def lift[T](p: Parser[T]): Parser[Id[T]] = p
     override def lift0[T](p0: Parser0[T]): Parser0[Id[T]] = p0
     override def wrapErr(e: Parser.Error): Id[Parser.Error] = e
   }

-  }
 }
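The hunk above is a mechanical Scala 2 to Scala 3 migration: `implicit class` wrappers become `extension` methods, the `Implicits.idLiftParser` object becomes a top-level `given`, and a summoner `apply` replaces `implicitly`. A minimal standalone sketch of the same idiom (the `Boxed` type and `box` method are hypothetical, not from the codebase):

// Hypothetical type class used only to illustrate the migration pattern.
trait Boxed[F[_]] {
  def box[T](t: T): F[T]
}

object Boxed {
  // Summoner, analogous to `def apply[S[_]](using lp: LiftParser[S])`.
  def apply[F[_]](using b: Boxed[F]): Boxed[F] = b

  // Scala 3 replacement for a Scala 2 `implicit class ... { def boxed = ... }`.
  extension [F[_]: Boxed, T](t: T) {
    def boxed: F[T] = Boxed[F].box(t)
  }

  // Scala 3 replacement for a Scala 2 `implicit object` instance.
  given Boxed[Option] with {
    override def box[T](t: T): Option[T] = Some(t)
  }
}

// With `import Boxed.{*, given}` in scope, the expected type drives instance
// selection at call sites, e.g. `val x: Option[Int] = 1.boxed`.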
@@ -1,8 +1,7 @@
 package aqua.parser.lift

 import cats.Comonad
-import cats.parse.{LocationMap, Parser0, Parser as P}
+import cats.parse.{LocationMap, Parser as P, Parser0}

 import scala.language.implicitConversions

 case class Span(startIndex: Int, endIndex: Int) {
@@ -131,7 +130,7 @@ object Span {

   type S[T] = (Span, T)

-  implicit object spanComonad extends Comonad[S] {
+  given Comonad[S] with {
     override def extract[A](x: S[A]): A = x._2

     override def coflatMap[A, B](fa: S[A])(f: S[A] ⇒ B): S[B] = fa.copy(_2 = f(fa))
@@ -139,15 +138,7 @@ object Span {
     override def map[A, B](fa: S[A])(f: A ⇒ B): S[B] = fa.copy(_2 = f(fa._2))
   }

-  implicit class PToSpan[T](p: P[T]) {
-    def lift: P[Span.S[T]] = Span.spanLiftParser.lift(p)
-  }
-
-  implicit class P0ToSpan[T](p: Parser0[T]) {
-    def lift0: Parser0[Span.S[T]] = Span.spanLiftParser.lift0(p)
-  }
-
-  implicit object spanLiftParser extends LiftParser[S] {
+  given LiftParser[S] with {

     override def lift[T](p: P[T]): P[S[T]] =
       (P.index.with1 ~ p ~ P.index).map { case ((s, v), e) ⇒
@@ -11,9 +11,9 @@ import aqua.parser.lexer.InfixToken.Op as InfixOp
 import aqua.parser.lexer.PrefixToken.Op.*
 import aqua.parser.lexer.PrefixToken.Op as PrefixOp
 import aqua.parser.lexer.Token.LiftToken
-import aqua.parser.lift.LiftParser.Implicits.idLiftParser
+import aqua.parser.lift.LiftParser.given
 import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+import aqua.parser.lift.Span.{given, *}
 import aqua.types.LiteralType.{bool, number, signed, string, unsigned}
 import aqua.types.{LiteralType, ScalarType}

@@ -40,6 +40,11 @@ object AquaSpec {

   def toAb(str: String): Ability[Id] = Ability[Id](str)

+  def toQName(str: String): QName[Id] = QName[Id](
+    str,
+    NonEmptyList.fromListUnsafe(str.split("\\.").toList)
+  )
+
   def toVar(name: String): VarToken[Id] = VarToken[Id](toName(name))

   def toVarOp(name: Option[String]): Option[VarToken[Id]] =
@@ -4,14 +4,14 @@ import aqua.AquaSpec
 import aqua.AquaSpec.*
 import aqua.parser.expr.func.{CallArrowExpr, CoExpr, ForExpr, JoinExpr, OnExpr}
 import aqua.parser.lexer.{CallArrowToken, Token}
-import aqua.parser.lift.LiftParser.Implicits.idLiftParser
+import aqua.parser.lift.LiftParser.given

 import cats.data.{Chain, NonEmptyList}
 import cats.free.Cofree
 import cats.{Eval, Id}
+import org.scalatest.Inside
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
-import org.scalatest.Inside

 class CoExprSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec {

@@ -4,14 +4,14 @@ import aqua.AquaSpec
 import aqua.AquaSpec.*
 import aqua.parser.expr.func.{CallArrowExpr, ForExpr, JoinExpr, OnExpr, ParExpr}
 import aqua.parser.lexer.{CallArrowToken, Token}
-import aqua.parser.lift.LiftParser.Implicits.idLiftParser
+import aqua.parser.lift.LiftParser.given

-import org.scalatest.flatspec.AnyFlatSpec
-import org.scalatest.matchers.should.Matchers
-import org.scalatest.Inside
-import cats.{Eval, Id}
 import cats.data.{Chain, NonEmptyList}
 import cats.free.Cofree
+import cats.{Eval, Id}
+import org.scalatest.Inside
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers

 class ParExprSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec {

|
|||||||
import aqua.AquaSpec
|
import aqua.AquaSpec
|
||||||
import aqua.parser.expr.func.ServiceIdExpr
|
import aqua.parser.expr.func.ServiceIdExpr
|
||||||
import aqua.parser.lexer.{LiteralToken, Token}
|
import aqua.parser.lexer.{LiteralToken, Token}
|
||||||
import aqua.parser.lift.LiftParser.Implicits.*
|
import aqua.parser.lift.LiftParser.given
|
||||||
import aqua.types.LiteralType
|
import aqua.types.LiteralType
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
|
@ -3,7 +3,7 @@ package aqua.parser.head
|
|||||||
import aqua.AquaSpec
|
import aqua.AquaSpec
|
||||||
import aqua.parser.expr.func.ServiceIdExpr
|
import aqua.parser.expr.func.ServiceIdExpr
|
||||||
import aqua.parser.lexer.{LiteralToken, Token}
|
import aqua.parser.lexer.{LiteralToken, Token}
|
||||||
import aqua.parser.lift.LiftParser.Implicits.*
|
import aqua.parser.lift.LiftParser.given
|
||||||
import aqua.types.LiteralType
|
import aqua.types.LiteralType
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
|
@ -3,10 +3,11 @@ package aqua.parser.head
|
|||||||
import aqua.AquaSpec
|
import aqua.AquaSpec
|
||||||
import aqua.parser.expr.func.ServiceIdExpr
|
import aqua.parser.expr.func.ServiceIdExpr
|
||||||
import aqua.parser.lexer.{LiteralToken, Token}
|
import aqua.parser.lexer.{LiteralToken, Token}
|
||||||
import aqua.parser.lift.LiftParser.Implicits.*
|
import aqua.parser.lift.LiftParser.given
|
||||||
import aqua.types.LiteralType
|
import aqua.types.LiteralType
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
|
import cats.data.NonEmptyList
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
import org.scalatest.matchers.should.Matchers
|
import org.scalatest.matchers.should.Matchers
|
||||||
|
|
||||||
@ -15,20 +16,17 @@ class ModuleSpec extends AnyFlatSpec with Matchers with AquaSpec {
|
|||||||
|
|
||||||
val myModule = ModuleExpr(
|
val myModule = ModuleExpr(
|
||||||
ModuleExpr.Word[Id](Id(ModuleExpr.Word.Kind.Aqua)),
|
ModuleExpr.Word[Id](Id(ModuleExpr.Word.Kind.Aqua)),
|
||||||
toAb("MyModule"),
|
toQName("MyModule"),
|
||||||
None,
|
None
|
||||||
Nil,
|
|
||||||
Nil
|
|
||||||
)
|
)
|
||||||
|
|
||||||
val declaresAll = myModule.copy(
|
val declaresAll = myModule.copy(
|
||||||
declareAll = Some(Token.lift[Id, Unit](()))
|
declares = Some(ModuleExpr.Declares.All(Token.lift[Id, Unit](())))
|
||||||
)
|
)
|
||||||
|
|
||||||
def declares(symbols: List[String]) =
|
def declares(symbols: List[String]) =
|
||||||
myModule.copy(
|
myModule.copy(
|
||||||
declareNames = symbols.filter(_.headOption.exists(_.isLower)).map(toName),
|
declares = Some(ModuleExpr.Declares.Names(NonEmptyList.fromListUnsafe(symbols.map(toQName))))
|
||||||
declareCustom = symbols.filter(_.headOption.exists(_.isUpper)).map(toAb)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def parseModuleExpr(expr: String): ModuleExpr[Id] =
|
def parseModuleExpr(expr: String): ModuleExpr[Id] =
|
||||||
|
@ -3,8 +3,9 @@ package aqua.parser.head
|
|||||||
import aqua.AquaSpec
|
import aqua.AquaSpec
|
||||||
import aqua.parser.expr.func.ServiceIdExpr
|
import aqua.parser.expr.func.ServiceIdExpr
|
||||||
import aqua.parser.lexer.{LiteralToken, Token}
|
import aqua.parser.lexer.{LiteralToken, Token}
|
||||||
import aqua.parser.lift.LiftParser.Implicits.*
|
import aqua.parser.lift.LiftParser.given
|
||||||
import aqua.types.LiteralType
|
import aqua.types.LiteralType
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
package aqua.parser.lexer
|
package aqua.parser.lexer
|
||||||
|
|
||||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
import aqua.parser.lift.LiftParser.given
|
||||||
import aqua.types.LiteralType
|
import aqua.types.LiteralType
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.data.{NonEmptyList, NonEmptyMap}
|
import cats.data.{NonEmptyList, NonEmptyMap}
|
||||||
import org.scalatest.EitherValues
|
import org.scalatest.EitherValues
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
package aqua.parser.lexer
|
package aqua.parser.lexer
|
||||||
|
|
||||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
import aqua.parser.lift.LiftParser.given
|
||||||
import aqua.types.ScalarType
|
import aqua.types.ScalarType
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
package aqua.parser.lexer
|
package aqua.parser.lexer
|
||||||
|
|
||||||
import org.scalatest.EitherValues
|
import aqua.parser.lift.LiftParser.given
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
|
||||||
import org.scalatest.matchers.should.Matchers
|
|
||||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
|
||||||
import aqua.types.LiteralType
|
import aqua.types.LiteralType
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
|
import org.scalatest.EitherValues
|
||||||
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
|
import org.scalatest.matchers.should.Matchers
|
||||||
|
|
||||||
class ValueTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
class ValueTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
||||||
|
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
package aqua.parser.lexer
|
package aqua.parser.lexer
|
||||||
|
|
||||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
import aqua.parser.lift.LiftParser.given
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.data.NonEmptyList
|
import cats.data.NonEmptyList
|
||||||
import org.scalatest.EitherValues
|
import org.scalatest.EitherValues
|
||||||
|
@@ -23,8 +23,8 @@ case class HeaderSem[S[_], C](

 object HeaderSem {

-  def fromInit[S[_], C](init: C): HeaderSem[S, C] =
-    HeaderSem(init, c => c.validNec)
+  def fromInit[S[_], C: Monoid](init: C): HeaderSem[S, C] =
+    HeaderSem(init, _ => Monoid[C].empty.validNec)

   given [S[_]: Comonad, C](using
     rc: Monoid[C]
|
|||||||
package aqua.semantics.header
|
package aqua.semantics.header
|
||||||
|
|
||||||
|
import aqua.helpers.data.PName
|
||||||
import aqua.parser.head.ModuleExpr
|
import aqua.parser.head.ModuleExpr
|
||||||
import aqua.semantics.header.HeaderHandler.{Res, error}
|
import aqua.semantics.header.HeaderHandler.{Res, error}
|
||||||
import aqua.semantics.header.Picker.*
|
import aqua.semantics.header.Picker.*
|
||||||
@ -19,24 +20,25 @@ class ModuleSem[S[_]: Comonad, C: Picker](expr: ModuleExpr[S])(using
|
|||||||
locations: LocationsAlgebra[S, State[C, *]]
|
locations: LocationsAlgebra[S, State[C, *]]
|
||||||
) {
|
) {
|
||||||
|
|
||||||
import expr.*
|
|
||||||
|
|
||||||
def headerSem: Res[S, C] = {
|
def headerSem: Res[S, C] = {
|
||||||
lazy val sem = HeaderSem(
|
lazy val sem = HeaderSem(
|
||||||
// Save module header info
|
// Save module header info
|
||||||
Picker[C].blank.setModule(name.value),
|
Picker[C].blank.setModule(expr.name.value.some),
|
||||||
ctx =>
|
ctx =>
|
||||||
// When file is handled, check that all the declarations exists
|
expr.declares match {
|
||||||
if (declareAll.nonEmpty) ctx.setDeclares(ctx.allNames).validNec
|
case None => ctx.validNec
|
||||||
else {
|
case Some(ModuleExpr.Declares.All(_)) =>
|
||||||
val declares = declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
|
val names = ctx.allNames.map(PName.simpleUnsafe).toSet
|
||||||
val names = declares.map { case (name, _) => name }.toSet
|
ctx.setDeclares(names).validNec
|
||||||
|
case Some(ModuleExpr.Declares.Names(declareNames)) =>
|
||||||
|
val declares = declareNames.fproductLeft(_.value).toList
|
||||||
|
val names = declareNames.map(_.toPName).toList.toSet
|
||||||
val res = ctx.setDeclares(names).addOccurences(declares)
|
val res = ctx.setDeclares(names).addOccurences(declares)
|
||||||
|
|
||||||
// summarize contexts to allow redeclaration of imports
|
// summarize contexts to allow redeclaration of imports
|
||||||
declares.map { case (n, t) =>
|
declares.map { case (n, t) =>
|
||||||
res
|
res
|
||||||
.pick(n, None, ctx.module.nonEmpty)
|
.pick(t.toPName, ctx.module.nonEmpty)
|
||||||
.toValidNec(
|
.toValidNec(
|
||||||
error(
|
error(
|
||||||
t,
|
t,
|
||||||
@ -50,9 +52,9 @@ class ModuleSem[S[_]: Comonad, C: Picker](expr: ModuleExpr[S])(using
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
word.value.fold(
|
expr.word.value.fold(
|
||||||
module = error(
|
module = error(
|
||||||
word,
|
expr.word,
|
||||||
"Keyword `module` is deprecated, use `aqua` instead"
|
"Keyword `module` is deprecated, use `aqua` instead"
|
||||||
).invalidNec,
|
).invalidNec,
|
||||||
aqua = sem.validNec
|
aqua = sem.validNec
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
package aqua.semantics.header
|
package aqua.semantics.header
|
||||||
|
|
||||||
|
import aqua.helpers.data.PName
|
||||||
import aqua.raw.{RawContext, RawPart}
|
import aqua.raw.{RawContext, RawPart}
|
||||||
import aqua.types.{AbilityType, ArrowType, Type}
|
import aqua.types.{AbilityType, ArrowType, Type}
|
||||||
|
|
||||||
@ -13,6 +14,7 @@ trait Picker[A] {
|
|||||||
def definedAbilityNames(ctx: A): Set[String]
|
def definedAbilityNames(ctx: A): Set[String]
|
||||||
def blank: A
|
def blank: A
|
||||||
def pick(ctx: A, name: String, rename: Option[String], declared: Boolean): Option[A]
|
def pick(ctx: A, name: String, rename: Option[String], declared: Boolean): Option[A]
|
||||||
|
def pick(ctx: A, name: PName, declared: Boolean): Option[A]
|
||||||
def pickDeclared(ctx: A): A
|
def pickDeclared(ctx: A): A
|
||||||
def pickHeader(ctx: A): A
|
def pickHeader(ctx: A): A
|
||||||
def module(ctx: A): Option[String]
|
def module(ctx: A): Option[String]
|
||||||
@ -24,8 +26,8 @@ trait Picker[A] {
|
|||||||
def funcAcceptAbility(ctx: A, name: String): Boolean
|
def funcAcceptAbility(ctx: A, name: String): Boolean
|
||||||
def setAbility(ctx: A, name: String, ctxAb: A): A
|
def setAbility(ctx: A, name: String, ctxAb: A): A
|
||||||
def setImportPaths(ctx: A, importPaths: Map[String, String]): A
|
def setImportPaths(ctx: A, importPaths: Map[String, String]): A
|
||||||
def setModule(ctx: A, name: String): A
|
def setModule(ctx: A, name: Option[String]): A
|
||||||
def setDeclares(ctx: A, declares: Set[String]): A
|
def setDeclares(ctx: A, declares: Set[PName]): A
|
||||||
def setExports(ctx: A, exports: Map[String, Option[String]]): A
|
def setExports(ctx: A, exports: Map[String, Option[String]]): A
|
||||||
def addPart(ctx: A, part: (A, RawPart)): A
|
def addPart(ctx: A, part: (A, RawPart)): A
|
||||||
}
|
}
|
||||||
@ -41,6 +43,10 @@ object Picker {
|
|||||||
|
|
||||||
def pick(name: String, rename: Option[String], declared: Boolean): Option[A] =
|
def pick(name: String, rename: Option[String], declared: Boolean): Option[A] =
|
||||||
Picker[A].pick(p, name, rename, declared)
|
Picker[A].pick(p, name, rename, declared)
|
||||||
|
|
||||||
|
def pick(name: PName, declared: Boolean): Option[A] =
|
||||||
|
Picker[A].pick(p, name, declared)
|
||||||
|
|
||||||
def pickDeclared: A = Picker[A].pickDeclared(p)
|
def pickDeclared: A = Picker[A].pickDeclared(p)
|
||||||
def pickHeader: A = Picker[A].pickHeader(p)
|
def pickHeader: A = Picker[A].pickHeader(p)
|
||||||
def module: Option[String] = Picker[A].module(p)
|
def module: Option[String] = Picker[A].module(p)
|
||||||
@ -65,10 +71,10 @@ object Picker {
|
|||||||
def addFreeParts(parts: List[RawPart]): A =
|
def addFreeParts(parts: List[RawPart]): A =
|
||||||
parts.foldLeft(p) { case (ctx, part) => ctx.addPart(blank -> part) }
|
parts.foldLeft(p) { case (ctx, part) => ctx.addPart(blank -> part) }
|
||||||
|
|
||||||
def setModule(name: String): A =
|
def setModule(name: Option[String]): A =
|
||||||
Picker[A].setModule(p, name)
|
Picker[A].setModule(p, name)
|
||||||
|
|
||||||
def setDeclares(declares: Set[String]): A =
|
def setDeclares(declares: Set[PName]): A =
|
||||||
Picker[A].setDeclares(p, declares)
|
Picker[A].setDeclares(p, declares)
|
||||||
|
|
||||||
def setExports(exports: Map[String, Option[String]]): A =
|
def setExports(exports: Map[String, Option[String]]): A =
|
||||||
@ -130,10 +136,10 @@ object Picker {
|
|||||||
override def setImportPaths(ctx: RawContext, importPaths: Map[String, String]): RawContext =
|
override def setImportPaths(ctx: RawContext, importPaths: Map[String, String]): RawContext =
|
||||||
ctx
|
ctx
|
||||||
|
|
||||||
override def setModule(ctx: RawContext, name: String): RawContext =
|
override def setModule(ctx: RawContext, name: Option[String]): RawContext =
|
||||||
ctx.copy(module = Some(name))
|
ctx.copy(module = name)
|
||||||
|
|
||||||
override def setDeclares(ctx: RawContext, declares: Set[String]): RawContext =
|
override def setDeclares(ctx: RawContext, declares: Set[PName]): RawContext =
|
||||||
ctx.copy(declares = declares)
|
ctx.copy(declares = declares)
|
||||||
|
|
||||||
override def setExports(ctx: RawContext, exports: Map[String, Option[String]]): RawContext =
|
override def setExports(ctx: RawContext, exports: Map[String, Option[String]]): RawContext =
|
||||||
@@ -146,13 +152,39 @@ object Picker {
       declared: Boolean
     ): Option[RawContext] =
       Option
-        .when(!declared || ctx.declares(name)) {
-          RawContext.blank
-            .copy(parts = ctx.parts.filter(_._2.name == name).map { case (partContext, part) =>
+        .when(!declared || ctx.declaredNames(name)) {
+          RawContext.fromParts(
+            ctx.parts.collect {
+              case (partContext, part) if part.name == name =>
                 (partContext, rename.fold(part)(part.rename))
-            })
+            }
+          )
         }
         .filter(_.nonEmpty)
+        .map(
+          // Module and declares should not be lost when picking
+          // Because it affects later logic
+          _.setModule(ctx.module).setDeclares(Set(PName.simpleUnsafe(name)))
+        )
+
+    override def pick(
+      ctx: RawContext,
+      name: PName,
+      declared: Boolean
+    ): Option[RawContext] =
+      name.simple.fold(
+        name.splits.collectFirstSome { case (ab, field) =>
+          for {
+            ability <- ctx.abilities.get(ab.value)
+            inner <- pick(ability, field, declared)
+          } yield RawContext
+            .fromAbilities(Map(ab.value -> inner))
+            // Module and declares should not be lost when picking
+            // Because it affects later logic
+            .setModule(ctx.module)
+            .setDeclares(Set(name))
+        }
+      )(pick(ctx, _, None, declared))

     override def pickHeader(ctx: RawContext): RawContext =
       RawContext.blank.copy(module = ctx.module, declares = ctx.declares, exports = ctx.exports)
@@ -161,7 +193,7 @@ object Picker {
       if (ctx.module.isEmpty) ctx
       else
         ctx.declares.toList
-          .flatMap(n => pick(ctx, n, None, ctx.module.nonEmpty))
+          .flatMap(n => pick(ctx, n, ctx.module.nonEmpty))
           .foldLeft(pickHeader(ctx))(_ |+| _)
 }

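The new `pick` overload above resolves a dotted `PName` by trying every prefix/suffix split against the context's abilities and recursing on the remainder. A simplified, hypothetical model of that lookup (plain nested maps instead of the real `RawContext`, using the `PName` helper added at the end of this diff):

import cats.syntax.foldable.*

// Hypothetical stand-in for RawContext: plain member names plus nested abilities.
final case class MiniCtx(members: Set[String], abilities: Map[String, MiniCtx])

// True if `name` resolves in `ctx`, either directly or through an ability prefix.
def resolves(ctx: MiniCtx, name: PName): Boolean =
  name.simple.fold(
    // "A.B.foo" is tried as (A, B.foo) and then (A.B, foo); first match wins.
    name.splits.collectFirstSome { case (prefix, rest) =>
      ctx.abilities.get(prefix.value).filter(inner => resolves(inner, rest))
    }.isDefined
  )(simpleName => ctx.members(simpleName))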
@@ -154,7 +154,12 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
           .filterF(nv => T.resolveType(nv.typeName, mustBeDefined = false).map(_.isDefined))
           .widen[ValueToken[S]]

-        callArrow.orElse(ability).orElse(namedValue).foldF(default)(valueToRaw)
+        callArrow
+          .orElse(ability)
+          .orElse(namedValue)
+          .foldF(default)(
+            valueToRaw
+          )

       case dvt @ NamedValueToken(typeName, fields) =>
         (for {
@@ -13,8 +13,8 @@ import aqua.types.ArrowType
 import cats.data.*
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
-import cats.syntax.functor.*
 import cats.syntax.flatMap.*
+import cats.syntax.functor.*
 import cats.syntax.option.*
 import monocle.Lens
 import monocle.macros.GenLens
@@ -66,6 +66,6 @@ object AbilitiesState {
       rootServiceIds = context.allServices.flatMap { case (name, service) =>
         service.defaultId.map(name -> _)
       },
-      abilities = context.abilities // TODO is it the right way to collect abilities? Why?
+      abilities = context.allAbilities
     )
   }
|
|||||||
|
|
||||||
import aqua.parser.Ast
|
import aqua.parser.Ast
|
||||||
import aqua.parser.head.{ExportExpr, FromExpr, HeaderExpr, ModuleExpr}
|
import aqua.parser.head.{ExportExpr, FromExpr, HeaderExpr, ModuleExpr}
|
||||||
|
import aqua.parser.lexer.QName
|
||||||
import aqua.parser.lexer.Token
|
import aqua.parser.lexer.Token
|
||||||
import aqua.parser.lexer.{Ability, Name}
|
import aqua.parser.lexer.{Ability, Name}
|
||||||
import aqua.raw.RawContext
|
import aqua.raw.RawContext
|
||||||
@ -38,11 +39,9 @@ class HeaderSpec extends AnyFlatSpec with Matchers with Inside {
|
|||||||
Token.lift(()),
|
Token.lift(()),
|
||||||
Chain(
|
Chain(
|
||||||
ModuleExpr(
|
ModuleExpr(
|
||||||
word = ModuleExpr.Word[Id](Id(ModuleExpr.Word.Kind.Aqua)),
|
word = ModuleExpr.Word(ModuleExpr.Word.Kind.Aqua),
|
||||||
name = Ability[Id]("TestModule"),
|
name = QName("TestModule", NonEmptyList.one("TestModule")),
|
||||||
declareAll = None,
|
declares = None
|
||||||
declareNames = Nil,
|
|
||||||
declareCustom = Nil
|
|
||||||
),
|
),
|
||||||
ExportExpr(NonEmptyList.of(exp))
|
ExportExpr(NonEmptyList.of(exp))
|
||||||
)
|
)
|
||||||
|
@@ -28,7 +28,6 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {

   val emptyCall = Call(Nil, Nil)

-  implicit val fileLift: LiftParser[Span.S] = Span.spanLiftParser
   val parser = Parser.parse(Parser.spanParser)

   val semantics = new RawSemantics[Span.S]()
utils/helpers/src/main/scala/aqua/helpers/data/PName.scala (new file, 39 lines)
@@ -0,0 +1,39 @@
+package aqua.helpers.data
+
+import aqua.errors.Errors.internalError
+
+import cats.data.NonEmptyList
+import cats.syntax.option.*
+
+/**
+ * Short for PathName. Represents name with parts separated by `.`
+ */
+final case class PName(
+  parts: NonEmptyList[String]
+) {
+
+  lazy val simple: Option[String] =
+    Option.when(parts.length == 1)(parts.head)
+
+  lazy val isSimple: Boolean = simple.isDefined
+
+  lazy val value: String = parts.toList.mkString(".")
+
+  lazy val splits: List[(PName, PName)] = {
+    val partsList = parts.toList
+    (1 until parts.length).toList.map(i =>
+      PName(NonEmptyList.fromListUnsafe(partsList.take(i))) ->
+        PName(NonEmptyList.fromListUnsafe(partsList.drop(i)))
+    )
+  }
+
+  override def toString(): String = value
+}
+
+object PName {
+
+  def simpleUnsafe(name: String): PName =
+    if (name.isEmpty || name.contains("."))
+      internalError(s"Invalid PName: $name")
+    else PName(NonEmptyList.one(name))
+}
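To make the `splits` contract concrete, a small worked example (the values follow directly from the definition above; the assertions are illustrative, not part of the commit):

import cats.data.NonEmptyList

val name = PName(NonEmptyList.of("Imp", "Sub", "foo"))

assert(name.value == "Imp.Sub.foo")
assert(!name.isSimple)

// Every prefix/suffix cut, in order; Picker.pick uses these to try
// ability prefixes against the remaining path.
assert(
  name.splits.map { case (prefix, rest) => prefix.value -> rest.value } ==
    List("Imp" -> "Sub.foo", "Imp.Sub" -> "foo")
)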