Mirror of https://github.com/fluencelabs/aqua.git, synced 2024-12-04 22:50:18 +00:00
feat(compiler): Allow omitting field name in struct creation [LNG-261] (#943)
parent 2f11a0649f
commit fcdb5b0fef
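What the change enables at the Aqua level, sketched in the same script-literal style the updated SemanticsSpec uses at the end of this diff (the struct and field names here are illustrative, not taken from the commit): in struct creation, ability creation and `.copy(...)`, an argument written as just `name` is shorthand for `name = name`.

    @main def structShorthandSketch(): Unit =
      // Hypothetical Aqua program: `Struct(field)` now means `Struct(field = field)`.
      val script =
        """|data Struct:
           |  field: i8
           |
           |func main() -> Struct:
           |  field = 42
           |  <- Struct(field)
           |""".stripMargin
      println(script)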
parser/src/main/scala/aqua/parser/lexer/NamedArg.scala (new file, 59 lines)
@@ -0,0 +1,59 @@
+package aqua.parser.lexer
+
+import aqua.parser.lift.Span.S
+import aqua.parser.lexer.Token.*
+
+import cats.data.NonEmptyList
+import cats.parse.Parser as P
+import cats.syntax.functor.*
+import cats.Comonad
+import cats.arrow.FunctionK
+
+enum NamedArg[F[_]] extends Token[F] {
+  // for `name = value`
+  case Full(name: Name[F], value: ValueToken[F])
+  // for just `name` (short for `name = name`)
+  case Short(variable: VarToken[F])
+
+  lazy val argName: Name[F] = this match {
+    case Full(name, _) => name
+    case Short(variable) => variable.name
+  }
+
+  lazy val argValue: ValueToken[F] = this match {
+    case Full(_, value) => value
+    case Short(variable) => variable
+  }
+
+  override def as[T](v: T): F[T] = this match {
+    case Full(name, value) => name.as(v)
+    case Short(variable) => variable.as(v)
+  }
+
+  override def mapK[K[_]: Comonad](fk: FunctionK[F, K]): NamedArg[K] =
+    this match {
+      case Full(name, value) => Full(name.mapK(fk), value.mapK(fk))
+      case Short(variable) => Short(variable.mapK(fk))
+    }
+}
+
+object NamedArg {
+
+  private val namedArgFull: P[NamedArg.Full[S]] =
+    P.defer(
+      (Name.p.between(` *`, `/s*`) <*
+        `=`.between(` *`, `/s*`)) ~
+        ValueToken.`value`.between(` *`, `/s*`)
+    ).map(NamedArg.Full.apply)
+
+  private val namedArgShort: P[NamedArg.Short[S]] =
+    P.defer(
+      VarToken.variable.between(` *`, `/s*`)
+    ).map(NamedArg.Short.apply)
+
+  val namedArg: P[NamedArg[S]] =
+    namedArgFull.backtrack | namedArgShort
+
+  val namedArgs: P[NonEmptyList[NamedArg[S]]] =
+    P.defer(` `.?.with1 ~ `(` ~ `/s*` *> comma(namedArg) <* `/s*` *> `)`)
+}
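A minimal usage sketch of the new parsers (not part of the commit; assumes the aqua parser module is on the classpath, Scala 3, and an illustrative entry point name): `namedArg` accepts both the full `name = value` form and the bare `name` shorthand, and `namedArgs` parses a parenthesized, comma-separated mix of the two, as the StructValueExprSpec changes further below exercise.

    import aqua.parser.lexer.NamedArg

    @main def namedArgDemo(): Unit =
      // Full form: `name = value`
      NamedArg.namedArg.parseAll("a = 3") match
        case Right(NamedArg.Full(_, _)) => println("parsed full form")
        case other                      => println(s"unexpected: $other")
      // Short form: bare `name`, shorthand for `name = name`
      NamedArg.namedArg.parseAll("b") match
        case Right(NamedArg.Short(_)) => println("parsed short form")
        case other                    => println(s"unexpected: $other")
      // Mixed argument list
      NamedArg.namedArgs.parseAll("(meaning = 42, variable)") match
        case Right(args) => println(s"parsed ${args.length} named args")
        case Left(err)   => println(err)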
@@ -3,18 +3,19 @@ package aqua.parser.lexer
 import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser.*
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+import aqua.types.LiteralType
+import aqua.parser.lexer.CallArrowToken.CallBraces
+import aqua.parser.lexer.NamedArg.namedArgs
 
+import cats.~>
 import cats.data.{NonEmptyList, NonEmptyMap}
 import cats.parse.{Numbers, Parser as P, Parser0 as P0}
 import cats.syntax.comonad.*
 import cats.syntax.functor.*
 import cats.{Comonad, Functor}
 
 import scala.language.postfixOps
-import cats.~>
-import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
-import aqua.types.LiteralType
-import aqua.parser.lexer.CallArrowToken.CallBraces
 
 sealed trait PropertyOp[F[_]] extends Token[F] {
   def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K]
@@ -47,12 +48,14 @@ case class IntoIndex[F[_]: Comonad](point: F[Unit], idx: Option[ValueToken[F]])
   override def mapK[K[_]: Comonad](fk: F ~> K): IntoIndex[K] = copy(fk(point), idx.map(_.mapK(fk)))
 }
 
-case class IntoCopy[F[_]: Comonad](point: F[Unit], fields: NonEmptyMap[String, ValueToken[F]])
-    extends PropertyOp[F] {
+case class IntoCopy[F[_]: Comonad](
+  point: F[Unit],
+  args: NonEmptyList[NamedArg[F]]
+) extends PropertyOp[F] {
   override def as[T](v: T): F[T] = point.as(v)
 
   override def mapK[K[_]: Comonad](fk: F ~> K): IntoCopy[K] =
-    copy(fk(point), fields.map(_.mapK(fk)))
+    copy(fk(point), args.map(_.mapK(fk)))
 }
 
 object PropertyOp {
@@ -66,9 +69,7 @@ object PropertyOp {
   }
 
   val parseCopy: P[PropertyOp[Span.S]] =
-    (`.` *> (`copy`.lift ~ namedArgs)).map { case (point, fields) =>
-      IntoCopy(point, NonEmptyMap.of(fields.head, fields.tail: _*))
-    }
+    (`.` *> (`copy`.lift ~ namedArgs)).map(IntoCopy.apply)
 
   private val parseIdx: P[PropertyOp[Span.S]] =
     (P.defer(
@@ -1,6 +1,7 @@
 package aqua.parser.lexer
 
 import aqua.parser.lift.Span.S
+
 import cats.data.NonEmptyList
 import cats.parse.{Accumulator0, Parser as P, Parser0 as P0}
 import cats.{~>, Comonad, Functor}
@@ -120,18 +121,6 @@ object Token {
   val `<-` : P[Unit] = P.string("<-")
   val `/s*` : P0[Unit] = ` \n+`.backtrack | ` *`.void
 
-  val namedArg: P[(String, ValueToken[S])] =
-    P.defer(
-      `name`.between(` *`, `/s*`) ~
-        `=`.between(` *`, `/s*`).void ~
-        ValueToken.`value`.between(` *`, `/s*`)
-    ).map { case ((name, _), vt) =>
-      (name, vt)
-    }
-
-  val namedArgs: P[NonEmptyList[(String, ValueToken[S])]] =
-    P.defer(` `.?.with1 ~ `(` ~ `/s*` *> comma(namedArg) <* `/s*` *> `)`)
 
   case class LiftToken[F[_]: Functor, A](point: F[A]) extends Token[F] {
     override def as[T](v: T): F[T] = Functor[F].as(point, v)
@@ -3,6 +3,7 @@ package aqua.parser.lexer
 import aqua.parser.Expr
 import aqua.parser.head.FilenameExpr
 import aqua.parser.lexer.Token.*
+import aqua.parser.lexer.NamedArg.namedArgs
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser.*
 import aqua.types.LiteralType
@@ -260,7 +261,7 @@ object CallArrowToken {
 
 case class NamedValueToken[F[_]: Comonad](
   typeName: NamedTypeToken[F],
-  fields: NonEmptyMap[String, ValueToken[F]]
+  fields: NonEmptyList[NamedArg[F]]
 ) extends ValueToken[F] {
 
   override def mapK[K[_]: Comonad](fk: F ~> K): NamedValueToken[K] =
@@ -277,7 +278,7 @@ object NamedValueToken {
         "Missing braces '()' after the struct type"
       )
       .map { case (dn, args) =>
-        NamedValueToken(NamedTypeToken(dn), args.toNem)
+        NamedValueToken(NamedTypeToken(dn), args)
      }
 }
 
@@ -19,15 +19,18 @@ class AbilityValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
     parseData(str) should be(
       NamedValueToken(
         NamedTypeToken[Id]("AbilityA"),
-        NonEmptyMap.of(
-          "v1" -> toNumber(1),
-          "f1" -> PropertyToken[Id](
+        NonEmptyList.of(
+          NamedArg.Full(toName("v1"), toNumber(1)),
+          NamedArg.Full(
+            toName("f1"),
+            PropertyToken(
               VarToken(toName("input")),
               NonEmptyList.one(IntoField("arrow"))
             )
           )
         )
       )
+    )
   }
 
   "one line struct value" should "be parsed" in {
@@ -4,21 +4,10 @@ import aqua.AquaSpec
 import aqua.AquaSpec.{toNumber, toStr, toVar}
 import aqua.parser.expr.ConstantExpr
 import aqua.parser.expr.func.AssignmentExpr
-import aqua.parser.lexer.{
-  CallArrowToken,
-  CollectionToken,
-  IntoArrow,
-  LiteralToken,
-  Name,
-  NamedTypeToken,
-  NamedValueToken,
-  PropertyToken,
-  Token,
-  ValueToken,
-  VarToken
-}
+import aqua.parser.lexer.*
 import aqua.parser.lexer.CollectionToken.Mode.ArrayMode
 import aqua.types.LiteralType
+
 import cats.Id
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
@@ -32,52 +21,67 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
     val one = toNumber(1)
     val two = toNumber(2)
     val three = toNumber(3)
-    val a = LiteralToken[Id]("\"a\"", LiteralType.string)
-    val b = LiteralToken[Id]("\"b\"", LiteralType.string)
-    val c = LiteralToken[Id]("\"c\"", LiteralType.string)
+    val a = toStr("a")
+    val b = toStr("b")
+    val c = toStr("c")
 
     parseData(
       str
     ) should be(
       NamedValueToken(
         NamedTypeToken[Id]("Obj"),
-        NonEmptyMap.of(
-          "f1" -> one,
-          "f2" -> a,
-          "f3" -> CollectionToken[Id](ArrayMode, List(one, two, three)),
-          "f4" -> CollectionToken[Id](ArrayMode, List(b, c)),
-          "f5" -> NamedValueToken(
+        NonEmptyList.of(
+          NamedArg.Full(toName("f1"), one),
+          NamedArg.Full(toName("f2"), a),
+          NamedArg.Full(toName("f3"), CollectionToken[Id](ArrayMode, List(one, two, three))),
+          NamedArg.Full(toName("f4"), CollectionToken[Id](ArrayMode, List(b, c))),
+          NamedArg.Full(
+            toName("f5"),
+            NamedValueToken(
               NamedTypeToken[Id]("NestedObj"),
-              NonEmptyMap.of(
-                "i1" -> two,
-                "i2" -> b,
-                "i3" -> CallArrowToken(Name[Id]("funcCall"), List(three)),
-                "i4" -> VarToken[Id](Name[Id]("value"))
+              NonEmptyList.of(
+                NamedArg.Full(toName("i1"), two),
+                NamedArg.Full(toName("i2"), b),
+                NamedArg.Full(toName("i3"), CallArrowToken(toName("funcCall"), List(three))),
+                NamedArg.Full(toName("i4"), VarToken(toName("value")))
+              )
             )
           ),
-          "f6" -> CallArrowToken(Name[Id]("funcCall"), List(one)),
-          "f7" -> PropertyToken[Id](
+          NamedArg.Full(toName("f6"), CallArrowToken(Name[Id]("funcCall"), List(one))),
+          NamedArg.Full(
+            toName("f7"),
+            PropertyToken[Id](
              VarToken[Id](Name[Id]("Serv")),
              NonEmptyList.one(IntoArrow[Id](Name[Id]("call"), List(two)))
            )
          )
        )
      )
+    )
   }
 
-  "one named arg" should "be parsed" in {
-    val result = aqua.parser.lexer.Token.namedArg
+  "named args" should "parse one full named arg" in {
+    val result = NamedArg.namedArg
       .parseAll(""" a
                   | =
                   | 3""".stripMargin)
-      .map(v => (v._1, v._2.mapK(spanToId)))
+      .map(_.mapK(spanToId))
       .value
 
-    result should be(("a", toNumber(3)))
+    result should be(NamedArg.Full(toName("a"), toNumber(3)))
   }
 
-  "named args" should "be parsed" in {
-    val result = Token.namedArgs
+  it should "parse one short named arg" in {
+    val result = NamedArg.namedArg
+      .parseAll(" b ")
+      .map(_.mapK(spanToId))
+      .value
+
+    result should be(NamedArg.Short(toVar("b")))
+  }
+
+  it should "parse a few full named args" in {
+    val result = NamedArg.namedArgs
       .parseAll("""(
                   |a = "str",
                   |b = 3,
@@ -86,16 +90,61 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
                   | 5
                   |)""".stripMargin)
       .value
-      .map { case (str, vt) => (str, vt.mapK(spanToId)) }
+      .map(_.mapK(spanToId))
 
     result should be(
-      NonEmptyList[(String, ValueToken[Id])](
-        ("a", toStr("str")),
-        ("b", toNumber(3)) :: ("c", toNumber(5)) :: Nil
+      NonEmptyList.of(
+        NamedArg.Full(toName("a"), toStr("str")),
+        NamedArg.Full(toName("b"), toNumber(3)),
+        NamedArg.Full(toName("c"), toNumber(5))
       )
     )
   }
 
+  it should "parse a few short named args" in {
+    val result = NamedArg.namedArgs
+      .parseAll("""(
+                  |a,
+                  | b ,
+                  |c
+                  |
+                  |)""".stripMargin)
+      .value
+      .map(_.mapK(spanToId))
+
+    result should be(
+      NonEmptyList.of(
+        NamedArg.Short(toVar("a")),
+        NamedArg.Short(toVar("b")),
+        NamedArg.Short(toVar("c"))
+      )
+    )
+  }
+
+  it should "parse mixed named args" in {
+    val args = List(
+      "meaning = 42" -> NamedArg.Full(toName("meaning"), toNumber(42)),
+      "variable" -> NamedArg.Short(toVar("variable")),
+      "col = [1,2,3]" -> NamedArg.Full(
+        toName("col"),
+        CollectionToken[Id](ArrayMode, List(toNumber(1), toNumber(2), toNumber(3)))
+      ),
+      "arrow" -> NamedArg.Short(toVar("arrow"))
+    )
+
+    args.permutations.foreach(perm =>
+      val str = perm.map(_._1).mkString("(", ", ", ")")
+      val expected = NonEmptyList.fromListUnsafe(perm.map(_._2))
+
+      val result = NamedArg.namedArgs
+        .parseAll(str)
+        .value
+        .map(_.mapK(spanToId))
+
+      result should be(expected)
+    )
+  }
+
   "one line struct value" should "be parsed" in {
     parseAndCheckStruct(
       """Obj(f1 = 1, f2 = "a", f3 = [1,2,3], f4=["b", "c"], f5 =NestedObj(i1 = 2, i2 = "b", i3= funcCall(3), i4 = value), f6=funcCall(1), f7 = Serv.call(2))"""
@@ -42,39 +42,59 @@ class PropertyOpSpec extends AnyFlatSpec with Matchers with EitherValues {
     PropertyOp.ops.parseAll("!-1").isLeft shouldBe true
   }
 
-  "copy ops" should "parse" in {
-    val opsP = (s: String) => PropertyOp.ops.parseAll(s).value.map(_.mapK(spanToId))
+  def copyOpsP(s: String) = PropertyOp.ops.parseAll(s).value.map(_.mapK(spanToId))
 
-    opsP(".copy(a = \"str\", b = 12)") should be(
+  "copy ops" should "parse one copy" in {
+    copyOpsP(".copy(a = \"str\", b = 12)") should be(
       NonEmptyList.of(
         IntoCopy[Id](
           (),
-          NonEmptyMap.of(
-            "a" -> LiteralToken("\"str\"", LiteralType.string),
-            "b" -> toNumber(12)
+          NonEmptyList.of(
+            NamedArg.Full(toName("a"), toStr("str")),
+            NamedArg.Full(toName("b"), toNumber(12))
          )
        )
      )
    )
+  }
 
-    opsP(".copy(a = \"str\", b = 12).copy(c = 54, d = someVar)") should be(
+  it should "parse sequential copy" in {
+    copyOpsP(".copy(a = \"str\", b = 12).copy(c = 54, d = someVar)") should be(
       NonEmptyList.of(
         IntoCopy[Id](
           (),
-          NonEmptyMap.of(
-            "a" -> LiteralToken("\"str\"", LiteralType.string),
-            "b" -> toNumber(12)
+          NonEmptyList.of(
+            NamedArg.Full(toName("a"), toStr("str")),
+            NamedArg.Full(toName("b"), toNumber(12))
          )
        ),
        IntoCopy[Id](
          (),
-          NonEmptyMap.of(
-            "c" -> toNumber(54),
-            "d" -> VarToken("someVar")
+          NonEmptyList.of(
+            NamedArg.Full(toName("c"), toNumber(54)),
+            NamedArg.Full(toName("d"), toVar("someVar"))
          )
        )
      )
    )
  }
+
+  it should "parse mixed args in copy" in {
+    val args = List(
+      "a = \"str\"" -> NamedArg.Full(toName("a"), toStr("str")),
+      "b = 12" -> NamedArg.Full(toName("b"), toNumber(12)),
+      "c" -> NamedArg.Short(toVar("c")),
+      "d" -> NamedArg.Short(toVar("d"))
+    )
+
+    args.toSet.subsets().filter(_.nonEmpty).flatMap(_.toList.permutations).foreach { args =>
+      val str = args.map(_._1).mkString(".copy(", ", ", ")")
+      val expected = NonEmptyList.of(
+        IntoCopy[Id]((), NonEmptyList.fromListUnsafe(args.map(_._2)))
+      )
+
+      copyOpsP(str) should be(expected)
+    }
+  }
+
 }
@@ -34,6 +34,23 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
   report: ReportAlgebra[S, Alg]
 ) extends Logging {
 
+  private def reportNamedArgsDuplicates(
+    args: NonEmptyList[NamedArg[S]]
+  ): Alg[Unit] = args
+    .groupBy(_.argName.value)
+    .filter { case (_, group) =>
+      group.size > 1
+    }
+    .toList
+    .traverse_ { case (name, group) =>
+      group.traverse_ { arg =>
+        report.error(
+          arg.argName,
+          s"Duplicate argument `$name`"
+        )
+      }
+    }
+
   private def resolveSingleProperty(rootType: Type, op: PropertyOp[S]): Alg[Option[PropertyRaw]] =
     op match {
       case op: IntoField[S] =>
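A standalone sketch of the duplicate-name check added above, stripped of the Alg/report machinery (the names and values here are illustrative): group argument names, keep only the groups with more than one occurrence, and report each of them.

    @main def duplicateNamesSketch(): Unit =
      val argNames = List("field", "str", "field")
      val duplicates = argNames
        .groupBy(identity)
        .filter { case (_, group) => group.size > 1 }
        .keys
        .toList
      println(duplicates) // List(field)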
@@ -46,12 +63,13 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
           )
         } yield arrowProp
       case op: IntoCopy[S] =>
-        for {
-          maybeFields <- op.fields.traverse(valueToRaw)
-          copyProp <- maybeFields.sequence.flatTraverse(
-            T.resolveCopy(rootType, op, _)
+        (for {
+          _ <- OptionT.liftF(
+            reportNamedArgsDuplicates(op.args)
           )
-        } yield copyProp
+          fields <- op.args.traverse(arg => OptionT(valueToRaw(arg.argValue)).map(arg -> _))
+          prop <- OptionT(T.resolveCopy(op, rootType, fields))
+        } yield prop).value
       case op: IntoIndex[S] =>
         for {
           maybeIdx <- op.idx.fold(LiteralRaw.Zero.some.pure)(valueToRaw)
@@ -102,7 +120,13 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
       case dvt @ NamedValueToken(typeName, fields) =>
         (for {
           resolvedType <- OptionT(T.resolveType(typeName))
-          fieldsGiven <- fields.traverse(value => OptionT(valueToRaw(value)))
+          // Report duplicate fields
+          _ <- OptionT.liftF(
+            reportNamedArgsDuplicates(fields)
+          )
+          fieldsGiven <- fields
+            .traverse(arg => OptionT(valueToRaw(arg.argValue)).map(arg.argName.value -> _))
+            .map(_.toNem) // Take only last value for a field
           fieldsGivenTypes = fieldsGiven.map(_.`type`)
           generated <- OptionT.fromOption(
             resolvedType match {
@@ -297,7 +321,8 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
         valueToRaw(v).flatMap(
           _.flatTraverse {
             case ca: CallArrowRaw => (ca, ca.baseType).some.pure[Alg]
-            case apr@ApplyPropertyRaw(_, IntoArrowRaw(_, arrowType, _)) => (apr, arrowType).some.pure[Alg]
+            case apr @ ApplyPropertyRaw(_, IntoArrowRaw(_, arrowType, _)) =>
+              (apr, arrowType).some.pure[Alg]
             // TODO: better error message (`raw` formatting)
             case raw => report.error(v, s"Expected arrow call, got $raw").as(none)
           }
@@ -37,10 +37,11 @@ trait TypesAlgebra[S[_], Alg[_]] {
   def resolveIndex(rootT: Type, op: IntoIndex[S], idx: ValueRaw): Alg[Option[PropertyRaw]]
+
   def resolveCopy(
+    token: IntoCopy[S],
     rootT: Type,
-    op: IntoCopy[S],
-    fields: NonEmptyMap[String, ValueRaw]
+    fields: NonEmptyList[(NamedArg[S], ValueRaw)]
   ): Alg[Option[PropertyRaw]]
 
   def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[PropertyRaw]]
 
   def resolveArrow(
@@ -245,22 +245,33 @@ class TypesInterpreter[S[_], X](using
 
   // TODO actually it's stateless, exists there just for reporting needs
   override def resolveCopy(
+    token: IntoCopy[S],
     rootT: Type,
-    op: IntoCopy[S],
-    fields: NonEmptyMap[String, ValueRaw]
+    args: NonEmptyList[(NamedArg[S], ValueRaw)]
   ): State[X, Option[PropertyRaw]] =
     rootT match {
       case st: StructType =>
-        fields.toSortedMap.toList.traverse { case (fieldName, value) =>
+        args.forallM { case (arg, value) =>
+          val fieldName = arg.argName.value
           st.fields.lookup(fieldName) match {
             case Some(t) =>
-              ensureTypeMatches(op.fields.lookup(fieldName).getOrElse(op), t, value.`type`)
-            case None => report.error(op, s"No field with name '$fieldName' in $rootT").as(false)
+              ensureTypeMatches(arg.argValue, t, value.`type`)
+            case None =>
+              report.error(arg.argName, s"No field with name '$fieldName' in $rootT").as(false)
           }
-        }.map(res => if (res.forall(identity)) Some(IntoCopyRaw(st, fields)) else None)
+        }.map(
+          Option.when(_)(
+            IntoCopyRaw(
+              st,
+              args.map { case (arg, value) =>
+                arg.argName.value -> value
+              }.toNem
+            )
+          )
+        )
 
       case _ =>
-        report.error(op, s"Expected $rootT to be a data type").as(None)
+        report.error(token, s"Expected $rootT to be a data type").as(None)
     }
 
   // TODO actually it's stateless, exists there just for reporting needs
@@ -339,12 +350,12 @@ class TypesInterpreter[S[_], X](using
               )
               .as(false)
           } else {
-            valueFields.toSortedMap.toList.traverse { (name, `type`) =>
+            valueFields.toSortedMap.toList.forallM { (name, `type`) =>
               typeFields.lookup(name) match {
                 case Some(t) =>
                   val nextToken = token match {
                     case NamedValueToken(_, fields) =>
-                      fields.lookup(name).getOrElse(token)
+                      fields.find(_.argName.value == name).getOrElse(token)
                     // TODO: Is it needed?
                     case PropertyToken(_, properties) =>
                       properties.last
@@ -359,7 +370,7 @@ class TypesInterpreter[S[_], X](using
                   )
                   .as(false)
               }
-            }.map(_.forall(identity))
+            }
           }
         case _ =>
           val notes =
@@ -5,7 +5,7 @@ import aqua.parser.Ast
 import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp, OnTag, ParTag, RawTag, SeqGroupTag, SeqTag}
 import aqua.parser.Parser
 import aqua.parser.lift.{LiftParser, Span}
-import aqua.raw.value.{ApplyBinaryOpRaw, LiteralRaw, ValueRaw, VarRaw}
+import aqua.raw.value.*
 import aqua.types.*
 import aqua.raw.ops.*
 
@@ -674,4 +674,126 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
       warnings.exists(_.hints.exists(_.contains("used"))) should be(true)
     }
   }
+
+  {
+    val fieldCases = List(
+      "field = 42" -> "field = field",
+      "field = 42" -> "field",
+      "integer = 42" -> "field = integer",
+      "" -> "field = 42"
+    )
+
+    val strCases = List(
+      "str = \"str\"" -> "str = str",
+      "str = \"str\"" -> "str",
+      "string = \"str\"" -> "str = string",
+      "" -> "str = \"str\""
+    )
+
+    it should "handle struct creation" in {
+      for {
+        fieldCase <- fieldCases
+        (fieldDef, fieldArg) = fieldCase
+        strCase <- strCases
+        (strDef, strArg) = strCase
+      } {
+        val defs = List(fieldDef, strDef).filter(_.nonEmpty).mkString("\n ")
+        val args = List(fieldArg, strArg).filter(_.nonEmpty).mkString(", ")
+        val script = s"""|data Struct:
+                         | field: i8
+                         | str: string
+                         |
+                         |func main() -> Struct:
+                         | $defs
+                         | <- Struct($args)
+                         |""".stripMargin
+
+        insideBody(script) { body =>
+          matchSubtree(body) { case (ReturnTag(vals), _) =>
+            inside(vals.head) { case MakeStructRaw(fields, _) =>
+              fields.contains("field") should be(true)
+              fields.contains("str") should be(true)
+            }
+          }
+        }
+      }
+    }
+
+    it should "handle ability creation" in {
+      def arrow(name: String) =
+        s"""|$name = (x: i8) -> bool:
+            | <- x > 0
+            |""".stripMargin
+      val arrowCases = List(
+        arrow("arrow") -> "arrow = arrow",
+        arrow("arrow") -> "arrow",
+        arrow("closure") -> "arrow = closure"
+      )
+
+      for {
+        arrowCase <- arrowCases
+        (arrowDef, arrowArg) = arrowCase
+        fieldCase <- fieldCases
+        (fieldDef, fieldArg) = fieldCase
+        strCase <- strCases
+        (strDef, strArg) = strCase
+      } {
+        val defs = List(arrowDef, fieldDef, strDef).filter(_.nonEmpty).mkString("\n ")
+        val args = List(arrowArg, fieldArg, strArg).filter(_.nonEmpty).mkString(", ")
+        val script = s"""|ability Ab:
+                         | field: i8
+                         | str: string
+                         | arrow(x: i8) -> bool
+                         |
+                         |func main() -> Ab:
+                         | $defs
+                         | <- Ab($args)
+                         |""".stripMargin
+
+        insideBody(script) { body =>
+          matchSubtree(body) { case (ReturnTag(vals), _) =>
+            inside(vals.head) { case AbilityRaw(fields, _) =>
+              fields.contains("arrow") should be(true)
+              fields.contains("field") should be(true)
+              fields.contains("str") should be(true)
+            }
+          }
+        }
+      }
+    }
+  }
+
+  it should "forbid duplicate fields in data or ability creation" in {
+    List("data", "ability").foreach { form =>
+
+      val script = s"""|$form StructOrAb:
+                       | field: i8
+                       |
+                       |func main() -> StructOrAb:
+                       | field = 24
+                       | <- StructOrAb(field = 42, field)
+                       |""".stripMargin
+
+      insideSemErrors(script) { errors =>
+        atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]]
+      }
+    }
+  }
+
+  it should "forbid duplicate fields in data copy" in {
+
+    val script = """|data Struct:
+                    | field: i8
+                    |
+                    |func main() -> Struct:
+                    | st = Struct(field = 24)
+                    | field = 37
+                    | <- st.copy(field = 42, field)
+                    |""".stripMargin
+
+    insideSemErrors(script) { errors =>
+      atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]]
+    }
+
+  }
 }