From fcdb5b0fefeffc004a50bd66a6a768b36ed9d71d Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Wed, 25 Oct 2023 11:12:09 +0200 Subject: [PATCH] feat(compiler): Allow omitting field name in struct creation [LNG-261] (#943) --- .../scala/aqua/parser/lexer/NamedArg.scala | 59 ++++++++ .../scala/aqua/parser/lexer/PropertyOp.scala | 25 ++-- .../main/scala/aqua/parser/lexer/Token.scala | 13 +- .../scala/aqua/parser/lexer/ValueToken.scala | 5 +- .../aqua/parser/AbilityValueExprSpec.scala | 13 +- .../aqua/parser/StructValueExprSpec.scala | 133 ++++++++++++------ .../aqua/parser/lexer/PropertyOpSpec.scala | 70 +++++---- .../aqua/semantics/rules/ValuesAlgebra.scala | 39 ++++- .../semantics/rules/types/TypesAlgebra.scala | 5 +- .../rules/types/TypesInterpreter.scala | 33 +++-- .../scala/aqua/semantics/SemanticsSpec.scala | 124 +++++++++++++++- 11 files changed, 400 insertions(+), 119 deletions(-) create mode 100644 parser/src/main/scala/aqua/parser/lexer/NamedArg.scala diff --git a/parser/src/main/scala/aqua/parser/lexer/NamedArg.scala b/parser/src/main/scala/aqua/parser/lexer/NamedArg.scala new file mode 100644 index 00000000..c0d50dc1 --- /dev/null +++ b/parser/src/main/scala/aqua/parser/lexer/NamedArg.scala @@ -0,0 +1,59 @@ +package aqua.parser.lexer + +import aqua.parser.lift.Span.S +import aqua.parser.lexer.Token.* + +import cats.data.NonEmptyList +import cats.parse.Parser as P +import cats.syntax.functor.* +import cats.Comonad +import cats.arrow.FunctionK + +enum NamedArg[F[_]] extends Token[F] { + // for `name = value` + case Full(name: Name[F], value: ValueToken[F]) + // for just `name` (short for `name = name`) + case Short(variable: VarToken[F]) + + lazy val argName: Name[F] = this match { + case Full(name, _) => name + case Short(variable) => variable.name + } + + lazy val argValue: ValueToken[F] = this match { + case Full(_, value) => value + case Short(variable) => variable + } + + override def as[T](v: T): F[T] = this match { + case Full(name, value) => name.as(v) + case Short(variable) => variable.as(v) + } + + override def mapK[K[_]: Comonad](fk: FunctionK[F, K]): NamedArg[K] = + this match { + case Full(name, value) => Full(name.mapK(fk), value.mapK(fk)) + case Short(variable) => Short(variable.mapK(fk)) + } +} + +object NamedArg { + + private val namedArgFull: P[NamedArg.Full[S]] = + P.defer( + (Name.p.between(` *`, `/s*`) <* + `=`.between(` *`, `/s*`)) ~ + ValueToken.`value`.between(` *`, `/s*`) + ).map(NamedArg.Full.apply) + + private val namedArgShort: P[NamedArg.Short[S]] = + P.defer( + VarToken.variable.between(` *`, `/s*`) + ).map(NamedArg.Short.apply) + + val namedArg: P[NamedArg[S]] = + namedArgFull.backtrack | namedArgShort + + val namedArgs: P[NonEmptyList[NamedArg[S]]] = + P.defer(` `.?.with1 ~ `(` ~ `/s*` *> comma(namedArg) <* `/s*` *> `)`) +} diff --git a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala index 6fa49a78..1e63f266 100644 --- a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala +++ b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala @@ -3,18 +3,19 @@ package aqua.parser.lexer import aqua.parser.lexer.Token.* import aqua.parser.lift.LiftParser import aqua.parser.lift.LiftParser.* +import aqua.parser.lift.Span +import aqua.parser.lift.Span.{P0ToSpan, PToSpan} +import aqua.types.LiteralType +import aqua.parser.lexer.CallArrowToken.CallBraces +import aqua.parser.lexer.NamedArg.namedArgs + +import cats.~> import cats.data.{NonEmptyList, NonEmptyMap} import cats.parse.{Numbers, 
Parser as P, Parser0 as P0} import cats.syntax.comonad.* import cats.syntax.functor.* import cats.{Comonad, Functor} - import scala.language.postfixOps -import cats.~> -import aqua.parser.lift.Span -import aqua.parser.lift.Span.{P0ToSpan, PToSpan} -import aqua.types.LiteralType -import aqua.parser.lexer.CallArrowToken.CallBraces sealed trait PropertyOp[F[_]] extends Token[F] { def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K] @@ -47,12 +48,14 @@ case class IntoIndex[F[_]: Comonad](point: F[Unit], idx: Option[ValueToken[F]]) override def mapK[K[_]: Comonad](fk: F ~> K): IntoIndex[K] = copy(fk(point), idx.map(_.mapK(fk))) } -case class IntoCopy[F[_]: Comonad](point: F[Unit], fields: NonEmptyMap[String, ValueToken[F]]) - extends PropertyOp[F] { +case class IntoCopy[F[_]: Comonad]( + point: F[Unit], + args: NonEmptyList[NamedArg[F]] +) extends PropertyOp[F] { override def as[T](v: T): F[T] = point.as(v) override def mapK[K[_]: Comonad](fk: F ~> K): IntoCopy[K] = - copy(fk(point), fields.map(_.mapK(fk))) + copy(fk(point), args.map(_.mapK(fk))) } object PropertyOp { @@ -66,9 +69,7 @@ object PropertyOp { } val parseCopy: P[PropertyOp[Span.S]] = - (`.` *> (`copy`.lift ~ namedArgs)).map { case (point, fields) => - IntoCopy(point, NonEmptyMap.of(fields.head, fields.tail: _*)) - } + (`.` *> (`copy`.lift ~ namedArgs)).map(IntoCopy.apply) private val parseIdx: P[PropertyOp[Span.S]] = (P.defer( diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala index 9eac6e85..8c44347a 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Token.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala @@ -1,6 +1,7 @@ package aqua.parser.lexer import aqua.parser.lift.Span.S + import cats.data.NonEmptyList import cats.parse.{Accumulator0, Parser as P, Parser0 as P0} import cats.{~>, Comonad, Functor} @@ -120,18 +121,6 @@ object Token { val `<-` : P[Unit] = P.string("<-") val `/s*` : P0[Unit] = ` \n+`.backtrack | ` *`.void - val namedArg: P[(String, ValueToken[S])] = - P.defer( - `name`.between(` *`, `/s*`) ~ - `=`.between(` *`, `/s*`).void ~ - ValueToken.`value`.between(` *`, `/s*`) - ).map { case ((name, _), vt) => - (name, vt) - } - - val namedArgs: P[NonEmptyList[(String, ValueToken[S])]] = - P.defer(` `.?.with1 ~ `(` ~ `/s*` *> comma(namedArg) <* `/s*` *> `)`) - case class LiftToken[F[_]: Functor, A](point: F[A]) extends Token[F] { override def as[T](v: T): F[T] = Functor[F].as(point, v) diff --git a/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala b/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala index afc42750..a4a818ad 100644 --- a/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala +++ b/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala @@ -3,6 +3,7 @@ package aqua.parser.lexer import aqua.parser.Expr import aqua.parser.head.FilenameExpr import aqua.parser.lexer.Token.* +import aqua.parser.lexer.NamedArg.namedArgs import aqua.parser.lift.LiftParser import aqua.parser.lift.LiftParser.* import aqua.types.LiteralType @@ -260,7 +261,7 @@ object CallArrowToken { case class NamedValueToken[F[_]: Comonad]( typeName: NamedTypeToken[F], - fields: NonEmptyMap[String, ValueToken[F]] + fields: NonEmptyList[NamedArg[F]] ) extends ValueToken[F] { override def mapK[K[_]: Comonad](fk: F ~> K): NamedValueToken[K] = @@ -277,7 +278,7 @@ object NamedValueToken { "Missing braces '()' after the struct type" ) .map { case (dn, args) => - NamedValueToken(NamedTypeToken(dn), args.toNem) + NamedValueToken(NamedTypeToken(dn), args) } } 
diff --git a/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala b/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala index 26c4d6a3..b0dd4dcc 100644 --- a/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala @@ -19,11 +19,14 @@ class AbilityValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec { parseData(str) should be( NamedValueToken( NamedTypeToken[Id]("AbilityA"), - NonEmptyMap.of( - "v1" -> toNumber(1), - "f1" -> PropertyToken[Id]( - VarToken(toName("input")), - NonEmptyList.one(IntoField("arrow")) + NonEmptyList.of( + NamedArg.Full(toName("v1"), toNumber(1)), + NamedArg.Full( + toName("f1"), + PropertyToken( + VarToken(toName("input")), + NonEmptyList.one(IntoField("arrow")) + ) ) ) ) diff --git a/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala b/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala index b487a28a..b237a5fb 100644 --- a/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala @@ -4,21 +4,10 @@ import aqua.AquaSpec import aqua.AquaSpec.{toNumber, toStr, toVar} import aqua.parser.expr.ConstantExpr import aqua.parser.expr.func.AssignmentExpr -import aqua.parser.lexer.{ - CallArrowToken, - CollectionToken, - IntoArrow, - LiteralToken, - Name, - NamedTypeToken, - NamedValueToken, - PropertyToken, - Token, - ValueToken, - VarToken -} +import aqua.parser.lexer.* import aqua.parser.lexer.CollectionToken.Mode.ArrayMode import aqua.types.LiteralType + import cats.Id import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -32,52 +21,67 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec { val one = toNumber(1) val two = toNumber(2) val three = toNumber(3) - val a = LiteralToken[Id]("\"a\"", LiteralType.string) - val b = LiteralToken[Id]("\"b\"", LiteralType.string) - val c = LiteralToken[Id]("\"c\"", LiteralType.string) + val a = toStr("a") + val b = toStr("b") + val c = toStr("c") parseData( str ) should be( NamedValueToken( NamedTypeToken[Id]("Obj"), - NonEmptyMap.of( - "f1" -> one, - "f2" -> a, - "f3" -> CollectionToken[Id](ArrayMode, List(one, two, three)), - "f4" -> CollectionToken[Id](ArrayMode, List(b, c)), - "f5" -> NamedValueToken( - NamedTypeToken[Id]("NestedObj"), - NonEmptyMap.of( - "i1" -> two, - "i2" -> b, - "i3" -> CallArrowToken(Name[Id]("funcCall"), List(three)), - "i4" -> VarToken[Id](Name[Id]("value")) + NonEmptyList.of( + NamedArg.Full(toName("f1"), one), + NamedArg.Full(toName("f2"), a), + NamedArg.Full(toName("f3"), CollectionToken[Id](ArrayMode, List(one, two, three))), + NamedArg.Full(toName("f4"), CollectionToken[Id](ArrayMode, List(b, c))), + NamedArg.Full( + toName("f5"), + NamedValueToken( + NamedTypeToken[Id]("NestedObj"), + NonEmptyList.of( + NamedArg.Full(toName("i1"), two), + NamedArg.Full(toName("i2"), b), + NamedArg.Full(toName("i3"), CallArrowToken(toName("funcCall"), List(three))), + NamedArg.Full(toName("i4"), VarToken(toName("value"))) + ) ) ), - "f6" -> CallArrowToken(Name[Id]("funcCall"), List(one)), - "f7" -> PropertyToken[Id]( - VarToken[Id](Name[Id]("Serv")), - NonEmptyList.one(IntoArrow[Id](Name[Id]("call"), List(two))) + NamedArg.Full(toName("f6"), CallArrowToken(Name[Id]("funcCall"), List(one))), + NamedArg.Full( + toName("f7"), + PropertyToken[Id]( + VarToken[Id](Name[Id]("Serv")), + NonEmptyList.one(IntoArrow[Id](Name[Id]("call"), List(two))) + ) ) ) ) ) } - "one named arg" should "be 
parsed" in { - val result = aqua.parser.lexer.Token.namedArg + "named args" should "parse one full named arg" in { + val result = NamedArg.namedArg .parseAll(""" a | = | 3""".stripMargin) - .map(v => (v._1, v._2.mapK(spanToId))) + .map(_.mapK(spanToId)) .value - result should be(("a", toNumber(3))) + result should be(NamedArg.Full(toName("a"), toNumber(3))) } - "named args" should "be parsed" in { - val result = Token.namedArgs + it should "parse one short named arg" in { + val result = NamedArg.namedArg + .parseAll(" b ") + .map(_.mapK(spanToId)) + .value + + result should be(NamedArg.Short(toVar("b"))) + } + + it should "parse a few full named args" in { + val result = NamedArg.namedArgs .parseAll("""( |a = "str", |b = 3, @@ -86,16 +90,61 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec { | 5 |)""".stripMargin) .value - .map { case (str, vt) => (str, vt.mapK(spanToId)) } + .map(_.mapK(spanToId)) result should be( - NonEmptyList[(String, ValueToken[Id])]( - ("a", toStr("str")), - ("b", toNumber(3)) :: ("c", toNumber(5)) :: Nil + NonEmptyList.of( + NamedArg.Full(toName("a"), toStr("str")), + NamedArg.Full(toName("b"), toNumber(3)), + NamedArg.Full(toName("c"), toNumber(5)) ) ) } + it should "parse a few short named args" in { + val result = NamedArg.namedArgs + .parseAll("""( + |a, + | b , + |c + | + |)""".stripMargin) + .value + .map(_.mapK(spanToId)) + + result should be( + NonEmptyList.of( + NamedArg.Short(toVar("a")), + NamedArg.Short(toVar("b")), + NamedArg.Short(toVar("c")) + ) + ) + } + + it should "parse mixed named args" in { + val args = List( + "meaning = 42" -> NamedArg.Full(toName("meaning"), toNumber(42)), + "variable" -> NamedArg.Short(toVar("variable")), + "col = [1,2,3]" -> NamedArg.Full( + toName("col"), + CollectionToken[Id](ArrayMode, List(toNumber(1), toNumber(2), toNumber(3))) + ), + "arrow" -> NamedArg.Short(toVar("arrow")) + ) + + args.permutations.foreach(perm => + val str = perm.map(_._1).mkString("(", ", ", ")") + val expected = NonEmptyList.fromListUnsafe(perm.map(_._2)) + + val result = NamedArg.namedArgs + .parseAll(str) + .value + .map(_.mapK(spanToId)) + + result should be(expected) + ) + } + "one line struct value" should "be parsed" in { parseAndCheckStruct( """Obj(f1 = 1, f2 = "a", f3 = [1,2,3], f4=["b", "c"], f5 =NestedObj(i1 = 2, i2 = "b", i3= funcCall(3), i4 = value), f6=funcCall(1), f7 = Serv.call(2))""" diff --git a/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala b/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala index 14dbb5e0..21f86cef 100644 --- a/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala +++ b/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala @@ -42,39 +42,59 @@ class PropertyOpSpec extends AnyFlatSpec with Matchers with EitherValues { PropertyOp.ops.parseAll("!-1").isLeft shouldBe true } - "copy ops" should "parse" in { - val opsP = (s: String) => PropertyOp.ops.parseAll(s).value.map(_.mapK(spanToId)) + def copyOpsP(s: String) = PropertyOp.ops.parseAll(s).value.map(_.mapK(spanToId)) - opsP(".copy(a = \"str\", b = 12)") should be( + "copy ops" should "parse one copy" in { + copyOpsP(".copy(a = \"str\", b = 12)") should be( NonEmptyList.of( IntoCopy[Id]( (), - NonEmptyMap.of( - "a" -> LiteralToken("\"str\"", LiteralType.string), - "b" -> toNumber(12) - ) - ) - ) - ) - - opsP(".copy(a = \"str\", b = 12).copy(c = 54, d = someVar)") should be( - NonEmptyList.of( - IntoCopy[Id]( - (), - NonEmptyMap.of( - "a" -> LiteralToken("\"str\"", LiteralType.string), - "b" -> 
toNumber(12) - ) - ), - IntoCopy[Id]( - (), - NonEmptyMap.of( - "c" -> toNumber(54), - "d" -> VarToken("someVar") + NonEmptyList.of( + NamedArg.Full(toName("a"), toStr("str")), + NamedArg.Full(toName("b"), toNumber(12)) ) ) ) ) } + it should "parse sequential copy" in { + copyOpsP(".copy(a = \"str\", b = 12).copy(c = 54, d = someVar)") should be( + NonEmptyList.of( + IntoCopy[Id]( + (), + NonEmptyList.of( + NamedArg.Full(toName("a"), toStr("str")), + NamedArg.Full(toName("b"), toNumber(12)) + ) + ), + IntoCopy[Id]( + (), + NonEmptyList.of( + NamedArg.Full(toName("c"), toNumber(54)), + NamedArg.Full(toName("d"), toVar("someVar")) + ) + ) + ) + ) + } + + it should "parse mixed args in copy" in { + val args = List( + "a = \"str\"" -> NamedArg.Full(toName("a"), toStr("str")), + "b = 12" -> NamedArg.Full(toName("b"), toNumber(12)), + "c" -> NamedArg.Short(toVar("c")), + "d" -> NamedArg.Short(toVar("d")) + ) + + args.toSet.subsets().filter(_.nonEmpty).flatMap(_.toList.permutations).foreach { args => + val str = args.map(_._1).mkString(".copy(", ", ", ")") + val expected = NonEmptyList.of( + IntoCopy[Id]((), NonEmptyList.fromListUnsafe(args.map(_._2))) + ) + + copyOpsP(str) should be(expected) + } + } + } diff --git a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala index 21ba297c..02806643 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala @@ -34,6 +34,23 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using report: ReportAlgebra[S, Alg] ) extends Logging { + private def reportNamedArgsDuplicates( + args: NonEmptyList[NamedArg[S]] + ): Alg[Unit] = args + .groupBy(_.argName.value) + .filter { case (_, group) => + group.size > 1 + } + .toList + .traverse_ { case (name, group) => + group.traverse_ { arg => + report.error( + arg.argName, + s"Duplicate argument `$name`" + ) + } + } + private def resolveSingleProperty(rootType: Type, op: PropertyOp[S]): Alg[Option[PropertyRaw]] = op match { case op: IntoField[S] => @@ -46,12 +63,13 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using ) } yield arrowProp case op: IntoCopy[S] => - for { - maybeFields <- op.fields.traverse(valueToRaw) - copyProp <- maybeFields.sequence.flatTraverse( - T.resolveCopy(rootType, op, _) + (for { + _ <- OptionT.liftF( + reportNamedArgsDuplicates(op.args) ) - } yield copyProp + fields <- op.args.traverse(arg => OptionT(valueToRaw(arg.argValue)).map(arg -> _)) + prop <- OptionT(T.resolveCopy(op, rootType, fields)) + } yield prop).value case op: IntoIndex[S] => for { maybeIdx <- op.idx.fold(LiteralRaw.Zero.some.pure)(valueToRaw) @@ -102,7 +120,13 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using case dvt @ NamedValueToken(typeName, fields) => (for { resolvedType <- OptionT(T.resolveType(typeName)) - fieldsGiven <- fields.traverse(value => OptionT(valueToRaw(value))) + // Report duplicate fields + _ <- OptionT.liftF( + reportNamedArgsDuplicates(fields) + ) + fieldsGiven <- fields + .traverse(arg => OptionT(valueToRaw(arg.argValue)).map(arg.argName.value -> _)) + .map(_.toNem) // Take only last value for a field fieldsGivenTypes = fieldsGiven.map(_.`type`) generated <- OptionT.fromOption( resolvedType match { @@ -297,7 +321,8 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using valueToRaw(v).flatMap( _.flatTraverse { case ca: CallArrowRaw => (ca, ca.baseType).some.pure[Alg] - case apr@ApplyPropertyRaw(_, IntoArrowRaw(_, arrowType, _)) => (apr, 
arrowType).some.pure[Alg] + case apr @ ApplyPropertyRaw(_, IntoArrowRaw(_, arrowType, _)) => + (apr, arrowType).some.pure[Alg] // TODO: better error message (`raw` formatting) case raw => report.error(v, s"Expected arrow call, got $raw").as(none) } diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala index 3fc8476b..40409769 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala @@ -37,10 +37,11 @@ trait TypesAlgebra[S[_], Alg[_]] { def resolveIndex(rootT: Type, op: IntoIndex[S], idx: ValueRaw): Alg[Option[PropertyRaw]] def resolveCopy( + token: IntoCopy[S], rootT: Type, - op: IntoCopy[S], - fields: NonEmptyMap[String, ValueRaw] + fields: NonEmptyList[(NamedArg[S], ValueRaw)] ): Alg[Option[PropertyRaw]] + def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[PropertyRaw]] def resolveArrow( diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala index af088c17..474b5d70 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala @@ -216,7 +216,7 @@ class TypesInterpreter[S[_], X](using ) .as(None) ) { - case at@ArrowType(_, _) => + case at @ ArrowType(_, _) => locations .pointFieldLocation(name, op.name.value, op) .as(Some(IntoArrowRaw(op.name.value, at, arguments))) @@ -245,22 +245,33 @@ class TypesInterpreter[S[_], X](using // TODO actually it's stateless, exists there just for reporting needs override def resolveCopy( + token: IntoCopy[S], rootT: Type, - op: IntoCopy[S], - fields: NonEmptyMap[String, ValueRaw] + args: NonEmptyList[(NamedArg[S], ValueRaw)] ): State[X, Option[PropertyRaw]] = rootT match { case st: StructType => - fields.toSortedMap.toList.traverse { case (fieldName, value) => + args.forallM { case (arg, value) => + val fieldName = arg.argName.value st.fields.lookup(fieldName) match { case Some(t) => - ensureTypeMatches(op.fields.lookup(fieldName).getOrElse(op), t, value.`type`) - case None => report.error(op, s"No field with name '$fieldName' in $rootT").as(false) + ensureTypeMatches(arg.argValue, t, value.`type`) + case None => + report.error(arg.argName, s"No field with name '$fieldName' in $rootT").as(false) } - }.map(res => if (res.forall(identity)) Some(IntoCopyRaw(st, fields)) else None) + }.map( + Option.when(_)( + IntoCopyRaw( + st, + args.map { case (arg, value) => + arg.argName.value -> value + }.toNem + ) + ) + ) case _ => - report.error(op, s"Expected $rootT to be a data type").as(None) + report.error(token, s"Expected $rootT to be a data type").as(None) } // TODO actually it's stateless, exists there just for reporting needs @@ -339,12 +350,12 @@ class TypesInterpreter[S[_], X](using ) .as(false) } else { - valueFields.toSortedMap.toList.traverse { (name, `type`) => + valueFields.toSortedMap.toList.forallM { (name, `type`) => typeFields.lookup(name) match { case Some(t) => val nextToken = token match { case NamedValueToken(_, fields) => - fields.lookup(name).getOrElse(token) + fields.find(_.argName.value == name).getOrElse(token) // TODO: Is it needed? 
case PropertyToken(_, properties) => properties.last @@ -359,7 +370,7 @@ class TypesInterpreter[S[_], X](using ) .as(false) } - }.map(_.forall(identity)) + } } case _ => val notes = diff --git a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala index ff327b1c..04147a4b 100644 --- a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala +++ b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala @@ -5,7 +5,7 @@ import aqua.parser.Ast import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp, OnTag, ParTag, RawTag, SeqGroupTag, SeqTag} import aqua.parser.Parser import aqua.parser.lift.{LiftParser, Span} -import aqua.raw.value.{ApplyBinaryOpRaw, LiteralRaw, ValueRaw, VarRaw} +import aqua.raw.value.* import aqua.types.* import aqua.raw.ops.* @@ -674,4 +674,126 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { warnings.exists(_.hints.exists(_.contains("used"))) should be(true) } } + + { + val fieldCases = List( + "field = 42" -> "field = field", + "field = 42" -> "field", + "integer = 42" -> "field = integer", + "" -> "field = 42" + ) + + val strCases = List( + "str = \"str\"" -> "str = str", + "str = \"str\"" -> "str", + "string = \"str\"" -> "str = string", + "" -> "str = \"str\"" + ) + + it should "handle struct creation" in { + for { + fieldCase <- fieldCases + (fieldDef, fieldArg) = fieldCase + strCase <- strCases + (strDef, strArg) = strCase + } { + val defs = List(fieldDef, strDef).filter(_.nonEmpty).mkString("\n ") + val args = List(fieldArg, strArg).filter(_.nonEmpty).mkString(", ") + val script = s"""|data Struct: + | field: i8 + | str: string + | + |func main() -> Struct: + | $defs + | <- Struct($args) + |""".stripMargin + + insideBody(script) { body => + matchSubtree(body) { case (ReturnTag(vals), _) => + inside(vals.head) { case MakeStructRaw(fields, _) => + fields.contains("field") should be(true) + fields.contains("str") should be(true) + } + } + } + } + } + + it should "handle ability creation" in { + def arrow(name: String) = + s"""|$name = (x: i8) -> bool: + | <- x > 0 + |""".stripMargin + val arrowCases = List( + arrow("arrow") -> "arrow = arrow", + arrow("arrow") -> "arrow", + arrow("closure") -> "arrow = closure" + ) + + for { + arrowCase <- arrowCases + (arrowDef, arrowArg) = arrowCase + fieldCase <- fieldCases + (fieldDef, fieldArg) = fieldCase + strCase <- strCases + (strDef, strArg) = strCase + } { + val defs = List(arrowDef, fieldDef, strDef).filter(_.nonEmpty).mkString("\n ") + val args = List(arrowArg, fieldArg, strArg).filter(_.nonEmpty).mkString(", ") + val script = s"""|ability Ab: + | field: i8 + | str: string + | arrow(x: i8) -> bool + | + |func main() -> Ab: + | $defs + | <- Ab($args) + |""".stripMargin + + insideBody(script) { body => + matchSubtree(body) { case (ReturnTag(vals), _) => + inside(vals.head) { case AbilityRaw(fields, _) => + fields.contains("arrow") should be(true) + fields.contains("field") should be(true) + fields.contains("str") should be(true) + } + } + } + } + } + } + + it should "forbid duplicate fields in data or ability creation" in { + List("data", "ability").foreach { form => + + val script = s"""|$form StructOrAb: + | field: i8 + | + |func main() -> StructOrAb: + | field = 24 + | <- StructOrAb(field = 42, field) + |""".stripMargin + + insideSemErrors(script) { errors => + atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]] + } + } + } + + it should "forbid duplicate fields in data copy" in { + + val script = """|data Struct: 
+ | field: i8 + | + |func main() -> Struct: + | st = Struct(field = 24) + | field = 37 + | <- st.copy(field = 42, field) + |""".stripMargin + + insideSemErrors(script) { errors => + atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]] + } + + } }
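
Not part of the diff above — a minimal Aqua sketch of the shorthand this patch enables, with the type and field names mirroring the SemanticsSpec cases (the `update` function and literal values are illustrative only): a bare `name` passed to struct/ability creation or to `.copy(...)` is parsed as `NamedArg.Short` and treated as `name = name`.

    data Struct:
      field: i8
      str: string

    func main() -> Struct:
      field = 42
      str = "str"
      -- `field` is shorthand for `field = field`, `str` for `str = str`
      <- Struct(field, str)

    func update(st: Struct) -> Struct:
      field = 37
      <- st.copy(field) -- same as st.copy(field = field)

Passing the same name twice (e.g. `Struct(field = 42, field)` or `st.copy(field = 42, field)`) is reported as an error, as covered by the new "forbid duplicate fields" SemanticsSpec tests.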