diff --git a/aqua-run/src/main/scala/aqua/run/CliFunc.scala b/aqua-run/src/main/scala/aqua/run/CliFunc.scala index f66e80de..ae8f554f 100644 --- a/aqua-run/src/main/scala/aqua/run/CliFunc.scala +++ b/aqua-run/src/main/scala/aqua/run/CliFunc.scala @@ -4,68 +4,61 @@ import aqua.parser.lexer.{CallArrowToken, CollectionToken, LiteralToken, VarToke import aqua.parser.lift.Span import aqua.raw.value.{CollectionRaw, LiteralRaw, ValueRaw, VarRaw} import aqua.types.{ArrayType, BottomType} + import cats.data.{NonEmptyList, Validated, ValidatedNel} import cats.data.Validated.{invalid, invalidNel, validNel} -import cats.{Id, ~>} +import cats.{~>, Id} import cats.syntax.traverse.* +import cats.syntax.validated.* +import cats.syntax.either.* +import cats.syntax.comonad.* +import cats.syntax.option.* -case class CliFunc(name: String, args: List[ValueRaw] = Nil, ability: Option[String] = None) +case class CliFunc(name: String, args: List[ValueRaw] = Nil) object CliFunc { def spanToId: Span.S ~> Id = new (Span.S ~> Id) { - override def apply[A](span: Span.S[A]): Id[A] = { - span._2 - } + override def apply[A](span: Span.S[A]): Id[A] = span.extract } def fromString(func: String): ValidatedNel[String, CliFunc] = { - CallArrowToken.callArrow.parseAll(func.trim) match { - case Right(exprSpan) => - val expr = exprSpan.mapK(spanToId) - - val argsV = expr.args.collect { + CallArrowToken.callArrow + .parseAll(func.trim) + .toValidated + .leftMap( + _.expected.map(_.context.mkString("\n")) + ) + .map(_.mapK(spanToId)) + .andThen(expr => + expr.args.traverse { case LiteralToken(value, ts) => - validNel(LiteralRaw(value, ts)) - case VarToken(name, _) => - validNel(VarRaw(name.value, BottomType)) + LiteralRaw(value, ts).valid + case VarToken(name) => + VarRaw(name.value, BottomType).valid case CollectionToken(_, values) => - val hasVariables = values.exists { - case LiteralToken(_, _) => false - case _ => true - } - if (!hasVariables) { - val literals = values.collect { case LiteralToken(value, ts) => - LiteralRaw(value, ts) - } - val hasSameTypesOrEmpty = - literals.isEmpty || literals.map(_.baseType).toSet.size == 1 - - if (hasSameTypesOrEmpty) { - validNel( - NonEmptyList - .fromList(literals) - .map(l => CollectionRaw(l, ArrayType(l.head.baseType))) - .getOrElse(ValueRaw.Nil) - ) - } else - invalidNel( - "If the argument is an array, then it must contain elements of the same type." - ) - - } else - invalidNel( - "Array arguments can only have numbers, strings, or booleans." + values.traverse { + case LiteralToken(value, ts) => + LiteralRaw(value, ts).some + case _ => none + }.toValid( + "Array elements can only be numbers, strings, or booleans." + ).ensure( + "If the argument is an array, then it must contain elements of the same type." 
+ )(_.distinctBy(_.`type`).size <= 1) + .map( + NonEmptyList + .fromList(_) + .map(l => CollectionRaw(l, ArrayType(l.head.baseType))) + .getOrElse(ValueRaw.Nil) ) + .toValidatedNel case CallArrowToken(_, _, _) => - invalidNel("Function calls as arguments are not supported.") - }.sequence - argsV.andThen(args => - validNel(CliFunc(expr.funcName.value, args, expr.ability.map(_.name))) - ) - - case Left(err) => invalid(err.expected.map(_.context.mkString("\n"))) - } + "Function calls as arguments are not supported.".invalidNel + case _ => + "Unsupported argument.".invalidNel + }.map(args => CliFunc(expr.funcName.value, args)) + ) } } diff --git a/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala b/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala index 9a92e421..5bd3747e 100644 --- a/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala +++ b/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala @@ -9,6 +9,7 @@ import aqua.model.transform.TransformConfig import aqua.model.{AquaContext, FuncArrow} import aqua.parser.lift.FileSpan import aqua.run.CliFunc + import cats.data.Validated.{invalidNec, validNec} import cats.data.{Chain, NonEmptyList, Validated, ValidatedNec} import cats.effect.IO @@ -19,6 +20,7 @@ import cats.syntax.functor.* import cats.syntax.monad.* import cats.syntax.show.* import cats.syntax.traverse.* +import cats.syntax.option.* import fs2.io.file.{Files, Path} import scribe.Logging @@ -84,16 +86,9 @@ object FuncCompiler { def findFunction( contexts: Chain[AquaContext], func: CliFunc - ): ValidatedNec[String, FuncArrow] = - func.ability - .fold( - contexts - .collectFirstSome(_.allFuncs.get(func.name)) - )(ab => contexts.collectFirstSome(_.abilities.get(ab).flatMap(_.allFuncs.get(func.name)))) - .map(validNec) - .getOrElse( - Validated.invalidNec[String, FuncArrow]( - s"There is no function '${func.ability.map(_ + ".").getOrElse("")}${func.name}' or it is not exported. Check the spelling or see https://fluence.dev/docs/aqua-book/language/header/#export" - ) - ) + ): ValidatedNec[String, FuncArrow] = contexts + .collectFirstSome(_.allFuncs.get(func.name)) + .toValidNec( + s"There is no function '${func.name}' or it is not exported. 
Check the spelling or see https://fluence.dev/docs/aqua-book/language/header/#export" + ) } diff --git a/aqua-src/antithesis.aqua b/aqua-src/antithesis.aqua index deb7165c..cbeefc63 100644 --- a/aqua-src/antithesis.aqua +++ b/aqua-src/antithesis.aqua @@ -1,87 +1,7 @@ -aqua Main +service Srv("srv"): + call(x: i32) -> i32 -use DECLARE_CONST, decl_bar from "declare.aqua" as Declare - -export SomeService, handleAb, bug214, checkAbCalls - -service SomeService("wed"): - getStr(s: string) -> string - -ability SomeAb: - someArrow(s: string) -> string, string - str: string - -ability SecondAb: - arrow(s: string) -> string - num: u32 - -func funcStr(s: string) -> string, string: - strInFunc <- SomeService.getStr(Declare.DECLARE_CONST) - strInFunc2 <- SomeService.getStr(s) - <- strInFunc, strInFunc2 - -func handleSecAb {SomeAb, SecondAb}() -> string, string, string, u32: - SomeAb.someArrow("eferfrfrf") - b, c <- SomeAb.someArrow("efre") - d <- SecondAb.arrow(SomeAb.str) - <- b, c, d, SecondAb.num - -func returnAb(s: string) -> SomeAb: - SomeAb = SomeAb(someArrow = funcStr, str = s) - <- SomeAb - -func handleAb(fff: string) -> string, string, string, u32: - SomeAb = returnAb(fff) - SecondAb = SecondAb(arrow = funcStr, num = 12) - res1, res2, res3, res4 <- handleSecAb{SomeAb, SecondAb}() - <- res1, res2, res3, res4 - -data Struct: - int: i8 - -ability Simple: - st: Struct - arrow(x: i8) -> bool - -ability Complex: - simple: Simple - field: string - -func foo{Complex, Simple}() -> bool, bool: - closure = () -> bool: - <- Simple.st.int >= 0 - res <- closure() - <- Complex.simple.arrow( - Complex.simple.st.int - ), res - -func bug214() -> bool, bool: - closure = (x: i8) -> bool: - <- x > 0 - - MyComplex = Complex( - simple = Simple( - st = Struct(int = 0), - arrow = closure - ), - field = "complex" - ) - - res1, res2 <- foo{MyComplex, MyComplex.simple}() - <- res1, res2 - -ability SSS: - arrow(x: i8) -> bool - -ability CCCC: - arrow(x: i8) -> bool - simple: SSS - -func checkAbCalls() -> bool, bool: - closure = (x: i8) -> bool: - <- x > 20 - - MySSS = SSS(arrow = closure) - MyCCCC = CCCC(simple = MySSS, arrow = MySSS.arrow) - - <- MySSS.arrow(42), MyCCCC.arrow(12) +func main() -> i32: + arr = [1, 2, 3] + a <- Srv.call(0) + <- arr[Srv.call(1)] \ No newline at end of file diff --git a/model/raw/src/main/scala/aqua/raw/value/PropertyRaw.scala b/model/raw/src/main/scala/aqua/raw/value/PropertyRaw.scala index b989a3b9..e5357ca5 100644 --- a/model/raw/src/main/scala/aqua/raw/value/PropertyRaw.scala +++ b/model/raw/src/main/scala/aqua/raw/value/PropertyRaw.scala @@ -19,18 +19,21 @@ case class IntoFieldRaw(name: String, `type`: Type) extends PropertyRaw { override def varNames: Set[String] = Set.empty } -case class IntoArrowRaw(name: String, arrowType: Type, arguments: List[ValueRaw]) extends PropertyRaw { +case class IntoArrowRaw(name: String, arrowType: Type, arguments: List[ValueRaw]) + extends PropertyRaw { override def `type`: Type = arrowType - + override def map(f: ValueRaw => ValueRaw): PropertyRaw = this override def varNames: Set[String] = arguments.flatMap(_.varNames).toSet - override def renameVars(vals: Map[String, String]): PropertyRaw = copy(arguments = arguments.map(_.renameVars(vals))) + override def renameVars(vals: Map[String, String]): PropertyRaw = + copy(arguments = arguments.map(_.renameVars(vals))) } -case class IntoCopyRaw(`type`: StructType, fields: NonEmptyMap[String, ValueRaw]) extends PropertyRaw { +case class IntoCopyRaw(`type`: StructType, fields: NonEmptyMap[String, ValueRaw]) + 
extends PropertyRaw { override def map(f: ValueRaw => ValueRaw): IntoCopyRaw = copy(fields = fields.map(f)) override def varNames: Set[String] = Set.empty @@ -38,14 +41,6 @@ case class IntoCopyRaw(`type`: StructType, fields: NonEmptyMap[String, ValueRaw] override def renameVars(vals: Map[String, String]): IntoCopyRaw = this } -case class MethodRaw(name: String, `type`: Type) extends PropertyRaw { - override def map(f: ValueRaw => ValueRaw): MethodRaw = this - - override def renameVars(vals: Map[String, String]): MethodRaw = this - - override def varNames: Set[String] = Set.empty -} - case class FunctorRaw(name: String, `type`: Type) extends PropertyRaw { override def map(f: ValueRaw => ValueRaw): FunctorRaw = this diff --git a/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala b/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala index 3c5061d3..6e634c1c 100644 --- a/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala +++ b/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala @@ -1,8 +1,10 @@ package aqua.raw.value import aqua.types.* + import cats.data.{Chain, NonEmptyList, NonEmptyMap} import cats.Eq +import cats.syntax.option.* import scribe.Logging sealed trait ValueRaw { @@ -263,3 +265,46 @@ case class CallArrowRaw( s"(call ${ability.fold("")(a => s"|$a| ")} (${serviceId.fold("")(_.toString + " ")}$name) [${arguments .mkString(" ")}] :: $baseType)" } + +object CallArrowRaw { + + def func( + funcName: String, + baseType: ArrowType, + arguments: List[ValueRaw] = Nil + ): CallArrowRaw = CallArrowRaw( + ability = None, + name = funcName, + arguments = arguments, + baseType = baseType, + serviceId = None + ) + + def ability( + abilityName: String, + funcName: String, + baseType: ArrowType, + arguments: List[ValueRaw] = Nil + ): CallArrowRaw = CallArrowRaw( + ability = None, + name = AbilityType.fullName(abilityName, funcName), + arguments = arguments, + baseType = baseType, + serviceId = None + ) + + def service( + abilityName: String, + serviceId: ValueRaw, + funcName: String, + baseType: ArrowType, + arguments: List[ValueRaw] = Nil + ): CallArrowRaw = CallArrowRaw( + ability = abilityName.some, + name = funcName, + arguments = arguments, + baseType = baseType, + serviceId = Some(serviceId) + ) + +} diff --git a/parser/src/main/scala/aqua/parser/expr/ConstantExpr.scala b/parser/src/main/scala/aqua/parser/expr/ConstantExpr.scala index 183e7b1f..319089ef 100644 --- a/parser/src/main/scala/aqua/parser/expr/ConstantExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/ConstantExpr.scala @@ -2,20 +2,15 @@ package aqua.parser.expr import aqua.parser.Expr import aqua.parser.lexer.Token.* -import aqua.parser.lexer.{ - CallArrowToken, - CollectionToken, - InfixToken, - LiteralToken, - Name, - ValueToken -} +import aqua.parser.lexer.* import aqua.parser.lift.LiftParser import cats.Comonad import cats.parse.Parser as P import cats.~> import aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan} +import aqua.parser.lexer.PrefixToken +import aqua.parser.lexer.VarToken case class ConstantExpr[F[_]]( name: Name[F], @@ -35,20 +30,17 @@ object ConstantExpr extends Expr.Leaf { override val p: P[ConstantExpr[Span.S]] = (((constName ~ `?`.?).with1 <* `=` <* ` `) ~ ValueToken.`value`).flatMap { case ((name, mark), value) => + lazy val fail = (what: String) => + P.failWith( + s"'$name' is $what, but only strings, numbers or booleans can be used" + ) value match { - case CollectionToken(point, _) => - P.failWith( - s"'$name' is an array, but only strings, numbers or booleans can be 
used" - ) - case CallArrowToken(_, _, _) => - P.failWith( - s"'$name' is a function call, but only strings, numbers or booleans can be used" - ) - case InfixToken(_, _, _) => - P.failWith( - s"'$name' an expression, but only strings, numbers or booleans can be used" - ) - case _ => + case CollectionToken(point, _) => fail("a collection") + case CallArrowToken(_, _, _) => fail("a function call") + case InfixToken(_, _, _) | PrefixToken(_, _) => fail("an expression") + case PropertyToken(_, _) => fail("a property") + case NamedValueToken(_, _) => fail("an ability or data") + case LiteralToken(_, _) | VarToken(_) => P.pure(ConstantExpr(name, value, mark.nonEmpty)) } diff --git a/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala b/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala index 01800d07..17041b44 100644 --- a/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala @@ -27,9 +27,8 @@ object ArrowExpr extends Expr.AndIndented { // It is important for IfExpr to be before CallArrowExpr // because `if (1 + 1) == 2` is parsed as if `if(1 + 1)` is an arrow call IfExpr :: - CallArrowExpr :: - TryExpr :: ElseOtherwiseExpr :: + TryExpr :: CatchExpr :: Expr.defer(ParExpr) :: Expr.defer(CoExpr) :: @@ -37,6 +36,10 @@ object ArrowExpr extends Expr.AndIndented { DeclareStreamExpr :: Expr.defer(ClosureExpr) :: AssignmentExpr :: + // It is important for CallArrowExpr to be last + // because it can parse prefixes of other expressions + // e.g. `if` could be parsed as variable name + CallArrowExpr :: Nil override val validChildren: List[Expr.Lexem] = diff --git a/parser/src/main/scala/aqua/parser/expr/func/AssignmentExpr.scala b/parser/src/main/scala/aqua/parser/expr/func/AssignmentExpr.scala index 70084af1..050fca33 100644 --- a/parser/src/main/scala/aqua/parser/expr/func/AssignmentExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/func/AssignmentExpr.scala @@ -20,7 +20,7 @@ case class AssignmentExpr[F[_]]( object AssignmentExpr extends Expr.Leaf { override val p: P[AssignmentExpr[Span.S]] = - (((Name.cl | Name.p) <* ` = `).with1 ~ ValueToken.`value`).flatMap { case (variable, value) => + ((Name.variable <* ` = `).with1 ~ ValueToken.`value`).flatMap { case (variable, value) => value match { case CollectionToken(_, values) => if (values.isEmpty) diff --git a/parser/src/main/scala/aqua/parser/expr/func/CallArrowExpr.scala b/parser/src/main/scala/aqua/parser/expr/func/CallArrowExpr.scala index 800a1466..13d30e47 100644 --- a/parser/src/main/scala/aqua/parser/expr/func/CallArrowExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/func/CallArrowExpr.scala @@ -12,7 +12,9 @@ import cats.{~>, Comonad} case class CallArrowExpr[F[_]]( variables: List[Name[F]], - callArrow: CallArrowToken[F] + // Here `ValueToken` is used to allow + // a, b <- ServiceOrAbility.call() + callArrow: ValueToken[F] ) extends Expr[F](CallArrowExpr, callArrow) { def mapK[K[_]: Comonad](fk: F ~> K): CallArrowExpr[K] = @@ -27,9 +29,9 @@ object CallArrowExpr extends Expr.Leaf { override val p: P[CallArrowExpr[Span.S]] = { val variables: P0[Option[NonEmptyList[Name[Span.S]]]] = (comma(Name.p) <* ` <- `).backtrack.? - (variables.with1 ~ CallArrowToken.callArrow.withContext( - "Only results of a function call can be written to a stream" - )).map { case (variables, token) => + // TODO: Restrict to function call only + // or allow any expression? 
+ (variables.with1 ~ ValueToken.value).map { case (variables, token) => CallArrowExpr(variables.toList.flatMap(_.toList), token) } } diff --git a/parser/src/main/scala/aqua/parser/expr/func/JoinExpr.scala b/parser/src/main/scala/aqua/parser/expr/func/JoinExpr.scala index cf981a8d..131f5cc3 100644 --- a/parser/src/main/scala/aqua/parser/expr/func/JoinExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/func/JoinExpr.scala @@ -3,14 +3,14 @@ package aqua.parser.expr.func import aqua.parser.Expr import aqua.parser.expr.* import aqua.parser.lexer.Token.* -import aqua.parser.lexer.{ValueToken, VarToken} +import aqua.parser.lexer.{PropertyToken, ValueToken} import aqua.parser.lift.{LiftParser, Span} import aqua.parser.lift.Span.{P0ToSpan, PToSpan} import cats.parse.Parser import cats.{~>, Comonad} import cats.data.NonEmptyList -case class JoinExpr[F[_]](values: NonEmptyList[VarToken[F]]) +case class JoinExpr[F[_]](values: NonEmptyList[ValueToken[F]]) extends Expr[F](JoinExpr, values.head) { override def mapK[K[_]: Comonad](fk: F ~> K): JoinExpr[K] = @@ -20,5 +20,5 @@ case class JoinExpr[F[_]](values: NonEmptyList[VarToken[F]]) object JoinExpr extends Expr.Leaf { override val p: Parser[JoinExpr[Span.S]] = - (`join` *> ` ` *> comma(ValueToken.varProperty)).map(JoinExpr(_)) + (`join` *> ` ` *> comma(PropertyToken.property)).map(JoinExpr(_)) } diff --git a/parser/src/main/scala/aqua/parser/expr/func/ParExpr.scala b/parser/src/main/scala/aqua/parser/expr/func/ParExpr.scala index a9dafb3a..a3cc342a 100644 --- a/parser/src/main/scala/aqua/parser/expr/func/ParExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/func/ParExpr.scala @@ -20,7 +20,9 @@ case class ParExpr[F[_]](point: Token[F]) extends Expr[F](ParExpr, point) { object ParExpr extends Expr.Prefix() { override def continueWith: List[Expr.Lexem] = - CallArrowExpr :: OnExpr :: ForExpr :: JoinExpr :: Nil + // Here it is important for CallArrowExpr to be last + // because it could parse prefixes of other expressions + OnExpr :: ForExpr :: JoinExpr :: CallArrowExpr :: Nil override val p: Parser[Expr[Span.S]] = `par`.lift.map(Token.lift[Span.S, Unit](_)).map(ParExpr(_)) diff --git a/parser/src/main/scala/aqua/parser/lexer/Name.scala b/parser/src/main/scala/aqua/parser/lexer/Name.scala index 3caa8cbf..f2714e11 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Name.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Name.scala @@ -14,6 +14,8 @@ import aqua.parser.lift.Span.{P0ToSpan, PToSpan} case class Name[F[_]: Comonad](name: F[String]) extends Token[F] { override def as[T](v: T): F[T] = name.as(v) + def asTypeToken: NamedTypeToken[F] = NamedTypeToken(name) + override def mapK[K[_]: Comonad](fk: F ~> K): Name[K] = copy(fk(name)) def rename(newName: String): Name[F] = copy(name.as(newName)) @@ -30,16 +32,12 @@ object Name { val p: P[Name[Span.S]] = `name`.lift.map(Name(_)) - val cl: P[Name[Span.S]] = - `Class`.lift.map(Name(_)) + val variable: P[Name[Span.S]] = + (name | Class).lift.map(Name(_)) val upper: P[Name[Span.S]] = NAME.lift.map(Name(_)) - val dotted: P[Name[Span.S]] = - ((`Class` ~ `.`).backtrack.rep0.?.with1 ~ P.oneOf(`name` :: NAME :: Nil)).string.lift - .map(Name(_)) - val nameAs: P[As[Span.S]] = asOpt(p) } diff --git a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala index e5a9fff7..6fa49a78 100644 --- a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala +++ b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala @@ -14,15 +14,18 @@ import cats.~> import 
aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan} import aqua.types.LiteralType +import aqua.parser.lexer.CallArrowToken.CallBraces sealed trait PropertyOp[F[_]] extends Token[F] { def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K] } -case class IntoArrow[F[_]: Comonad](name: Name[F], arguments: List[ValueToken[F]]) extends PropertyOp[F] { +case class IntoArrow[F[_]: Comonad](name: Name[F], arguments: List[ValueToken[F]]) + extends PropertyOp[F] { override def as[T](v: T): F[T] = name.as(v) - override def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K] = copy(name.mapK(fk), arguments.map(_.mapK(fk))) + override def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K] = + copy(name.mapK(fk), arguments.map(_.mapK(fk))) override def toString: String = s".$name(${arguments.map(_.toString).mkString(", ")})" } @@ -55,10 +58,12 @@ case class IntoCopy[F[_]: Comonad](point: F[Unit], fields: NonEmptyMap[String, V object PropertyOp { private val parseField: P[PropertyOp[Span.S]] = - (`.` *> `name`).lift.map(IntoField(_)) + (`.` *> anyName).lift.map(IntoField(_)) val parseArrow: P[PropertyOp[Span.S]] = - (`.` *> CallArrowToken.callBraces()).lift.map(p => IntoArrow(p._2._1, p._2._2 ++ p._2._3)) + (`.` *> CallArrowToken.callBraces).map { case CallBraces(name, abilities, args) => + IntoArrow(name, abilities ++ args) + } val parseCopy: P[PropertyOp[Span.S]] = (`.` *> (`copy`.lift ~ namedArgs)).map { case (point, fields) => @@ -67,7 +72,10 @@ object PropertyOp { private val parseIdx: P[PropertyOp[Span.S]] = (P.defer( - (ValueToken.`value`.surroundedBy(`/s*`).between(`[`.between(` *`, `/s*`), `/s*` *> `]`).lift | (exclamation *> ValueToken.num).lift) + (ValueToken.`value` + .surroundedBy(`/s*`) + .between(`[`.between(` *`, `/s*`), `/s*` *> `]`) + .lift | (exclamation *> ValueToken.num).lift) .map(v => IntoIndex(v.map(_.unit), Some(v._2))) .backtrack ) | exclamation.lift.map(e => IntoIndex(e, None))).flatMap { ii => diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala index f11c8862..fee1a1cb 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Token.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala @@ -28,7 +28,9 @@ object Token { private val inAZ = P.charIn(AZ) private val inaz = P.charIn(az) - private val whileAnum = P.charsWhile(anum_) + private val inaZ = P.charIn(az ++ AZ) + private val whileAnum_ = P.charsWhile(anum_) + private val whileUpperAnum_ = P.charsWhile(upperAnum_) val ` *` : P0[String] = P.charsWhile0(fSpaces) val ` ` : P[String] = P.charsWhile(fSpaces) @@ -68,14 +70,13 @@ object Token { val `copy`: P[Unit] = P.string("copy") val `:` : P[Unit] = P.char(':') val ` : ` : P[Unit] = P.char(':').surroundedBy(` `.?) - val `anum_*` : P[Unit] = whileAnum.void + val `anum_*` : P[Unit] = whileAnum_.void - val NAME: P[String] = (inAZ ~ P.charsWhile(upperAnum_).?).string - val `name`: P[String] = (inaz ~ whileAnum.?).string + val NAME: P[String] = (inAZ ~ whileUpperAnum_.?).string + val `name`: P[String] = (inaz ~ whileAnum_.?).string + val `Class`: P[String] = (inAZ ~ whileAnum_.?).string + val anyName: P[String] = (inaZ ~ whileAnum_.?).string - val `Class`: P[String] = (inAZ ~ whileAnum.backtrack.?).map { case (c, s) ⇒ - c.toString ++ s.getOrElse("") - } val `\n` : P[Unit] = P.string("\n\r") | P.char('\n') | P.string("\r\n") val `--` : P[Unit] = ` `.?.with1 *> P.string("--") <* ` `.? 
@@ -119,17 +120,16 @@ object Token { val `/s*` : P0[Unit] = ` \n+`.backtrack | ` *`.void val namedArg: P[(String, ValueToken[S])] = - P.defer(`name`.between(` *`, `/s*`) ~ - `=`.between(` *`, `/s*`).void ~ - ValueToken.`value`.between(` *`, `/s*`)).map { case ((name, _), vt) => + P.defer( + `name`.between(` *`, `/s*`) ~ + `=`.between(` *`, `/s*`).void ~ + ValueToken.`value`.between(` *`, `/s*`) + ).map { case ((name, _), vt) => (name, vt) } - val namedArgs: P[NonEmptyList[(String, ValueToken[S])]] = P.defer( - ((` `.?.with1 *> P.char('(') <* `/s*`) ~ comma( - namedArg - ) <* (`/s*` *> P.char(')'))).map(_._2) - ) + val namedArgs: P[NonEmptyList[(String, ValueToken[S])]] = + P.defer(` `.?.with1 ~ `(` ~ `/s*` *> comma(namedArg) <* `/s*` *> `)`) case class LiftToken[F[_]: Functor, A](point: F[A]) extends Token[F] { override def as[T](v: T): F[T] = Functor[F].as(point, v) diff --git a/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala b/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala index 38b5ed88..afc42750 100644 --- a/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala +++ b/parser/src/main/scala/aqua/parser/lexer/ValueToken.scala @@ -16,16 +16,153 @@ import cats.{~>, Comonad, Functor} import cats.data.{NonEmptyList, NonEmptyMap} import cats.syntax.foldable.* import cats.arrow.FunctionK +import cats.syntax.traverse.* +import cats.syntax.option.* sealed trait ValueToken[F[_]] extends Token[F] { def mapK[K[_]: Comonad](fk: F ~> K): ValueToken[K] } -case class VarToken[F[_]](name: Name[F], property: List[PropertyOp[F]] = Nil) - extends ValueToken[F] { +case class PropertyToken[F[_]: Comonad]( + value: ValueToken[F], + properties: NonEmptyList[PropertyOp[F]] +) extends ValueToken[F] { + override def as[T](v: T): F[T] = value.as(v) + + def mapK[K[_]: Comonad](fk: F ~> K): PropertyToken[K] = + copy(value.mapK(fk), properties.map(_.mapK(fk))) + + private def isClass(name: String): Boolean = + name.headOption.exists(_.isUpper) + + private def isField(name: String): Boolean = + name.headOption.exists(_.isLower) + + private def isConst(name: String): Boolean = + name.forall(c => !c.isLetter || c.isUpper) + + /** + * This method tries to convert property token to + * call arrow token. + * + * Next properties pattern is transformed: + * (Class)+ arrow() + * ^^^^^^^ + * this part is transformed to ability name. + */ + private def toCallArrow: Option[CallArrowToken[F]] = value match { + case VarToken(name) => + val ability = properties.init.traverse { + case f @ IntoField(_) => f.value.some + case _ => none + }.map( + name.value +: _ + ).filter( + _.forall(isClass) + ).map(props => name.rename(props.mkString("."))) + + (properties.last, ability) match { + case (IntoArrow(funcName, args), Some(ability)) => + CallArrowToken( + ability.asTypeToken.some, + funcName, + args + ).some + case _ => none + } + case _ => none + } + + /** + * This method tries to convert property token to + * property token with dotted var name inside value token. + * + * Next properties pattern is untouched: + * Class (field)* + * + * Next properties pattern is transformed: + * (Class)* (CONST | field) ..props.. + * ^^^^^^^^^^^^^^^^^^^^^^^^ + * this part is transformed to dotted name. 
+ */ + private def toDottedName: Option[ValueToken[F]] = value match { + case VarToken(name) => + // Pattern `Class (field)*` is ability access + // and should not be transformed + val isAbility = isClass(name.value) && properties.forall { + case f @ IntoField(_) => isField(f.value) + case _ => true + } + + if (isAbility) none + else { + // Gather prefix of properties that are IntoField + val props = name.value +: properties.toList.view.map { + case IntoField(name) => name.extract.some + case _ => none + }.takeWhile(_.isDefined).flatten.toList + + val propsWithIndex = props.zipWithIndex + + // Find first property that is not Class + val classesTill = propsWithIndex.find { case (name, _) => + !isClass(name) + }.collect { case (_, idx) => + idx + }.getOrElse(props.length) + + // Find last property after classes + // that is CONST or field + val lastSuitable = propsWithIndex + .take(classesTill) + .findLast { case (name, _) => + isConst(name) || isField(name) + } + .collect { case (_, idx) => idx } + + lastSuitable.map(last => + val newProps = NonEmptyList.fromList( + properties.toList.drop(last + 1) + ) + val newName = props.take(last + 1).mkString(".") + val varToken = VarToken(name.rename(newName)) + + newProps.fold(varToken)(props => PropertyToken(varToken, props)) + ) + } + case _ => none + } + + /** + * This is a hacky method to adjust parsing result + * to format that was used previously. + * This method tries to convert property token to + * call arrow token or property token with + * dotted var name inside value token. + * + * @return Some(token) if token was adjusted, None otherwise + */ + def adjust: Option[ValueToken[F]] = + toCallArrow.orElse(toDottedName) +} + +object PropertyToken { + + val property: P[ValueToken[Span.S]] = + (ValueToken.basic ~ PropertyOp.ops.backtrack.?).map { case (v, ops) => + ops.fold(v)(ops => PropertyToken(v, ops)) + } + +} + +case class VarToken[F[_]](name: Name[F]) extends ValueToken[F] { override def as[T](v: T): F[T] = name.as(v) - def mapK[K[_]: Comonad](fk: F ~> K): VarToken[K] = copy(name.mapK(fk), property.map(_.mapK(fk))) + def mapK[K[_]: Comonad](fk: F ~> K): VarToken[K] = copy(name.mapK(fk)) +} + +object VarToken { + lazy val variable: P[VarToken[Span.S]] = Name.variable.map(VarToken(_)) } case class LiteralToken[F[_]: Comonad](valueToken: F[String], ts: LiteralType) @@ -74,6 +211,9 @@ object CollectionToken { } case class CallArrowToken[F[_]: Comonad]( + // NOTE: Call with ability is not parsed by CallArrowToken + // it is parsed by PropertyToken and then adjusted + // It is done for legacy support reasons ability: Option[NamedTypeToken[F]], funcName: Name[F], args: List[ValueToken[F]] @@ -87,17 +227,23 @@ case class CallArrowToken[F[_]: Comonad]( object CallArrowToken { + def apply[F[_]: Comonad](funcName: Name[F], args: List[ValueToken[F]]): CallArrowToken[F] = + CallArrowToken(None, funcName, args) + case class CallBraces(name: Name[S], abilities: List[ValueToken[S]], args: List[ValueToken[S]]) // {SomeAb, SecondAb} for ValueToken def abilities(): P[NonEmptyList[ValueToken[S]]] = `{` *> comma(ValueToken.`value`.surroundedBy(`/s*`)) <* `}` - def callBraces(): P[CallBraces] = P + lazy val callBraces: P[CallBraces] = P .defer( - Name.p - ~ abilities().? ~ comma0(ValueToken.`value`.surroundedBy(`/s*`)) - .between(` `.?.with1 *> `(` <* `/s*`, `/s*` *> `)`) + Name.p ~ + abilities().? 
~ + comma0(ValueToken.`value`.surroundedBy(`/s*`)).between( + ` `.?.with1 *> `(` <* `/s*`, + `/s*` *> `)` + ) ) .map { case ((n, ab), args) => CallBraces(n, ab.map(_.toList).getOrElse(Nil), args) @@ -107,12 +253,8 @@ object CallArrowToken { ) val callArrow: P[CallArrowToken[Span.S]] = - ((NamedTypeToken.dotted <* `.`).?.with1 ~ - callBraces() - .withContext( - "Missing braces '()' after the function call" - )).map { case (ab, callBraces) => - CallArrowToken(ab, callBraces.name, callBraces.abilities ++ callBraces.args) + callBraces.map { braces => + CallArrowToken(braces.name, braces.abilities ++ braces.args) } } @@ -135,7 +277,7 @@ object NamedValueToken { "Missing braces '()' after the struct type" ) .map { case (dn, args) => - NamedValueToken(NamedTypeToken(dn), NonEmptyMap.of(args.head, args.tail: _*)) + NamedValueToken(NamedTypeToken(dn), args.toNem) } } @@ -409,16 +551,6 @@ object PrefixToken { object ValueToken { - val varProperty: P[VarToken[Span.S]] = - (Name.dotted ~ PropertyOp.ops.?).map { case (n, l) ⇒ - VarToken(n, l.foldMap(_.toList)) - } - - val abProperty: P[VarToken[Span.S]] = - (Name.cl ~ PropertyOp.ops.?).map { case (n, l) ⇒ - VarToken(n, l.foldMap(_.toList)) - } - val bool: P[LiteralToken[Span.S]] = P.oneOf( ("true" :: "false" :: Nil) @@ -456,20 +588,23 @@ object ValueToken { private def brackets(basic: P[ValueToken[Span.S]]): P[ValueToken[Span.S]] = basic.between(`(`, `)`).backtrack - // Basic element of math expression - val atom: P[ValueToken[S]] = P.oneOf( + // Basic element of value expression + // (without property access) + val basic = P.oneOf( literal.backtrack :: initPeerId.backtrack :: P.defer(CollectionToken.collection).backtrack :: P.defer(NamedValueToken.dataValue).backtrack :: P.defer(CallArrowToken.callArrow).backtrack :: - P.defer(abProperty).backtrack :: + P.defer(VarToken.variable).backtrack :: P.defer(PrefixToken.value).backtrack :: - P.defer(brackets(InfixToken.value)).backtrack :: - varProperty :: + P.defer(brackets(value)).backtrack :: Nil ) + // Atomic element of math expression + val atom: P[ValueToken[S]] = P.defer(PropertyToken.property) + // One of entry points for parsing the whole math expression val `value`: P[ValueToken[Span.S]] = P.defer(InfixToken.value) diff --git a/parser/src/test/scala/aqua/AquaSpec.scala b/parser/src/test/scala/aqua/AquaSpec.scala index da37a52c..4a5145d4 100644 --- a/parser/src/test/scala/aqua/AquaSpec.scala +++ b/parser/src/test/scala/aqua/AquaSpec.scala @@ -20,6 +20,7 @@ import aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan} import cats.~> import cats.syntax.bifunctor.* +import cats.data.NonEmptyList import scala.collection.mutable import scala.language.implicitConversions @@ -33,35 +34,43 @@ object AquaSpec { } } - implicit def toAb(str: String): Ability[Id] = Ability[Id](str) + def toName(str: String): Name[Id] = Name[Id](str) - implicit def toName(str: String): Name[Id] = Name[Id](str) - implicit def toNameOp(str: Option[String]): Option[Name[Id]] = str.map(s => toName(s)) + def toNameOp(str: Option[String]): Option[Name[Id]] = str.map(s => toName(s)) - implicit def toFields(fields: List[String]): List[IntoField[Id]] = - fields.map(f => IntoField[Id](f)) + def toAb(str: String): Ability[Id] = Ability[Id](str) - implicit def toVar(name: String): VarToken[Id] = VarToken[Id](toName(name), Nil) + def toVar(name: String): VarToken[Id] = VarToken[Id](toName(name)) - implicit def toVarOp(name: Option[String]): Option[VarToken[Id]] = - name.map(s => VarToken[Id](toName(s), Nil)) + def 
toVarOp(name: Option[String]): Option[VarToken[Id]] = + name.map(toVar) - implicit def toVarLambda(name: String, fields: List[String]): VarToken[Id] = - VarToken[Id](toName(name), toFields(fields)) + def toVarLambda(name: String, fields: List[String]): ValueToken[Id] = + NonEmptyList + .fromList(fields) + .fold(toVar(name))(fs => + PropertyToken( + toVar(name), + fs.map(IntoField[Id].apply) + ) + ) - implicit def toVarIndex(name: String, idx: Int): VarToken[Id] = - VarToken[Id](toName(name), IntoIndex[Id](toNumber(idx).unit, Some(toNumber(idx))) :: Nil) - implicit def toLiteral(name: String, t: LiteralType): LiteralToken[Id] = LiteralToken[Id](name, t) + def toVarIndex(name: String, idx: Int): PropertyToken[Id] = + PropertyToken[Id]( + VarToken[Id](toName(name)), + NonEmptyList.one(IntoIndex[Id](toNumber(idx).unit, Some(toNumber(idx)))) + ) - implicit def toNumber(n: Int): LiteralToken[Id] = - LiteralToken[Id](n.toString, LiteralType.forInt(n)) - implicit def toBool(n: Boolean): LiteralToken[Id] = LiteralToken[Id](n.toString, bool) - implicit def toStr(n: String): LiteralToken[Id] = LiteralToken[Id]("\"" + n + "\"", string) + def toLiteral(name: String, t: LiteralType): LiteralToken[Id] = LiteralToken[Id](name, t) - implicit def toNamedType(str: String): NamedTypeToken[Id] = NamedTypeToken[Id](str) + def toNumber(n: Int): LiteralToken[Id] = LiteralToken[Id](n.toString, LiteralType.forInt(n)) + def toBool(n: Boolean): LiteralToken[Id] = LiteralToken[Id](n.toString, bool) + def toStr(n: String): LiteralToken[Id] = LiteralToken[Id]("\"" + n + "\"", string) + + def toNamedType(str: String): NamedTypeToken[Id] = NamedTypeToken[Id](str) def toArrayType(str: String): ArrayTypeToken[Id] = ArrayTypeToken[Id]((), str) - implicit def toArrowType( + def toArrowType( args: List[DataTypeToken[Id]], res: Option[DataTypeToken[Id]] ): ArrowTypeToken[Id] = @@ -73,18 +82,23 @@ object AquaSpec { ): ArrowTypeToken[Id] = ArrowTypeToken[Id]((), args.map(ab => Some(Name[Id](ab._1)) -> ab._2), res) - implicit def toNamedArg(str: String, customType: String): Arg[Id] = + def toNamedArg(str: String, customType: String): Arg[Id] = Arg[Id](str, toNamedType(customType)) - implicit def toArg(str: String, typeToken: TypeToken[Id]): Arg[Id] = Arg[Id](str, typeToken) + def toArg(str: String, typeToken: TypeToken[Id]): Arg[Id] = Arg[Id](str, typeToken) - implicit def toArgSc(str: String, scalarType: ScalarType): Arg[Id] = + def toArgSc(str: String, scalarType: ScalarType): Arg[Id] = Arg[Id](str, scToBt(scalarType)) - implicit def scToBt(sc: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](sc) + def scToBt(sc: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](sc) val boolSc: BasicTypeToken[Id] = BasicTypeToken[Id](ScalarType.bool) val stringSc: BasicTypeToken[Id] = BasicTypeToken[Id](ScalarType.string) + + given Conversion[String, Name[Id]] = toName + given Conversion[String, NamedTypeToken[Id]] = toNamedType + given Conversion[Int, LiteralToken[Id]] = toNumber + given Conversion[ScalarType, BasicTypeToken[Id]] = scToBt } trait AquaSpec extends EitherValues { @@ -116,8 +130,8 @@ trait AquaSpec extends EitherValues { def parseAssign(str: String): AssignmentExpr[Id] = AssignmentExpr.p.parseAll(str).value.mapK(spanToId) - def parseVar(str: String): VarToken[Id] = - ValueToken.varProperty.parseAll(str).value.mapK(spanToId) + def parseVar(str: String): ValueToken[Id] = + ValueToken.value.parseAll(str).value.mapK(spanToId) def parseData(str: String): NamedValueToken[Id] = 
NamedValueToken.dataValue.parseAll(str).value.mapK(spanToId) diff --git a/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala b/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala index 6374ad11..26c4d6a3 100644 --- a/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/AbilityValueExprSpec.scala @@ -16,14 +16,15 @@ class AbilityValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec { import AquaSpec.* private def parseAndCheckAbility(str: String) = { - parseData( - str - ) should be( + parseData(str) should be( NamedValueToken( NamedTypeToken[Id]("AbilityA"), NonEmptyMap.of( "v1" -> toNumber(1), - "f1" -> VarToken(Name[Id]("input"), IntoField[Id]("arrow") :: Nil) + "f1" -> PropertyToken[Id]( + VarToken(toName("input")), + NonEmptyList.one(IntoField("arrow")) + ) ) ) ) diff --git a/parser/src/test/scala/aqua/parser/AliasExprSpec.scala b/parser/src/test/scala/aqua/parser/AliasExprSpec.scala index aa0371c3..e998aef1 100644 --- a/parser/src/test/scala/aqua/parser/AliasExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/AliasExprSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.matchers.should.Matchers class AliasExprSpec extends AnyFlatSpec with Matchers with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} "alias" should "be parsed properly" in { parseAlias("alias SomeAlias : u32") should be( diff --git a/parser/src/test/scala/aqua/parser/ArrowTypeExprSpec.scala b/parser/src/test/scala/aqua/parser/ArrowTypeExprSpec.scala index 4d8dadb6..2036d248 100644 --- a/parser/src/test/scala/aqua/parser/ArrowTypeExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/ArrowTypeExprSpec.scala @@ -10,7 +10,7 @@ import org.scalatest.matchers.should.Matchers class ArrowTypeExprSpec extends AnyFlatSpec with Matchers with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} "arrow types" should "be parsed properly" in { parseArrow("onIn: string -> ()") should be( @@ -31,7 +31,14 @@ class ArrowTypeExprSpec extends AnyFlatSpec with Matchers with AquaSpec { parseArrow("onIn{SomeAb}(a: Custom, b: Custom2)") should be( ArrowTypeExpr[Id]( "onIn", - toNamedArrow(List("SomeAb" -> toNamedType("SomeAb"), "a" -> toNamedType("Custom"), "b" -> toNamedType("Custom2")), Nil) + toNamedArrow( + List( + "SomeAb" -> toNamedType("SomeAb"), + "a" -> toNamedType("Custom"), + "b" -> toNamedType("Custom2") + ), + Nil + ) ) ) diff --git a/parser/src/test/scala/aqua/parser/AssignmentExprSpec.scala b/parser/src/test/scala/aqua/parser/AssignmentExprSpec.scala index 74d07e09..55ec978b 100644 --- a/parser/src/test/scala/aqua/parser/AssignmentExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/AssignmentExprSpec.scala @@ -8,7 +8,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class AssignmentExprSpec extends AnyFlatSpec with Matchers with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} "assign" should "be parsed" in { parseAssign("a = \"b\"") should be( diff --git a/parser/src/test/scala/aqua/parser/CallArrowSpec.scala b/parser/src/test/scala/aqua/parser/CallArrowSpec.scala index 7328c903..c659508d 100644 --- a/parser/src/test/scala/aqua/parser/CallArrowSpec.scala +++ b/parser/src/test/scala/aqua/parser/CallArrowSpec.scala @@ -2,7 +2,9 @@ package aqua.parser import aqua.AquaSpec import aqua.parser.expr.func.CallArrowExpr -import aqua.parser.lexer.{CallArrowToken, Name, VarToken} +import aqua.parser.lexer.{CallArrowToken, IntoArrow, Name, PropertyToken, VarToken} + +import 
cats.data.NonEmptyList import cats.Id import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -12,19 +14,25 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { "func calls" should "parse func()" in { parseExpr("func()") should be( - CallArrowExpr[Id](Nil, CallArrowToken(None, toName("func"), Nil)) + CallArrowExpr[Id](Nil, CallArrowToken(toName("func"), Nil)) ) + parseExpr("Ab.func(arg)") should be( CallArrowExpr[Id]( Nil, - CallArrowToken(Some(toNamedType("Ab")), Name[Id]("func"), List(VarToken[Id](toName("arg")))) + PropertyToken[Id]( + VarToken[Id](toName("Ab")), + NonEmptyList.one( + IntoArrow(toName("func"), toVar("arg") :: Nil) + ) + ) ) ) parseExpr("func(arg.doSomething)") should be( CallArrowExpr[Id]( Nil, - CallArrowToken(None, Name[Id]("func"), List(toVarLambda("arg", List("doSomething")))) + CallArrowToken(Name[Id]("func"), List(toVarLambda("arg", List("doSomething")))) ) ) @@ -32,7 +40,6 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { CallArrowExpr[Id]( Nil, CallArrowToken( - None, Name[Id]("func"), List(toVarLambda("arg", List("doSomething", "and", "doSomethingElse"))) ) @@ -43,7 +50,6 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { CallArrowExpr[Id]( Nil, CallArrowToken( - None, Name[Id]("func"), List(toVarLambda("arg", List("doSomething", "and", "doSomethingElse"))) ) @@ -53,12 +59,16 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { parseExpr("Ab.func(arg.doSomething.and.doSomethingElse, arg2.someFunc)") should be( CallArrowExpr[Id]( Nil, - CallArrowToken( - Some(toNamedType("Ab")), - Name[Id]("func"), - List( - toVarLambda("arg", List("doSomething", "and", "doSomethingElse")), - toVarLambda("arg2", List("someFunc")) + PropertyToken[Id]( + VarToken[Id](toName("Ab")), + NonEmptyList.one( + IntoArrow( + toName("func"), + List( + toVarLambda("arg", List("doSomething", "and", "doSomethingElse")), + toVarLambda("arg2", List("someFunc")) + ) + ) ) ) ) @@ -68,7 +78,6 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { CallArrowExpr[Id]( List(toName("x")), CallArrowToken( - None, Name[Id]("func"), List( toVarLambda("arg", List("doSomething")) @@ -81,7 +90,6 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { CallArrowExpr[Id]( toName("x") :: toName("y") :: toName("z") :: Nil, CallArrowToken( - None, Name[Id]("func"), List( toVarLambda("arg", List("doSomething")) diff --git a/parser/src/test/scala/aqua/parser/ClosureExprSpec.scala b/parser/src/test/scala/aqua/parser/ClosureExprSpec.scala index 5400e372..1d3ff28e 100644 --- a/parser/src/test/scala/aqua/parser/ClosureExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/ClosureExprSpec.scala @@ -2,8 +2,16 @@ package aqua.parser import aqua.AquaSpec import aqua.parser.expr.{FuncExpr, RootExpr} -import aqua.parser.expr.func.{ArrowExpr, CallArrowExpr, ClosureExpr, ReturnExpr} -import aqua.parser.lexer.{Ability, CallArrowToken, NamedTypeToken, Token, VarToken} +import aqua.parser.expr.func.{ArrowExpr, AssignmentExpr, CallArrowExpr, ClosureExpr, ReturnExpr} +import aqua.parser.lexer.{ + Ability, + CallArrowToken, + IntoArrow, + NamedTypeToken, + PropertyToken, + Token, + VarToken +} import aqua.types.ScalarType.string import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -15,7 +23,7 @@ import scala.collection.mutable class ClosureExprSpec extends AnyFlatSpec with Matchers with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} 
val parser = Parser.spanParser @@ -53,16 +61,26 @@ class ClosureExprSpec extends AnyFlatSpec with Matchers with AquaSpec { qTree.d() shouldBe ArrowExpr(toNamedArrow(("s", scToBt(string)) :: Nil, scToBt(string) :: Nil)) qTree.d() shouldBe CallArrowExpr( Nil, - CallArrowToken(Some(NamedTypeToken[Id]("LocalSrv")), toName("inside"), Nil) + PropertyToken[Id]( + VarToken[Id](toName("LocalSrv")), + NonEmptyList.one( + IntoArrow[Id](toName("inside"), Nil) + ) + ) ) qTree.d() shouldBe CallArrowExpr( toName("p2Id") :: Nil, - CallArrowToken(Some(NamedTypeToken[Id]("Peer")), toName("identify"), Nil) + PropertyToken[Id]( + VarToken[Id](toName("Peer")), + NonEmptyList.one( + IntoArrow[Id](toName("identify"), Nil) + ) + ) ) qTree.d() shouldBe ReturnExpr(NonEmptyList(VarToken[Id](toName("p2Id")), Nil)) qTree.d() shouldBe CallArrowExpr( toName("v") :: Nil, - CallArrowToken(None, toName("closure"), toStr("input") :: Nil) + CallArrowToken(toName("closure"), toStr("input") :: Nil) ) qTree.d() shouldBe ReturnExpr(NonEmptyList(VarToken[Id](toName("v")), Nil)) } diff --git a/parser/src/test/scala/aqua/parser/CoExprSpec.scala b/parser/src/test/scala/aqua/parser/CoExprSpec.scala index b895e984..5f5bead0 100644 --- a/parser/src/test/scala/aqua/parser/CoExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/CoExprSpec.scala @@ -1,34 +1,95 @@ package aqua.parser import aqua.AquaSpec -import aqua.AquaSpec.spanToId -import aqua.parser.expr.func.{CallArrowExpr, CoExpr} +import aqua.AquaSpec.* +import aqua.parser.expr.func.{CallArrowExpr, CoExpr, ForExpr, JoinExpr, OnExpr} import aqua.parser.lexer.{CallArrowToken, Token} import aqua.parser.lift.LiftParser.Implicits.idLiftParser -import cats.data.Chain + +import cats.data.{Chain, NonEmptyList} import cats.free.Cofree import cats.{Eval, Id} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +import org.scalatest.Inside -class CoExprSpec extends AnyFlatSpec with Matchers with AquaSpec { +class CoExprSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec { + + def insideCo(str: String)(testFun: Ast.Tree[Id] => Any) = + inside(CoExpr.readLine.parseAll(str).map(_.map(_.mapK(spanToId)).forceAll)) { + case Right(tree) => testFun(tree) + } + + def co(expr: Expr[Id]): Ast.Tree[Id] = + Cofree( + CoExpr(Token.lift(())), + Eval.now( + Chain( + Cofree( + expr, + Eval.now(Chain.empty) + ) + ) + ) + ) "co" should "be parsed" in { - CoExpr.readLine.parseAll("co x <- y()").value.map(_.mapK(spanToId)).forceAll should be( - Cofree[Chain, Expr[Id]]( - CoExpr[Id](Token.lift[Id, Unit](())), - Eval.now( - Chain( - Cofree[Chain, Expr[Id]]( - CallArrowExpr( - List(AquaSpec.toName("x")), - CallArrowToken(None, AquaSpec.toName("y"), Nil) - ), - Eval.now(Chain.empty) + insideCo("co x <- y()")( + _ should be( + co( + CallArrowExpr( + List(toName("x")), + CallArrowToken(toName("y"), Nil) + ) + ) + ) + ) + + insideCo("co call()")( + _ should be( + co( + CallArrowExpr( + Nil, + CallArrowToken(toName("call"), Nil) + ) + ) + ) + ) + + insideCo("co on call() via relay:")( + _ should be( + co( + OnExpr( + CallArrowToken(toName("call"), Nil), + toVar("relay") :: Nil + ) + ) + ) + ) + + insideCo("co join call(), x")( + _ should be( + co( + JoinExpr( + NonEmptyList.of( + CallArrowToken(toName("call"), Nil), + toVar("x") ) ) ) ) ) + + insideCo("co for w <- getWorkers():")( + _ should be( + co( + ForExpr( + toName("w"), + CallArrowToken(toName("getWorkers"), Nil), + None + ) + ) + ) + ) } } diff --git a/parser/src/test/scala/aqua/parser/DataStructExprSpec.scala 
b/parser/src/test/scala/aqua/parser/DataStructExprSpec.scala index e16527bf..bfa583aa 100644 --- a/parser/src/test/scala/aqua/parser/DataStructExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/DataStructExprSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.matchers.should.Matchers class DataStructExprSpec extends AnyFlatSpec with Matchers with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} "data struct" should "be parsed properly" in { parseDataStruct("data Smth") should be( diff --git a/parser/src/test/scala/aqua/parser/FieldTypeExprSpec.scala b/parser/src/test/scala/aqua/parser/FieldTypeExprSpec.scala index 0382ea71..796f08d7 100644 --- a/parser/src/test/scala/aqua/parser/FieldTypeExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/FieldTypeExprSpec.scala @@ -8,7 +8,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class FieldTypeExprSpec extends AnyFlatSpec with Matchers with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} "else" should "be parsed" in { parseFieldType("some: bool") should be( diff --git a/parser/src/test/scala/aqua/parser/ForExprSpec.scala b/parser/src/test/scala/aqua/parser/ForExprSpec.scala index e8774ddc..ec0ab277 100644 --- a/parser/src/test/scala/aqua/parser/ForExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/ForExprSpec.scala @@ -7,7 +7,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class ForExprSpec extends AnyFlatSpec with Matchers with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} "for expression" should "be parsed" in { parseFor("for some <- \"a\"") should be( diff --git a/parser/src/test/scala/aqua/parser/FuncExprSpec.scala b/parser/src/test/scala/aqua/parser/FuncExprSpec.scala index 369397e0..7c1ddce6 100644 --- a/parser/src/test/scala/aqua/parser/FuncExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/FuncExprSpec.scala @@ -29,9 +29,12 @@ import scala.collection.mutable import scala.language.implicitConversions import aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan} +import aqua.parser.lexer.PropertyToken +import aqua.parser.lexer.IntoArrow +import aqua.parser.expr.func.AssignmentExpr class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors with AquaSpec { - import AquaSpec._ + import AquaSpec.{given, *} private val parser = Parser.spanParser @@ -110,11 +113,19 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors ).toList ifBody.head.head.mapK(spanToId) should be( - CallArrowExpr(List(toName("x")), CallArrowToken(Some(toNamedType("Ab")), "func", Nil)) + CallArrowExpr( + List(toName("x")), + PropertyToken[Id]( + VarToken[Id](toName("Ab")), + NonEmptyList.one( + IntoArrow[Id](toName("func"), Nil) + ) + ) + ) ) ifBody(1).head.mapK(spanToId) should be(AbilityIdExpr(toNamedType("Peer"), toStr("some id"))) ifBody(2).head.mapK(spanToId) should be( - CallArrowExpr(Nil, CallArrowToken(None, "call", List(toBool(true)))) + CallArrowExpr(Nil, CallArrowToken("call", List(toBool(true)))) ) } @@ -255,7 +266,12 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors qTree.d() shouldBe OnExpr(toStr("deeper"), List(toStr("deep"))) qTree.d() shouldBe CallArrowExpr( List("v"), - CallArrowToken(Some(toNamedType("Local")), "gt", Nil) + PropertyToken[Id]( + VarToken[Id](toName("Local")), + NonEmptyList.one( + IntoArrow[Id](toName("gt"), Nil) + ) + ) ) qTree.d() shouldBe ReturnExpr(NonEmptyList.one(toVar("v"))) // genC function @@ 
-267,13 +283,23 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors qTree.d() shouldBe ArrowExpr(toNamedArrow(("val" -> string) :: Nil, boolSc :: Nil)) qTree.d() shouldBe CallArrowExpr( List("one"), - CallArrowToken(Some(toNamedType("Local")), "gt", Nil) + PropertyToken[Id]( + VarToken[Id](toName("Local")), + NonEmptyList.one( + IntoArrow[Id](toName("gt"), Nil) + ) + ) ) qTree.d() shouldBe OnExpr(toStr("smth"), List(toStr("else"))) - qTree.d() shouldBe CallArrowExpr(List("two"), CallArrowToken(None, "tryGen", Nil)) + qTree.d() shouldBe CallArrowExpr(List("two"), CallArrowToken("tryGen", Nil)) qTree.d() shouldBe CallArrowExpr( List("three"), - CallArrowToken(Some(toNamedType("Local")), "gt", Nil) + PropertyToken[Id]( + VarToken[Id](toName("Local")), + NonEmptyList.one( + IntoArrow[Id](toName("gt"), Nil) + ) + ) ) qTree.d() shouldBe ReturnExpr(NonEmptyList.one(toVar("two"))) } diff --git a/parser/src/test/scala/aqua/parser/IfExprSpec.scala b/parser/src/test/scala/aqua/parser/IfExprSpec.scala index 490ff037..dc247e71 100644 --- a/parser/src/test/scala/aqua/parser/IfExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/IfExprSpec.scala @@ -3,9 +3,19 @@ package aqua.parser import aqua.AquaSpec import aqua.parser.expr.func.IfExpr import aqua.parser.lexer.InfixToken.Op.{Add, Sub} -import aqua.parser.lexer.{CallArrowToken, CollectionToken, InfixToken} +import aqua.parser.lexer.{ + CallArrowToken, + CollectionToken, + InfixToken, + IntoArrow, + PropertyToken, + ValueToken, + VarToken +} import aqua.parser.lexer.CollectionToken.Mode.OptionMode + import cats.Id +import cats.data.NonEmptyList import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -41,27 +51,43 @@ class IfExprSpec extends AnyFlatSpec with Matchers with AquaSpec { parseIf("if Op.identity(\"str\") == \"a\"") should be( IfExpr[Id]( equ( - CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toStr("str") :: Nil), + PropertyToken[Id]( + VarToken[Id](toName("Op")), + NonEmptyList.one( + IntoArrow(toName("identity"), toStr("str") :: Nil) + ) + ), toStr("a") ) ) ) - parseIf("if Op.identity(\"str\") != Op.identity(\"str\")") should be( - IfExpr[Id]( - neq( - CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toStr("str") :: Nil), - CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toStr("str") :: Nil) + parseIf("if Op.identity(\"str\") != Op.identity(\"str\")") should be { + val operand = PropertyToken[Id]( + VarToken[Id](toName("Op")), + NonEmptyList.one( + IntoArrow(toName("identity"), toStr("str") :: Nil) ) ) - ) + IfExpr[Id]( + neq( + operand, + operand + ) + ) + } parseIf("if 2 - 3 != Op.identity(4) + 5") should be( IfExpr[Id]( neq( sub(toNumber(2), toNumber(3)), add( - CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toNumber(4) :: Nil), + PropertyToken[Id]( + VarToken[Id](toName("Op")), + NonEmptyList.one( + IntoArrow(toName("identity"), toNumber(4) :: Nil) + ) + ), toNumber(5) ) ) @@ -71,8 +97,8 @@ class IfExprSpec extends AnyFlatSpec with Matchers with AquaSpec { parseIf("if funcCall(3) == funcCall2(4)") should be( IfExpr[Id]( equ( - CallArrowToken[Id](None, toName("funcCall"), toNumber(3) :: Nil), - CallArrowToken[Id](None, toName("funcCall2"), toNumber(4) :: Nil) + CallArrowToken[Id](toName("funcCall"), toNumber(3) :: Nil), + CallArrowToken[Id](toName("funcCall2"), toNumber(4) :: Nil) ) ) ) diff --git a/parser/src/test/scala/aqua/parser/IntoArrowSpec.scala b/parser/src/test/scala/aqua/parser/IntoArrowSpec.scala 
index 4354332c..9253066e 100644 --- a/parser/src/test/scala/aqua/parser/IntoArrowSpec.scala +++ b/parser/src/test/scala/aqua/parser/IntoArrowSpec.scala @@ -1,10 +1,12 @@ package aqua.parser import aqua.AquaSpec -import aqua.parser.lexer.{IntoArrow, PropertyOp, VarToken} +import aqua.parser.lexer.{IntoArrow, PropertyOp, PropertyToken, VarToken} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers + import cats.Id +import cats.data.NonEmptyList class IntoArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { import AquaSpec.* @@ -27,7 +29,11 @@ class IntoArrowSpec extends AnyFlatSpec with Matchers with AquaSpec { val arrowStr = "input.arrow(\"\")" val result = parseVar(arrowStr) - val expected = VarToken[Id](toName("input"), IntoArrow[Id](toName("arrow"), toStr("") :: Nil) :: Nil) + val expected = PropertyToken[Id]( + VarToken[Id](toName("input")), + NonEmptyList.one(IntoArrow[Id](toName("arrow"), toStr("") :: Nil)) + ) + result should be(expected) } } diff --git a/parser/src/test/scala/aqua/parser/ParExprSpec.scala b/parser/src/test/scala/aqua/parser/ParExprSpec.scala index 86f90eb3..9d962899 100644 --- a/parser/src/test/scala/aqua/parser/ParExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/ParExprSpec.scala @@ -1,40 +1,95 @@ package aqua.parser import aqua.AquaSpec -import aqua.parser.expr.func.{CallArrowExpr, ParExpr} +import aqua.AquaSpec.* +import aqua.parser.expr.func.{CallArrowExpr, ForExpr, JoinExpr, OnExpr, ParExpr} import aqua.parser.lexer.{CallArrowToken, Token} +import aqua.parser.lift.LiftParser.Implicits.idLiftParser + import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +import org.scalatest.Inside import cats.{Eval, Id} -import aqua.parser.lift.LiftParser.Implicits.idLiftParser -import cats.data.Chain +import cats.data.{Chain, NonEmptyList} import cats.free.Cofree -class ParExprSpec extends AnyFlatSpec with Matchers with AquaSpec { +class ParExprSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec { - import AquaSpec._ + def insidePar(str: String)(testFun: Ast.Tree[Id] => Any) = + inside(ParExpr.readLine.parseAll(str).map(_.map(_.mapK(spanToId)).forceAll)) { + case Right(tree) => testFun(tree) + } + + def par(expr: Expr[Id]): Ast.Tree[Id] = + Cofree( + ParExpr(Token.lift(())), + Eval.now( + Chain( + Cofree( + expr, + Eval.now(Chain.empty) + ) + ) + ) + ) "par" should "be parsed" in { - ParExpr.readLine.parseAll("par x <- y()").value.map(_.mapK(spanToId)).forceAll should be( - Cofree[Chain, Expr[Id]]( - ParExpr[Id](Token.lift[Id, Unit](())), - Eval.now( - Chain( - Cofree[Chain, Expr[Id]]( - CallArrowExpr( - List(AquaSpec.toName("x")), - CallArrowToken( - None, - AquaSpec.toName("y"), - Nil - ) + insidePar("par x <- y()")( + _ should be( + par( + CallArrowExpr( + List(toName("x")), + CallArrowToken(toName("y"), Nil) + ) + ) + ) + ) - ), - Eval.now(Chain.empty) + insidePar("par call()")( + _ should be( + par( + CallArrowExpr( + Nil, + CallArrowToken(toName("call"), Nil) + ) + ) + ) + ) + + insidePar("par on call() via relay:")( + _ should be( + par( + OnExpr( + CallArrowToken(toName("call"), Nil), + toVar("relay") :: Nil + ) + ) + ) + ) + + insidePar("par join call(), x")( + _ should be( + par( + JoinExpr( + NonEmptyList.of( + CallArrowToken(toName("call"), Nil), + toVar("x") ) ) ) ) ) + + insidePar("par for w <- getWorkers():")( + _ should be( + par( + ForExpr( + toName("w"), + CallArrowToken(toName("getWorkers"), Nil), + None + ) + ) + ) + ) } } diff --git 
diff --git a/parser/src/test/scala/aqua/parser/PushToStreamExprSpec.scala b/parser/src/test/scala/aqua/parser/PushToStreamExprSpec.scala
index fd3e125e..c52ce171 100644
--- a/parser/src/test/scala/aqua/parser/PushToStreamExprSpec.scala
+++ b/parser/src/test/scala/aqua/parser/PushToStreamExprSpec.scala
@@ -7,7 +7,7 @@ import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers

 class PushToStreamExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
-  import AquaSpec._
+  import AquaSpec.{given, *}

   "assign" should "be parsed" in {
     parsePush("a <<- \"b\"") should be(
diff --git a/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala b/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala
index f9eb2974..19bc4060 100644
--- a/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala
+++ b/parser/src/test/scala/aqua/parser/StructValueExprSpec.scala
@@ -13,6 +13,7 @@ import aqua.parser.lexer.{
   Name,
   NamedTypeToken,
   NamedValueToken,
+  PropertyToken,
   Token,
   ValueToken,
   VarToken
@@ -51,12 +52,15 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
           NonEmptyMap.of(
             "i1" -> two,
             "i2" -> b,
-            "i3" -> CallArrowToken(None, Name[Id]("funcCall"), List(three)),
-            "i4" -> VarToken[Id](Name[Id]("value"), Nil)
+            "i3" -> CallArrowToken(Name[Id]("funcCall"), List(three)),
+            "i4" -> VarToken[Id](Name[Id]("value"))
           )
         ),
-        "f6" -> CallArrowToken(None, Name[Id]("funcCall"), List(one)),
-        "f7" -> CallArrowToken(Option(NamedTypeToken[Id]("Serv")), Name[Id]("call"), List(two))
+        "f6" -> CallArrowToken(Name[Id]("funcCall"), List(one)),
+        "f7" -> PropertyToken[Id](
+          VarToken[Id](Name[Id]("Serv")),
+          NonEmptyList.one(IntoArrow[Id](Name[Id]("call"), List(two)))
+        )
       )
     )
   )
diff --git a/parser/src/test/scala/aqua/parser/ValueTokenComplexSpec.scala b/parser/src/test/scala/aqua/parser/ValueTokenComplexSpec.scala
index 82d462b3..1f96fc90 100644
--- a/parser/src/test/scala/aqua/parser/ValueTokenComplexSpec.scala
+++ b/parser/src/test/scala/aqua/parser/ValueTokenComplexSpec.scala
@@ -26,13 +26,13 @@ class ValueTokenComplexSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec {
     }
   }

-  import AquaSpec.*
+  import AquaSpec.{given, *}

   private def variable(name: String): ValueToken[Id] =
-    VarToken(Name(name), Nil)
+    VarToken(Name(name))

   private def func(name: String, args: List[ValueToken[Id]]): ValueToken[Id] =
-    CallArrowToken(None, Name(name), args)
+    CallArrowToken(Name(name), args)

   private def literal(n: Int): ValueToken[Id] = toNumber(n)

diff --git a/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala b/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala
index 727d1f61..14dbb5e0 100644
--- a/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala
+++ b/parser/src/test/scala/aqua/parser/lexer/PropertyOpSpec.scala
@@ -10,7 +10,7 @@ import org.scalatest.matchers.should.Matchers

 class PropertyOpSpec extends AnyFlatSpec with Matchers with EitherValues {

-  import aqua.AquaSpec._
+  import aqua.AquaSpec.{given, *}

   "lambda ops" should "parse" in {
     val opsP = (s: String) => PropertyOp.ops.parseAll(s).value.map(_.mapK(spanToId))
diff --git a/parser/src/test/scala/aqua/parser/lexer/ValueTokenSpec.scala b/parser/src/test/scala/aqua/parser/lexer/ValueTokenSpec.scala
index 6fe30809..6a584d3c 100644
--- a/parser/src/test/scala/aqua/parser/lexer/ValueTokenSpec.scala
+++ b/parser/src/test/scala/aqua/parser/lexer/ValueTokenSpec.scala
@@ -5,7 +5,9 @@ import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
 import aqua.parser.lift.LiftParser.Implicits.idLiftParser
 import aqua.types.LiteralType
+
 import cats.Id
+import cats.data.NonEmptyList

 class ValueTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
@@ -13,13 +15,21 @@ class ValueTokenSpec extends AnyFlatSpec with Matchers with EitherValues {

   "var getter" should "parse" in {
     ValueToken.`value`.parseAll("varname").value.mapK(spanToId) should be(
-      VarToken(Name[Id]("varname"), Nil)
+      VarToken(Name[Id]("varname"))
     )
+
     ValueToken.`value`.parseAll("varname.field").value.mapK(spanToId) should be(
-      VarToken(Name[Id]("varname"), IntoField[Id]("field") :: Nil)
+      PropertyToken[Id](
+        VarToken(Name[Id]("varname")),
+        NonEmptyList.one(IntoField[Id]("field"))
+      )
     )
+
     ValueToken.`value`.parseAll("varname.field.sub").value.mapK(spanToId) should be(
-      VarToken(Name[Id]("varname"), IntoField[Id]("field") :: IntoField[Id]("sub") :: Nil)
+      PropertyToken[Id](
+        VarToken(Name[Id]("varname")),
+        NonEmptyList.of(IntoField[Id]("field"), IntoField[Id]("sub"))
+      )
     )
   }
diff --git a/parser/src/test/scala/aqua/parser/lexer/VarLambdaSpec.scala b/parser/src/test/scala/aqua/parser/lexer/VarLambdaSpec.scala
index 42c2eb40..1dab5e71 100644
--- a/parser/src/test/scala/aqua/parser/lexer/VarLambdaSpec.scala
+++ b/parser/src/test/scala/aqua/parser/lexer/VarLambdaSpec.scala
@@ -12,47 +12,43 @@ class VarLambdaSpec extends AnyFlatSpec with Matchers with EitherValues {
   import aqua.AquaSpec._

   "var lambda" should "parse" in {
-    val opsP = (s: String) => Name.dotted.parseAll(s).value.mapK(spanToId)
+    val opsP = (s: String) => ValueToken.value.parseAll(s).value.mapK(spanToId)

-    opsP("SomeClass.some_val") should be(Name[Id]("SomeClass.some_val"))
+    opsP("some_val") should be(
+      VarToken[Id](Name[Id]("some_val"))
+    )

-    opsP("some_val") should be(Name[Id]("some_val"))
+    opsP("SOME_CONST") should be(
+      VarToken[Id](Name[Id]("SOME_CONST"))
+    )

-    opsP("SOME_CONST") should be(Name[Id]("SOME_CONST"))
+    opsP("SomeClass.some_val") should be(
+      PropertyToken[Id](
+        VarToken[Id](Name[Id]("SomeClass")),
+        NonEmptyList.one(IntoField[Id]("some_val"))
+      )
+    )

-    opsP("SomeClass.SOME_CONST") should be(Name[Id]("SomeClass.SOME_CONST"))
-  }
+    opsP("SomeClass.Some_Other_Class") should be(
+      PropertyToken[Id](
+        VarToken[Id](Name[Id]("SomeClass")),
+        NonEmptyList.one(IntoField[Id]("Some_Other_Class"))
+      )
+    )

-  "var lambda in VarToken" should "parse" in {
-    val opsP = (s: String) => ValueToken.varProperty.parseAll(s).value.mapK(spanToId)
-
-    opsP("some_val") should be(VarToken[Id](Name[Id]("some_val")))
-
-    opsP("SomeClass.SOME_CONST") should be(VarToken[Id](Name[Id]("SomeClass.SOME_CONST")))
-  }
-
-  "var lambda in value" should "parse" in {
-    val opsP = (s: String) => ValueToken.atom.parseAll(s).value.mapK(spanToId)
-    opsP("some_val") should be(VarToken[Id](Name[Id]("some_val")))
-    opsP("SomeClass.SOME_CONST") should be(VarToken[Id](Name[Id]("SomeClass.SOME_CONST")))
-  }
-
-  "var lambda in ability" should "parse" in {
-    val opsP = (s: String) => ValueToken.abProperty.parseAll(s).value.mapK(spanToId)
-
-    opsP("SomeClass") should be(VarToken[Id](Name[Id]("SomeClass")))
+    opsP("SomeClass.SOME_CONST") should be(
+      PropertyToken[Id](
+        VarToken[Id](Name[Id]("SomeClass")),
+        NonEmptyList.one(IntoField[Id]("SOME_CONST"))
+      )
+    )

     opsP("SomeClass.call()") should be(
-      VarToken[Id](Name[Id]("SomeClass"), IntoArrow(Name[Id]("call"), Nil) :: Nil)
+      PropertyToken[Id](
+        VarToken[Id](Name[Id]("SomeClass")),
+        NonEmptyList.one(IntoArrow[Id](Name[Id]("call"), Nil))
+      )
     )
   }

-  "parse Class " should "parse" in {
-    val opsP = (s: String) => Name.cl.parseAll(s).value.mapK(spanToId)
-
-    opsP("SomeClass") should be(Name[Id]("SomeClass"))
-
-    opsP("SC") should be(Name[Id]("SC"))
-  }
-
 }
diff --git a/semantics/src/main/scala/aqua/semantics/Prog.scala b/semantics/src/main/scala/aqua/semantics/Prog.scala
index d6340449..aadddaf4 100644
--- a/semantics/src/main/scala/aqua/semantics/Prog.scala
+++ b/semantics/src/main/scala/aqua/semantics/Prog.scala
@@ -30,7 +30,7 @@ sealed abstract class Prog[Alg[_]: Monad, A] extends (Alg[A] => Alg[A]) {
         (_: Unit, m: A) => Ab.endScope() as m
       )
     )
-  
+
   def namesScope[S[_]](token: Token[S])(implicit N: NamesAlgebra[S, Alg]): Prog[Alg, A] =
     wrap(
       RunAround(
@@ -70,6 +70,9 @@ object Prog {
   def after[Alg[_]: Monad, A](prog: A => Alg[A]): Prog[Alg, A] =
     RunAround(Monad[Alg].unit, (_: Unit, a: A) => prog(a))

+  def after_[Alg[_]: Monad, A](prog: => Alg[A]): Prog[Alg, A] =
+    after(_ => prog)
+
   def around[Alg[_]: Monad, R, A](before: Alg[R], after: (R, A) => Alg[A]): Prog[Alg, A] =
     RunAround(before, after)

diff --git a/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala b/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala
index 0de7df8b..d6cd25b2 100644
--- a/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala
+++ b/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala
@@ -10,12 +10,13 @@ import aqua.semantics.rules.abilities.AbilitiesAlgebra
 import aqua.semantics.rules.definitions.DefinitionsAlgebra
 import aqua.semantics.rules.names.NamesAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
-import aqua.types.{ArrowType, AbilityType, Type}
+import aqua.types.{AbilityType, ArrowType, Type}
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.applicative.*
 import cats.syntax.semigroupal.*
+import cats.syntax.traverse.*
 import cats.Monad
 import cats.data.{NonEmptyList, NonEmptyMap}

@@ -25,21 +26,15 @@ class AbilitySem[S[_]](val expr: AbilityExpr[S]) extends AnyVal {
     T: TypesAlgebra[S, Alg],
     D: DefinitionsAlgebra[S, Alg]
   ): Prog[Alg, Raw] = {
-    Prog.after(_ =>
-      D.purgeDefs(expr.name).flatMap {
-        case Some(fields) =>
-          val t = AbilityType(expr.name.value, fields)
-          T.defineNamedType(expr.name, t).map {
-            case true =>
-              TypeRaw(
-                expr.name.value,
-                t
-              ): Raw
-            case false =>
-              Raw.error("Ability types unresolved")
-          }
-        case None => Raw.error("Ability types unresolved").pure[Alg]
-      }
+    Prog.after_(
+      for {
+        defs <- D.purgeDefs(expr.name)
+        abType = defs.map(fields => AbilityType(expr.name.value, fields))
+        result <- abType.flatTraverse(t =>
+          T.defineNamedType(expr.name, t)
+            .map(Option.when(_)(TypeRaw(expr.name.value, t)))
+        )
+      } yield result.getOrElse(Raw.error("Ability types unresolved"))
     )
   }
 }
diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala
index 68da7ba3..ab825c6c 100644
--- a/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala
+++ b/semantics/src/main/scala/aqua/semantics/expr/func/CallArrowSem.scala
@@ -1,6 +1,7 @@
 package aqua.semantics.expr.func

 import aqua.parser.expr.func.CallArrowExpr
+import aqua.parser.lexer.{CallArrowToken, IntoArrow, IntoField, PropertyToken, VarToken}
 import aqua.raw.Raw
 import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp}
 import aqua.raw.value.CallArrowRaw
@@ -13,6 +14,9 @@ import cats.Monad
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.traverse.*
+import cats.syntax.option.*
+import cats.syntax.applicative.*
+import cats.syntax.comonad.*

 class CallArrowSem[S[_]](val expr: CallArrowExpr[S]) extends AnyVal {

@@ -31,12 +35,16 @@ class CallArrowSem[S[_]](val expr: CallArrowExpr[S]) extends AnyVal {
     }
   }

-  private def toModel[Alg[_]: Monad](implicit
+  private def toModel[Alg[_]: Monad](using
     N: NamesAlgebra[S, Alg],
     T: TypesAlgebra[S, Alg],
     V: ValuesAlgebra[S, Alg]
   ): Alg[Option[FuncOp]] = for {
-    callArrowRaw <- V.callArrowToRaw(callArrow)
+    callArrowRaw <- V.valueToRaw(callArrow).map {
+      // TODO: Refactor this to support other results
+      case Some(car: CallArrowRaw) => car.some
+      case _ => none
+    }
     maybeOp <- callArrowRaw.traverse(car =>
       variables
         .drop(car.baseType.codomain.length)
diff --git a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala
index b1237a60..c1848daa 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala
@@ -16,6 +16,7 @@ import cats.syntax.apply.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.traverse.*
+import cats.syntax.foldable.*
 import cats.syntax.option.*
 import cats.instances.list.*
 import cats.data.{NonEmptyList, NonEmptyMap}
@@ -52,75 +53,57 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
       case op: IntoField[S] =>
         T.resolveField(rootType, op)
       case op: IntoArrow[S] =>
-        op.arguments
-          .map(valueToRaw)
-          .sequence
-          .map(_.sequence)
-          .flatMap {
-            case None => None.pure[Alg]
-            case Some(arguments) => T.resolveArrow(rootType, op, arguments)
-          }
-      case op: IntoCopy[S] =>
-        op.fields
-          .map(valueToRaw)
-          .sequence
-          .map(_.sequence)
-          .flatMap {
-            case None => None.pure[Alg]
-            case Some(values) => T.resolveCopy(rootType, op, values)
-          }
-      case op: IntoIndex[S] =>
-        op.idx
-          .fold[Alg[Option[ValueRaw]]](Option(LiteralRaw.Zero).pure[Alg])(
-            valueToRaw
+        for {
+          maybeArgs <- op.arguments.traverse(valueToRaw)
+          arrowProp <- maybeArgs.sequence.flatTraverse(
+            T.resolveArrow(rootType, op, _)
           )
-          .flatMap {
-            case None => None.pure[Alg]
-            case Some(values) => T.resolveIndex(rootType, op, values)
-          }
+        } yield arrowProp
+      case op: IntoCopy[S] =>
+        for {
+          maybeFields <- op.fields.traverse(valueToRaw)
+          copyProp <- maybeFields.sequence.flatTraverse(
+            T.resolveCopy(rootType, op, _)
+          )
+        } yield copyProp
+      case op: IntoIndex[S] =>
+        for {
+          maybeIdx <- op.idx.fold(LiteralRaw.Zero.some.pure)(valueToRaw)
+          idxProp <- maybeIdx.flatTraverse(
+            T.resolveIndex(rootType, op, _)
+          )
+        } yield idxProp
     }

   def valueToRaw(v: ValueToken[S]): Alg[Option[ValueRaw]] =
     v match {
-      case l: LiteralToken[S] => Some(LiteralRaw(l.value, l.ts)).pure[Alg]
-      case VarToken(name, ops) =>
+      case l @ LiteralToken(value, t) =>
+        LiteralRaw(l.value, t).some.pure[Alg]
+
+      case VarToken(name) =>
         N.read(name).flatMap {
           case Some(t) =>
-            // Prepare property expression: take the last known type and the next op, add next op to accumulator
-            ops
-              .foldLeft[Alg[(Option[Type], Chain[PropertyRaw])]](
-                (Some(t) -> Chain.empty).pure[Alg]
-              ) { case (acc, op) =>
-                acc.flatMap {
-                  // Some(rootType) means that the previous property op was resolved successfully
-                  case (Some(rootType), prop) =>
-                    // Resolve a single property
-                    resolveSingleProperty(rootType, op).map {
-                      // Property op resolved, add it to accumulator and update the last known type
-                      case Some(p) => (Some(p.`type`), prop :+ p)
-                      // Property op is not resolved, it's an error, stop iterations
-                      case None => (None, Chain.empty)
-                    }
-
-                  // We have already errored, do nothing
-                  case _ => (None, Chain.empty).pure[Alg]
-                }
-
-              }
-              .map {
-                // Some(_) means no errors occured
-                case (Some(_), property) if property.length == ops.length =>
-                  Some(property.foldLeft[ValueRaw](VarRaw(name.value, t)) { case (v, p) =>
-                    ApplyPropertyRaw(v, p)
-                  })
-
-                case _ => None
-              }
-
+            VarRaw(name.value, t).some.pure[Alg]
           case None => None.pure[Alg]
         }
+      case prop @ PropertyToken(value, properties) =>
+        prop.adjust.fold(
+          for {
+            valueRaw <- valueToRaw(value)
+            result <- valueRaw.flatTraverse(raw =>
+              properties
+                .foldLeftM(raw) { case (prev, op) =>
+                  OptionT(
+                    resolveSingleProperty(prev.`type`, op)
+                  ).map(prop => ApplyPropertyRaw(prev, prop))
+                }
+                .value
+            )
+          } yield result
+        )(valueToRaw)
+
       case dvt @ NamedValueToken(typeName, fields) =>
         T.resolveType(typeName).flatMap {
           case Some(resolvedType) =>
@@ -153,26 +136,28 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
         }

       case ct @ CollectionToken(_, values) =>
-        values.traverse(valueToRaw).map(_.flatten).map(NonEmptyList.fromList).map {
-          case Some(raws) if raws.size == values.size =>
-            val element = raws.map(_.`type`).reduceLeft(_ `∩` _)
-            // In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
-            // But we want to get to TopType instead: this would mean that intersection is empty, and you cannot
-            // make any decision about the structure of type, but can push anything inside
-            val elementNotBottom = if (element == BottomType) TopType else element
-            Some(
-              CollectionRaw(
-                raws,
-                ct.mode match {
-                  case CollectionToken.Mode.StreamMode => StreamType(elementNotBottom)
-                  case CollectionToken.Mode.ArrayMode => ArrayType(elementNotBottom)
-                  case CollectionToken.Mode.OptionMode => OptionType(elementNotBottom)
-                }
-              )
-            )
-          case _ if values.isEmpty => Some(ValueRaw.Nil)
-          case _ => None
-        }
+        for {
+          maybeValuesRaw <- values.traverse(valueToRaw).map(_.sequence)
+          raw = maybeValuesRaw.map(raws =>
+            NonEmptyList
+              .fromList(raws)
+              .fold(ValueRaw.Nil) { nonEmpty =>
+                val element = raws.map(_.`type`).reduceLeft(_ `∩` _)
+                // In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
+                // But we want to get to TopType instead: this would mean that intersection is empty, and you cannot
+                // make any decision about the structure of type, but can push anything inside
+                val elementNotBottom = if (element == BottomType) TopType else element
+                CollectionRaw(
+                  nonEmpty,
+                  ct.mode match {
+                    case CollectionToken.Mode.StreamMode => StreamType(elementNotBottom)
+                    case CollectionToken.Mode.ArrayMode => ArrayType(elementNotBottom)
+                    case CollectionToken.Mode.OptionMode => OptionType(elementNotBottom)
+                  }
+                )
+              }
+          )
+        } yield raw

       case ca: CallArrowToken[S] =>
         callArrowToRaw(ca).map(_.widen[ValueRaw])
@@ -316,79 +301,76 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
     }
   }

-  // Generate CallArrowRaw for arrow in ability
-  // WARNING: arguments are resolved at the end of the function and added to CallArrowRaw
-  def callAbType(
-    ab: String,
-    abType: AbilityType,
-    ca: CallArrowToken[S]
-  ): Alg[Option[CallArrowRaw]] =
-    abType.arrows.get(ca.funcName.value) match {
-      case Some(arrowType) =>
-        Option(
-          CallArrowRaw(None, AbilityType.fullName(ab, ca.funcName.value), Nil, arrowType, None)
-        ).pure[Alg]
-      case None => None.pure[Alg]
-    }
+  private def callArrowFromAbility(
+    ab: Name[S],
+    at: AbilityType,
+    funcName: Name[S]
+  ): Option[CallArrowRaw] = at.arrows
+    .get(funcName.value)
+    .map(arrowType =>
+      CallArrowRaw.ability(
+        ab.value,
+        funcName.value,
+        arrowType
+      )
+    )

-  def callArrowToRaw(ca: CallArrowToken[S]): Alg[Option[CallArrowRaw]] = {
+  private def callArrowToRaw(
+    callArrow: CallArrowToken[S]
+  ): Alg[Option[CallArrowRaw]] =
     for {
-      raw <- ca.ability
-        .fold(
-          N.readArrow(ca.funcName)
-            .map(
-              _.map(bt =>
-                CallArrowRaw(
-                  ability = None,
-                  name = ca.funcName.value,
-                  arguments = Nil,
-                  baseType = bt,
-                  serviceId = None
-                )
-              )
+      raw <- callArrow.ability.fold(
+        for {
+          maybeArrowType <- N.readArrow(callArrow.funcName)
+        } yield maybeArrowType
+          .map(arrowType =>
+            CallArrowRaw.func(
+              funcName = callArrow.funcName.value,
+              baseType = arrowType
            )
-        )(ab =>
-          // Check that we have variable as ability
-          N.read(ab.asName, false).flatMap {
-            case Some(at @ AbilityType(_, _)) =>
-              callAbType(ab.value, at, ca)
-            case _ =>
-              // Check that we have registered ability type.
-              // If it exists - this is ability type in file, if not - imported ability
-              T.getType(ab.value).flatMap {
-                case Some(abType: AbilityType) =>
-                  callAbType(ab.value, abType, ca)
-                case t =>
-                  (A.getArrow(ab, ca.funcName), A.getServiceId(ab)).mapN {
-                    case (Some(at), Right(sid)) =>
-                      // Service call, actually
-                      CallArrowRaw(
-                        ability = Some(ab.value),
-                        name = ca.funcName.value,
-                        arguments = Nil,
-                        baseType = at,
-                        serviceId = Some(sid)
-                      ).some
-                    case (Some(at), Left(true)) =>
-                      // Ability function call, actually
-                      CallArrowRaw(
-                        ability = Some(ab.value),
-                        name = ca.funcName.value,
-                        arguments = Nil,
-                        baseType = at,
-                        serviceId = None
-                      ).some
-                    case _ => none
-                  }
-              }
-          }
-        )
+          )
+      )(ab =>
+        N.read(ab.asName, mustBeDefined = false).flatMap {
+          case Some(at: AbilityType) =>
+            callArrowFromAbility(ab.asName, at, callArrow.funcName).pure
+          case _ =>
+            T.getType(ab.value).flatMap {
+              case Some(at: AbilityType) =>
+                callArrowFromAbility(ab.asName, at, callArrow.funcName).pure
+              case _ =>
+                (A.getArrow(ab, callArrow.funcName), A.getServiceId(ab)).mapN {
+                  case (Some(at), Right(sid)) =>
+                    CallArrowRaw
+                      .service(
+                        abilityName = ab.value,
+                        serviceId = sid,
+                        funcName = callArrow.funcName.value,
+                        baseType = at
+                      )
+                      .some
+                  case (Some(at), Left(true)) =>
+                    CallArrowRaw
+                      .ability(
+                        abilityName = ab.value,
+                        funcName = callArrow.funcName.value,
+                        baseType = at
+                      )
+                      .some
+                  case _ => none
+                }
+            }
+        }
+      )
       result <- raw.flatTraverse(r =>
         val arr = r.baseType
         for {
-          argsCheck <- T.checkArgumentsNumber(ca.funcName, arr.domain.length, ca.args.length)
+          argsCheck <- T.checkArgumentsNumber(
+            callArrow.funcName,
+            arr.domain.length,
+            callArrow.args.length
+          )
           args <- Option
-            .when(argsCheck)(ca.args zip arr.domain.toList)
+            .when(argsCheck)(callArrow.args zip arr.domain.toList)
             .traverse(
               _.flatTraverse { case (tkn, tp) =>
                 for {
@@ -406,7 +388,6 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
           } yield result
       )
     } yield result
-  }
 }
diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
index 4cbd2bc6..8dac3ede 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
@@ -244,12 +244,6 @@ class TypesInterpreter[S[_], X](implicit
     else report(token, s"Cannot compare '$left' with '$right''").as(false)
   }

-  private def extractToken(token: Token[S]) =
-    token match {
-      case VarToken(n, properties) => properties.lastOption.getOrElse(n)
-      case t => t
-    }
-
   override def ensureTypeMatches(
     token: Token[S],
     expected: Type,
@@ -271,11 +265,14 @@ class TypesInterpreter[S[_], X](implicit
         valueFields.toSortedMap.toList.traverse { (name, `type`) =>
           typeFields.lookup(name) match {
             case Some(t) =>
-              val nextToken = extractToken(token match {
+              val nextToken = token match {
                 case NamedValueToken(_, fields) =>
                   fields.lookup(name).getOrElse(token)
-                case t => t
-              })
+                // TODO: Is it needed?
+                case PropertyToken(_, properties) =>
+                  properties.last
+                case _ => token
+              }
               ensureTypeMatches(nextToken, `type`, t)
             case None =>
               report(
diff --git a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala
index 7f1dd4be..4dade43c 100644
--- a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala
+++ b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala
@@ -85,8 +85,15 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
     insideBody(script) { body =>
       val arrowType = ArrowType(NilType, ConsType.cons(ScalarType.string, NilType))
-      val serviceCall =
-        CallArrowRawTag.service(LiteralRaw.quote("srv1"), "fn1", emptyCall, "A", arrowType).leaf
+      val serviceCall = CallArrowRawTag
+        .service(
+          serviceId = LiteralRaw.quote("srv1"),
+          fnName = "fn1",
+          call = emptyCall,
+          name = "A",
+          arrowType = arrowType
+        )
+        .leaf

       val expected = ParTag.wrap(
diff --git a/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala b/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala
index 381535db..8e2b3c49 100644
--- a/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala
+++ b/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala
@@ -57,8 +57,8 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
   def literal(value: String, `type`: LiteralType) = LiteralToken(Id(value), `type`)

-  def variable(name: String) =
-    VarToken(Name(Id(name)), Nil)
+  def variable(name: String): VarToken[Id] =
+    VarToken[Id](Name[Id](name))

   def allPairs[A](list: List[A]): List[(A, A)] = for {
     a <- list