Mirror of https://github.com/fluencelabs/aqua.git, synced 2024-12-04 14:40:17 +00:00
LNG-96 object creation (#592)
parent a51c42192f
commit 63433f2c91
.github/workflows/e2e.yml (2 changes, vendored)
@@ -38,7 +38,7 @@ jobs:

   aqua-playground:
     needs: aqua
-    uses: fluencelabs/aqua-playground/.github/workflows/tests.yml@update-e2e
+    uses: fluencelabs/aqua-playground/.github/workflows/tests.yml@master
     with:
       aqua-version: "${{ needs.aqua.outputs.aqua-version }}"
@@ -17,7 +17,7 @@ val scribeV = "3.7.1"
 name := "aqua-hll"

 val commons = Seq(
-  baseAquaVersion := "0.7.7",
+  baseAquaVersion := "0.8.0",
   version := baseAquaVersion.value + "-" + sys.env.getOrElse("BUILD_NUMBER", "SNAPSHOT"),
   scalaVersion := dottyVersion,
   libraryDependencies ++= Seq(
@@ -112,7 +112,9 @@ object JsonEncoder {
       .sequence
       .map { fields =>
         // HACK: JSON can have empty object and it is possible if there is only optional fields
-        val fs = if (fields.isEmpty) List(("some_random_field_that_does_not_even_exists", BottomType)) else fields
+        val fs =
+          if (fields.isEmpty) List(("some_random_field_that_does_not_even_exists", BottomType))
+          else fields
         StructType("", NonEmptyMap.fromMap(SortedMap(fs: _*)).get)
       }
@@ -96,9 +96,9 @@ object DistOpts extends Logging {
     }
     val addBlueprintType = StructType(
       "AddBlueprint",
-      NonEmptyMap(
+      NonEmptyMap.of(
         ("name", ScalarType.string),
-        SortedMap(("dependencies", ArrayType(ScalarType.string)))
+        ("dependencies", ArrayType(ScalarType.string))
       )
     )
     val addBlueprintRequestVar =
@@ -0,0 +1,63 @@
+package aqua.model.inline
+
+import aqua.model.{
+  CallModel,
+  CallServiceModel,
+  LiteralModel,
+  OpModel,
+  SeqModel,
+  ValueModel,
+  VarModel
+}
+import aqua.model.inline.raw.RawInliner
+import cats.data.Chain
+import aqua.model.inline.state.{Arrows, Exports, Mangler}
+import aqua.raw.value.{MakeStructRaw, LiteralRaw}
+import cats.data.{NonEmptyMap, State}
+import aqua.model.inline.Inline
+import aqua.model.inline.RawValueInliner.{unfold, valueToModel}
+import aqua.types.ScalarType
+import cats.syntax.traverse.*
+import cats.syntax.monoid.*
+import cats.syntax.functor.*
+import cats.syntax.flatMap.*
+import cats.syntax.apply.*
+
+object MakeStructRawInliner extends RawInliner[MakeStructRaw] {
+
+  private def createObj(fields: NonEmptyMap[String, ValueModel], result: VarModel): OpModel.Tree = {
+    val args = fields.toSortedMap.toList.flatMap { case (name, value) =>
+      LiteralModel.fromRaw(LiteralRaw.quote(name)) :: value :: Nil
+    }
+    CallServiceModel(
+      LiteralModel("\"json\"", ScalarType.string),
+      "obj",
+      CallModel(
+        args,
+        CallModel.Export(result.name, result.`type`) :: Nil
+      )
+    ).leaf
+  }
+
+  override def apply[S: Mangler: Exports: Arrows](
+    raw: MakeStructRaw,
+    propertiesAllowed: Boolean
+  ): State[S, (ValueModel, Inline)] = {
+    for {
+      name <- Mangler[S].findAndForbidName(raw.structType.name + "_obj")
+      foldedFields <- raw.fields.nonEmptyTraverse(unfold(_))
+    } yield {
+      val varModel = VarModel(name, raw.baseType)
+      val valsInline = foldedFields.toSortedMap.values.map(_._2).fold(Inline.empty)(_ |+| _)
+      val fields = foldedFields.map(_._1)
+      val objCreation = createObj(fields, varModel)
+      (
+        varModel,
+        Inline(
+          valsInline.flattenValues,
+          Chain.one(SeqModel.wrap((valsInline.predo :+ objCreation).toList: _*))
+        )
+      )
+    }
+  }
+}
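To make the new inliner concrete: createObj flattens the field map into a single call to the "json" service's "obj" function, interleaving each quoted field name with its value and exporting the result into a freshly mangled variable. Below is a minimal, self-contained sketch of that argument shaping in plain Scala; Call and the string-typed values are illustrative stand-ins, not the real CallServiceModel/CallModel types.

import scala.collection.immutable.SortedMap

// Stand-in for CallServiceModel + CallModel: just enough structure to show the shape.
final case class Call(serviceId: String, fnName: String, args: List[String], exportTo: String)

def createObjCall(fields: SortedMap[String, String], result: String): Call =
  Call(
    serviceId = "\"json\"",
    fnName = "obj",
    // quoted field name, then its value, for every field: ["f1", v1, "f2", v2, ...]
    args = fields.toList.flatMap { case (name, value) => s"\"$name\"" :: value :: Nil },
    exportTo = result
  )

// createObjCall(SortedMap("f1" -> "1", "f2" -> "a"), "Obj_obj").args
//   == List("\"f1\"", "1", "\"f2\"", "a")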
@@ -42,6 +42,9 @@ object RawValueInliner extends Logging {
       case cr: CollectionRaw =>
         CollectionRawInliner(cr, propertiesAllowed)

+      case dr: MakeStructRaw =>
+        MakeStructRawInliner(dr, propertiesAllowed)
+
       case cr: CallArrowRaw =>
         CallArrowRawInliner(cr, propertiesAllowed)
@@ -42,12 +42,11 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
   }

   /*
-  func stream-callback(cb: []string -> ()):
-    records: *string
-    cb(records)
+    func stream-callback(cb: []string -> ()):
+      records: *string
+      cb(records)
    */
   "arrow inliner" should "pass stream to callback properly" in {
-
     val streamType = StreamType(ScalarType.string)
     val streamVar = VarRaw("records", streamType)
     val streamModel = VarModel("records", StreamType(ScalarType.string))
@@ -125,14 +124,14 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
       )
     ) should be(true)

-  } /*
-  func stream-callback(cb: string -> ()):
-    records: *string
-    cb(records!)
-   */
+  }
+
   // TODO: unignore and fix after stream restrictions will be implemented
-  ignore /*"arrow inliner"*/ should "pass stream to callback properly, holding property" in {
+  /*
+    func stream-callback(cb: string -> ()):
+      records: *string
+      cb(records!)
+  */
+  ignore /*"arrow inliner"*/ should "pass stream with gate to callback properly" in {
     val streamType = StreamType(ScalarType.string)
     val streamVar = VarRaw("records", streamType)
     val streamVarLambda =
@@ -221,17 +220,17 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
   }

   /*
-  service TestService("test-service"):
-    get_records() -> []string
+    service TestService("test-service"):
+      get_records() -> []string

-  func inner(inner-records: *[]string):
-    inner-records <- TestService.get_records()
+    func inner(inner-records: *[]string):
+      inner-records <- TestService.get_records()

-  func retrieve_records() -> [][]string:
-    records: *[]string
-    -- 'inner-records' argument in `inner` should be renamed as `records` in resulted AIR
-    append_records(records)
-    <- records
+    func retrieve_records() -> [][]string:
+      records: *[]string
+      -- 'inner-records' argument in `inner` should be renamed as `records` in resulted AIR
+      append_records(records)
+      <- records
    */
   "arrow inliner" should "work with streams as arguments" in {
@@ -412,7 +411,6 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
           CallModel(ValueModel.fromRaw(flattenObject) :: Nil, Nil)
         ).leaf
       )
-
     )
   ) should be(true)
@@ -558,7 +556,8 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers {
         .leaf
     )

-    val foldOp = ForTag(iVar.name, array, Some(ForTag.WaitMode)).wrap(inFold, NextTag(iVar.name).leaf)
+    val foldOp =
+      ForTag(iVar.name, array, Some(ForTag.WaitMode)).wrap(inFold, NextTag(iVar.name).leaf)

     val model: OpModel.Tree = ArrowInliner
       .callArrow[InliningState](
@@ -375,8 +375,8 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
       .value
       ._2

-    //    println(resVal)
-    //    println(resTree)
+    // println(resVal)
+    // println(resTree)
   }

   "raw value inliner" should "desugarize a recursive lambda value" in {
@@ -55,7 +55,9 @@ case class ApplyPropertyRaw(value: ValueRaw, property: PropertyRaw) extends Valu
   override def renameVars(map: Map[String, String]): ValueRaw =
     ApplyPropertyRaw(value.renameVars(map), property.renameVars(map))

-  override def map(f: ValueRaw => ValueRaw): ValueRaw = f(ApplyPropertyRaw(f(value), property.map(f)))
+  override def map(f: ValueRaw => ValueRaw): ValueRaw = f(
+    ApplyPropertyRaw(f(value), property.map(f))
+  )

   override def toString: String = s"$value.$property"
@@ -185,6 +187,20 @@ case class CollectionRaw(values: NonEmptyList[ValueRaw], boxType: BoxType) exten
     copy(values = values.map(_.renameVars(map)))
 }

+case class MakeStructRaw(fields: NonEmptyMap[String, ValueRaw], structType: StructType) extends ValueRaw {
+
+  override def baseType: Type = structType
+
+  override def map(f: ValueRaw => ValueRaw): ValueRaw = f(copy(fields = fields.map(f)))
+
+  override def varNames: Set[String] = {
+    fields.toSortedMap.values.flatMap(_.varNames).toSet
+  }
+
+  override def renameVars(map: Map[String, String]): ValueRaw =
+    copy(fields = fields.map(_.renameVars(map)))
+}
+
 case class CallArrowRaw(
   // TODO: ability should hold a type, not name
   ability: Option[String],
@@ -42,7 +42,7 @@ object ModuleExpr extends HeaderExpr.Leaf {
     nameOrAbList.map(Left(_)) | `star`.lift.map(Token.lift(_)).map(Right(_))

   override val p: Parser[ModuleExpr[Span.S]] =
-    (`module` *> ` ` *> Ability.dotted ~
+    ((`module` | `aqua-word`) *> ` ` *> Ability.dotted ~
       (` declares ` *> nameOrAbListOrAll).?).map {
       case (name, None) =>
         ModuleExpr(name, None, Nil, Nil)
@@ -19,6 +19,8 @@ case class Name[F[_]: Comonad](name: F[String]) extends Token[F] {
   def rename(newName: String): Name[F] = copy(name.as(newName))

   def value: String = name.extract
+
+  override def toString() = value
 }

 object Name {
@@ -25,14 +25,15 @@ case class IntoField[F[_]: Comonad](name: F[String]) extends PropertyOp[F] {
   override def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K] = copy(fk(name))

   def value: String = name.extract
+
+  override def toString: String = name.extract
 }

-case class IntoIndex[F[_]: Comonad](token: Token[F], idx: Option[ValueToken[F]])
+case class IntoIndex[F[_]: Comonad](point: F[Unit], idx: Option[ValueToken[F]])
     extends PropertyOp[F] {
-  override def as[T](v: T): F[T] = token.as(v)
+  override def as[T](v: T): F[T] = point.as(v)

-  override def mapK[K[_]: Comonad](fk: F ~> K): IntoIndex[K] =
-    copy(token.mapK(fk), idx.map(_.mapK(fk)))
+  override def mapK[K[_]: Comonad](fk: F ~> K): IntoIndex[K] = copy(fk(point), idx.map(_.mapK(fk)))
 }

 object PropertyOp {
@@ -42,10 +43,10 @@ object PropertyOp {

   private val parseIdx: P[PropertyOp[Span.S]] =
     (P.defer(
-      (ValueToken.`value`.between(`[`, `]`) | (exclamation *> ValueToken.num))
-        .map(v => IntoIndex(v, Some(v)))
+      (ValueToken.`value`.between(`[`, `]`).lift | (exclamation *> ValueToken.num).lift)
+        .map(v => IntoIndex(v.map(_.unit), Some(v._2)))
         .backtrack
-    ) | exclamation.lift.map(e => IntoIndex(Token.lift[Span.S, Unit](e), None))).flatMap { ii =>
+    ) | exclamation.lift.map(e => IntoIndex(e, None))).flatMap { ii =>
       ii.idx match {
         case Some(LiteralToken(_, lt)) if lt == LiteralType.signed =>
           P.fail.withContext("Collection indexes must be non-negative")
@@ -3,6 +3,7 @@ package aqua.parser.lexer
 import cats.data.NonEmptyList
 import cats.parse.{Accumulator0, Parser as P, Parser0 as P0}
 import cats.{~>, Comonad, Functor}
+import cats.syntax.functor.*

 trait Token[F[_]] {
   def as[T](v: T): F[T]
@@ -34,6 +35,7 @@ object Token {
   val `data`: P[Unit] = P.string("data")
   val `import`: P[Unit] = P.string("import")
   val `module`: P[Unit] = P.string("module")
+  val `aqua-word`: P[Unit] = P.string("aqua")
   val `declares`: P[Unit] = P.string("declares")
   val ` declares ` : P[Unit] = `declares`.surroundedBy(` `)
   val `declare`: P[Unit] = P.string("declare")
@@ -69,6 +69,8 @@ case class CustomTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken
   override def mapK[K[_]: Comonad](fk: F ~> K): CustomTypeToken[K] = copy(fk(name))

   def value: String = name.extract
+
+  override def toString: String = name.extract
 }

 object CustomTypeToken {
@@ -11,7 +11,7 @@ import cats.parse.{Numbers, Parser as P, Parser0 as P0}
 import cats.syntax.comonad.*
 import cats.syntax.functor.*
 import cats.{~>, Comonad, Functor}
-import cats.data.NonEmptyList
+import cats.data.{NonEmptyList, NonEmptyMap}
 import aqua.parser.lift.Span
 import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
@@ -19,7 +19,8 @@ sealed trait ValueToken[F[_]] extends Token[F] {
   def mapK[K[_]: Comonad](fk: F ~> K): ValueToken[K]
 }

-case class VarToken[F[_]](name: Name[F], property: List[PropertyOp[F]] = Nil) extends ValueToken[F] {
+case class VarToken[F[_]](name: Name[F], property: List[PropertyOp[F]] = Nil)
+    extends ValueToken[F] {
   override def as[T](v: T): F[T] = name.as(v)

   def mapK[K[_]: Comonad](fk: F ~> K): VarToken[K] = copy(name.mapK(fk), property.map(_.mapK(fk)))
@@ -47,6 +48,8 @@ case class CollectionToken[F[_]: Comonad](
   override def as[T](v: T): F[T] = point.as(v)

   def mode: CollectionToken.Mode = point.extract
+
+  override def toString: String = s"CollectionToken(${point.extract}, $values)"
 }

 object CollectionToken {
@@ -91,6 +94,33 @@ object CallArrowToken {
     }
 }

+case class StructValueToken[F[_]: Comonad](
+  typeName: CustomTypeToken[F],
+  fields: NonEmptyMap[String, ValueToken[F]]
+) extends ValueToken[F] {
+
+  override def mapK[K[_]: Comonad](fk: F ~> K): StructValueToken[K] =
+    copy(typeName.mapK(fk), fields.map(_.mapK(fk)))
+
+  override def as[T](v: T): F[T] = typeName.as(v)
+}
+
+object StructValueToken {
+
+  val dataValue: P[StructValueToken[Span.S]] =
+    (`Class`.lift
+      ~ comma(
+        ((`name` <* (` `.?.with1 *> `=` *> ` `.?)).with1 ~ ValueToken.`value`).surroundedBy(`/s*`)
+      )
+        .between(` `.?.with1 *> `(` <* `/s*`, `/s*` *> `)`))
+      .withContext(
+        "Missing braces '()' after the struct type"
+      )
+      .map { case (dn, args) =>
+        StructValueToken(CustomTypeToken(dn), NonEmptyMap.of(args.head, args.tail: _*))
+      }
+}
+
 // Two values as operands, with an infix between them
 case class InfixToken[F[_]: Comonad](
   left: ValueToken[F],
@@ -166,6 +196,7 @@ object InfixToken {
       P.defer(
         CollectionToken.collection
       ) ::
+        P.defer(StructValueToken.dataValue).backtrack ::
        P.defer(CallArrowToken.callArrow).backtrack ::
        P.defer(brackets(InfixToken.mathExpr)) ::
        varProperty ::
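The dataValue parser above accepts literals of the form TypeName(field = value, ...), with optional whitespace and newlines around each pair. The following is a rough, self-contained cats-parse sketch of that grammar shape; every parser in it is a simplified stand-in (lower-case identifiers and integer values only), not the real aqua lexer combinators.

import cats.parse.{Parser => P, Parser0 => P0}

val ws: P0[Unit] = P.charIn(' ', '\t', '\n').rep0.void
val fieldName: P[String] = P.charIn(('a' to 'z') ++ ('0' to '9')).rep.string
val typeName: P[String] = (P.charIn('A' to 'Z') ~ P.charIn('a' to 'z').rep0).string
val intValue: P[String] = P.charIn('0' to '9').rep.string

// one `name = value` pair, whitespace-tolerant on both sides
val field: P[(String, String)] =
  ((fieldName <* ws <* P.char('=') <* ws) ~ intValue).surroundedBy(ws)

// the whole literal: a capitalized type name, then comma-separated pairs in parentheses
val struct: P[(String, List[(String, String)])] =
  typeName ~ P.repSep(field, 1, P.char(',')).map(_.toList).between(P.char('('), P.char(')'))

// struct.parseAll("Obj(f1 = 1, f2 = 2)") == Right(("Obj", List("f1" -> "1", "f2" -> "2")))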
@@ -2,26 +2,15 @@ package aqua

 import aqua.AquaSpec.spanToId
 import aqua.parser.expr.*
-import aqua.parser.expr.func.{
-  AbilityIdExpr,
-  ArrowExpr,
-  AssignmentExpr,
-  CallArrowExpr,
-  ClosureExpr,
-  ElseOtherwiseExpr,
-  ForExpr,
-  IfExpr,
-  OnExpr,
-  PushToStreamExpr,
-  ReturnExpr
-}
+import aqua.parser.expr.func.{AbilityIdExpr, ArrowExpr, AssignmentExpr, CallArrowExpr, ClosureExpr, ElseOtherwiseExpr, ForExpr, IfExpr, OnExpr, PushToStreamExpr, ReturnExpr}
 import aqua.parser.head.FromExpr.NameOrAbAs
 import aqua.parser.head.{FromExpr, UseFromExpr}
 import aqua.parser.lexer.*
 import aqua.parser.lexer.Token.LiftToken
 import aqua.parser.lift.LiftParser.Implicits.idLiftParser
 import aqua.types.LiteralType.{bool, number, string}
 import aqua.types.{LiteralType, ScalarType}
-import cats.{~>, Id}
+import cats.{Id, ~>}
 import org.scalatest.EitherValues
 import aqua.parser.lift.Span
 import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
@@ -57,7 +46,7 @@ object AquaSpec {
     VarToken[Id](toName(name), toFields(fields))

   implicit def toVarIndex(name: String, idx: Int): VarToken[Id] =
-    VarToken[Id](toName(name), IntoIndex[Id](toNumber(idx), Some(toNumber(idx))) :: Nil)
+    VarToken[Id](toName(name), IntoIndex[Id](toNumber(idx).unit, Some(toNumber(idx))) :: Nil)
   implicit def toLiteral(name: String, t: LiteralType): LiteralToken[Id] = LiteralToken[Id](name, t)
   implicit def toNumber(n: Int): LiteralToken[Id] = LiteralToken[Id](n.toString, number)
   implicit def toBool(n: Boolean): LiteralToken[Id] = LiteralToken[Id](n.toString, bool)
@@ -121,6 +110,9 @@ trait AquaSpec extends EitherValues {
   def parseAssign(str: String): AssignmentExpr[Id] =
     AssignmentExpr.p.parseAll(str).value.mapK(spanToId)

+  def parseData(str: String): StructValueToken[Id] =
+    StructValueToken.dataValue.parseAll(str).value.mapK(spanToId)
+
   def parsePush(str: String): PushToStreamExpr[Id] =
     PushToStreamExpr.p.parseAll(str).value.mapK(spanToId)
parser/src/test/scala/aqua/parser/DataValueExprSpec.scala (new file, 60 lines)
@@ -0,0 +1,60 @@
+package aqua.parser
+
+import aqua.AquaSpec
+import aqua.AquaSpec.{toNumber, toStr, toVar}
+import aqua.parser.expr.ConstantExpr
+import aqua.parser.expr.func.AssignmentExpr
+import aqua.parser.lexer.CollectionToken.Mode.ArrayMode
+import aqua.parser.lexer.{
+  Ability,
+  CallArrowToken,
+  CollectionToken,
+  CustomTypeToken,
+  StructValueToken,
+  LiteralToken,
+  Name,
+  VarToken
+}
+import aqua.types.LiteralType
+import cats.Id
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
+import cats.data.NonEmptyMap
+
+class DataValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
+  import AquaSpec._
+
+  "data value" should "be parsed" in {
+    val one = LiteralToken[Id]("1", LiteralType.number)
+    val two = LiteralToken[Id]("2", LiteralType.number)
+    val three = LiteralToken[Id]("3", LiteralType.number)
+    val a = LiteralToken[Id]("\"a\"", LiteralType.string)
+    val b = LiteralToken[Id]("\"b\"", LiteralType.string)
+    val c = LiteralToken[Id]("\"c\"", LiteralType.string)
+
+    parseData(
+      """Obj(f1 = 1, f2 = "a", f3 = [1,2,3], f4=["b", "c"], f5 =NestedObj(i1 = 2, i2 = "b", i3= funcCall(3), i4 = value), f6=funcCall(1), f7 = Serv.call(2))"""
+    ) should be(
+      StructValueToken(
+        CustomTypeToken[Id]("Obj"),
+        NonEmptyMap.of(
+          "f1" -> one,
+          "f2" -> a,
+          "f3" -> CollectionToken[Id](ArrayMode, List(one, two, three)),
+          "f4" -> CollectionToken[Id](ArrayMode, List(b, c)),
+          "f5" -> StructValueToken(
+            CustomTypeToken[Id]("NestedObj"),
+            NonEmptyMap.of(
+              "i1" -> two,
+              "i2" -> b,
+              "i3" -> CallArrowToken(None, Name[Id]("funcCall"), List(three)),
+              "i4" -> VarToken[Id](Name[Id]("value"), Nil)
+            )
+          ),
+          "f6" -> CallArrowToken(None, Name[Id]("funcCall"), List(one)),
+          "f7" -> CallArrowToken(Option(Ability[Id]("Serv")), Name[Id]("call"), List(two))
+        )
+      )
+    )
+  }
+}
@@ -15,7 +15,9 @@ import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.traverse.*
 import cats.instances.list.*
-import cats.data.NonEmptyList
+import cats.data.{NonEmptyList, NonEmptyMap}
+
+import scala.collection.immutable.SortedMap

 class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
   N: NamesAlgebra[S, Alg],
@@ -63,22 +65,23 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
       case Some(t) =>
         // Prepare property expression: take the last known type and the next op, add next op to accumulator
         ops
-          .foldLeft[Alg[(Option[Type], Chain[PropertyRaw])]]((Some(t) -> Chain.empty).pure[Alg]) {
-            case (acc, op) =>
-              acc.flatMap {
-                // Some(tt) means that the previous property op was resolved successfully
-                case (Some(tt), prop) =>
-                  // Resolve a single property
-                  resolveSingleProperty(tt, op).map {
-                    // Property op resolved, add it to accumulator and update the last known type
-                    case Some(p) => (Some(p.`type`), prop :+ p)
-                    // Property op is not resolved, it's an error, stop iterations
-                    case None => (None, Chain.empty)
-                  }
-                // We have already errored, do nothing
-                case _ => (None, Chain.empty).pure[Alg]
-              }
+          .foldLeft[Alg[(Option[Type], Chain[PropertyRaw])]](
+            (Some(t) -> Chain.empty).pure[Alg]
+          ) { case (acc, op) =>
+            acc.flatMap {
+              // Some(tt) means that the previous property op was resolved successfully
+              case (Some(tt), prop) =>
+                // Resolve a single property
+                resolveSingleProperty(tt, op).map {
+                  // Property op resolved, add it to accumulator and update the last known type
+                  case Some(p) => (Some(p.`type`), prop :+ p)
+                  // Property op is not resolved, it's an error, stop iterations
+                  case None => (None, Chain.empty)
+                }
+              // We have already errored, do nothing
+              case _ => (None, Chain.empty).pure[Alg]
+            }
           }
           .map {
@@ -94,6 +97,31 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
           case None =>
             None.pure[Alg]
         }

+      case dvt @ StructValueToken(typeName, fields) =>
+        T.resolveType(typeName).flatMap {
+          case Some(struct @ StructType(_, _)) =>
+            for {
+              fieldsRawOp: NonEmptyMap[String, Option[ValueRaw]] <- fields.traverse(valueToRaw)
+              fieldsRaw: List[(String, ValueRaw)] = fieldsRawOp.toSortedMap.toList.collect {
+                case (n, Some(vr)) => n -> vr
+              }
+              rawFields = NonEmptyMap.fromMap(SortedMap.from(fieldsRaw))
+              typeFromFieldsWithData = rawFields
+                .map(rf =>
+                  (
+                    StructType(typeName.value, rf.map(_.`type`)),
+                    Some(MakeStructRaw(rf, struct))
+                  )
+                )
+                .getOrElse(BottomType -> None)
+              (typeFromFields, data) = typeFromFieldsWithData
+              isTypesCompatible <- T.ensureTypeMatches(dvt, struct, typeFromFields)
+            } yield data.filter(_ => isTypesCompatible)
+          case _ =>
+            None.pure[Alg]
+        }
+
       case ct @ CollectionToken(_, values) =>
         values.traverse(valueToRaw).map(_.toList.flatten).map(NonEmptyList.fromList).map {
           case Some(raws) if raws.size == values.size =>
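In short, the new StructValueToken branch resolves the declared struct type, types every field expression, derives a struct type from the fields that were actually provided, and only yields MakeStructRaw when the two types are compatible. A plain-Scala sketch of that flow, with stand-in types and hypothetical names, looks like this:

// StructTy stands in for StructType; field types are plain strings here.
final case class StructTy(name: String, fields: Map[String, String])

def structLiteralToRaw(
  declared: Option[StructTy],                // T.resolveType(typeName)
  typedFields: Map[String, Option[String]],  // fields.traverse(valueToRaw)
  accepts: (StructTy, StructTy) => Boolean   // T.ensureTypeMatches
): Option[StructTy] =
  declared.flatMap { decl =>
    // keep only the fields whose expressions type-checked
    val resolved = typedFields.collect { case (n, Some(t)) => n -> t }
    val fromFields = StructTy(decl.name, resolved)
    // the literal is accepted only when the constructed type matches the declared one
    Option.when(accepts(decl, fromFields))(fromFields)
  }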
@@ -2,7 +2,7 @@ package aqua.semantics.rules.types

 import aqua.parser.lexer.*
 import aqua.raw.value.{PropertyRaw, ValueRaw}
-import aqua.types.{ArrowType, Type}
+import aqua.types.{ArrowType, StructType, Type}
 import cats.data.NonEmptyMap
 import cats.data.NonEmptyList
@@ -1,7 +1,7 @@
 package aqua.semantics.rules.types

 import aqua.parser.lexer.*
-import aqua.raw.value.{FunctorRaw, IntoIndexRaw, IntoFieldRaw, PropertyRaw, ValueRaw}
+import aqua.raw.value.{FunctorRaw, IntoFieldRaw, IntoIndexRaw, PropertyRaw, ValueRaw}
 import aqua.semantics.lsp.{TokenDef, TokenTypeInfo}
 import aqua.semantics.rules.{ReportError, StackInterpreter}
 import aqua.types.{
@@ -146,7 +146,8 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
         }.as(Some(IntoFieldRaw(op.value, t)))
       }
     case t =>
-      t.properties.get(op.value)
+      t.properties
+        .get(op.value)
         .fold(
           report(
             op,
@@ -195,6 +196,12 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
         .as(false)
     }

+  private def extractToken(token: Token[S]) =
+    token match {
+      case VarToken(n, properties) => properties.lastOption.getOrElse(n)
+      case t => t
+    }
+
   override def ensureTypeMatches(
     token: Token[S],
     expected: Type,
@@ -202,19 +209,47 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
   ): State[X, Boolean] =
     if (expected.acceptsValueOf(givenType)) State.pure(true)
     else {
-      val notes =
-        if (expected.acceptsValueOf(OptionType(givenType)))
-          "note: Try converting value to optional" :: Nil
-        else if (givenType.acceptsValueOf(OptionType(expected)))
-          "note: You're providing an optional value where normal value is expected." ::
-            "You can extract value with `!`, but be aware it may trigger join behaviour." ::
-            Nil
-        else Nil
-      reportError(
-        token,
-        "Types mismatch." :: s"expected: $expected" :: s"given: $givenType" :: Nil ++ notes
-      )
-        .as(false)
+      (expected, givenType) match {
+        case (StructType(n, valueFields), StructType(typeName, typeFields)) =>
+          // value can have more fields
+          if (valueFields.length < typeFields.length) {
+            report(
+              token,
+              s"Number of fields doesn't match the data type, expected: $expected, given: $givenType"
+            ).as(false)
+          } else {
+            valueFields.toSortedMap.toList.traverse { (name, `type`) =>
+              typeFields.lookup(name) match {
+                case Some(t) =>
+                  val nextToken = extractToken(token match {
+                    case StructValueToken(_, fields) =>
+                      fields.lookup(name).getOrElse(token)
+                    case t => t
+                  })
+                  ensureTypeMatches(nextToken, `type`, t)
+                case None =>
+                  report(
+                    token,
+                    s"Wrong value type, expected: $expected, given: $givenType"
+                  ).as(false)
+              }
+            }.map(_.toList.fold(true)(_ && _))
+          }
+        case _ =>
+          val notes =
+            if (expected.acceptsValueOf(OptionType(givenType)))
+              "note: Try converting value to optional" :: Nil
+            else if (givenType.acceptsValueOf(OptionType(expected)))
+              "note: You're providing an optional value where normal value is expected." ::
+                "You can extract value with `!`, but be aware it may trigger join behaviour." ::
+                Nil
+            else Nil
+          reportError(
+            token,
+            "Types mismatch." :: s"expected: $expected" :: s"given: $givenType" :: Nil ++ notes
+          )
+            .as(false)
+      }
     }

   override def expectNoExport(token: Token[S]): State[X, Unit] =
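The struct branch added to ensureTypeMatches compares structs field by field: each field of one struct is looked up by name in the other and checked recursively, and extractToken narrows the reported token down to the offending field when the value is a struct literal. A minimal sketch of the per-field check, with stand-in string types:

// left/right stand in for the two NonEmptyMaps of fields; fieldOk stands in
// for the recursive ensureTypeMatches call on each pair of field types.
def fieldsMatch(
  left: Map[String, String],
  right: Map[String, String],
  fieldOk: (String, String) => Boolean
): Boolean =
  left.forall { case (name, t) => right.get(name).exists(fieldOk(t, _)) }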
@@ -287,15 +322,15 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
             false
           )
         )
-      else if (frame.token.res.drop(values.length).nonEmpty)
+      else if (frame.token.res.length > values.length)
         Left(
           (
             values.last._1,
-            s"Expected ${frame.token.res.drop(values.length).length} more values to be returned, see return type declaration",
+            s"Expected ${frame.token.res.length - values.length} more values to be returned, see return type declaration",
             false
           )
         )
-      else if (values.toList.drop(frame.token.res.length).nonEmpty)
+      else if (frame.token.res.length < values.length)
         Left(
           (
             values.toList.drop(frame.token.res.length).headOption.getOrElse(values.last)._1,
@@ -304,7 +339,6 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
           )
         )
       else {
-
         frame.arrowType.codomain.toList
           .lazyZip(values.toList)
           .foldLeft[Either[(Token[S], String, Boolean), List[ValueRaw]]](Right(Nil)) {
@@ -1,8 +1,33 @@
 package aqua.semantics.rules.types

-import aqua.raw.value.{FunctorRaw, IntoIndexRaw, PropertyRaw, LiteralRaw, ValueRaw}
-import aqua.parser.lexer.{ArrayTypeToken, ArrowTypeToken, BasicTypeToken, CustomTypeToken, IntoField, IntoIndex, PropertyOp, Name, OptionTypeToken, StreamTypeToken, Token, TopBottomToken, TypeToken}
-import aqua.types.{ArrayType, ArrowType, BottomType, DataType, OptionType, ProductType, StreamType, StructType, TopType, Type}
+import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw}
+import aqua.parser.lexer.{
+  ArrayTypeToken,
+  ArrowTypeToken,
+  BasicTypeToken,
+  CustomTypeToken,
+  IntoField,
+  IntoIndex,
+  Name,
+  OptionTypeToken,
+  PropertyOp,
+  StreamTypeToken,
+  Token,
+  TopBottomToken,
+  TypeToken
+}
+import aqua.types.{
+  ArrayType,
+  ArrowType,
+  BottomType,
+  DataType,
+  OptionType,
+  ProductType,
+  StreamType,
+  StructType,
+  TopType,
+  Type
+}
 import cats.data.Validated.{Invalid, Valid}
 import cats.data.{Chain, NonEmptyChain, ValidatedNec}
 import cats.kernel.Monoid
@@ -37,23 +62,27 @@ case class TypesState[S[_]](
       resolveTypeToken(dtt).collect { case (it: DataType, t) =>
         (OptionType(it), t)
       }
-    case ctt: CustomTypeToken[S] => strict.get(ctt.value).map(t => (t, definitions.get(ctt.value).toList.map(ctt -> _)))
+    case ctt: CustomTypeToken[S] =>
+      strict.get(ctt.value).map(t => (t, definitions.get(ctt.value).toList.map(ctt -> _)))
     case btt: BasicTypeToken[S] => Some((btt.value, Nil))
     case ArrowTypeToken(_, args, res) =>
-      val strictArgs = args.map(_._2).map(resolveTypeToken).collect { case Some((dt: DataType, t)) =>
-        (dt, t)
-      }
+      val strictArgs =
+        args.map(_._2).map(resolveTypeToken).collect { case Some((dt: DataType, t)) =>
+          (dt, t)
+        }
       val strictRes = res.map(resolveTypeToken).collect { case Some((dt: DataType, t)) =>
         (dt, t)
       }
-      Option.when(strictRes.length == res.length && strictArgs.length == args.length){
+      Option.when(strictRes.length == res.length && strictArgs.length == args.length) {
        val (sArgs, argTokens) = strictArgs.unzip
        val (sRes, resTokens) = strictRes.unzip
        (ArrowType(ProductType(sArgs), ProductType(sRes)), argTokens.flatten ++ resTokens.flatten)
      }
   }

-  def resolveArrowDef(ad: ArrowTypeToken[S]): ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])] = {
+  def resolveArrowDef(
+    ad: ArrowTypeToken[S]
+  ): ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])] = {
     val resType = ad.res.map(resolveTypeToken)

     NonEmptyChain
@@ -66,8 +95,16 @@ case class TypesState[S[_]](
             .toRight(tt -> s"Type unresolved")
             .map(argName.map(_.value) -> _)
         }
-        .foldLeft[(Chain[(Token[S], String)], Chain[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))])](
-          (Chain.empty, Chain.empty[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))])
+        .foldLeft[
+          (
+            Chain[(Token[S], String)],
+            Chain[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))]
+          )
+        ](
+          (
+            Chain.empty,
+            Chain.empty[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))]
+          )
         ) {
           case ((errs, argTypes), Right(at)) => (errs, argTypes.append(at))
           case ((errs, argTypes), Left(e)) => (errs.append(e), argTypes)
@@ -75,14 +112,20 @@ case class TypesState[S[_]](

     NonEmptyChain
       .fromChain(errs)
-      .fold[ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])]](
-        Valid{
+      .fold[ValidatedNec[
+        (Token[S], String),
+        (ArrowType, List[(Token[S], CustomTypeToken[S])])
+      ]](
+        Valid {
           val (labels, types) = argTypes.toList.unzip
           val (resTypes, resTokens) = resType.flatten.unzip
-          (ArrowType(
-            ProductType.maybeLabelled(labels.zip(types.map(_._1))),
-            ProductType(resTypes)
-          ), types.map(_._2).flatten ++ resTokens.flatten)
+          (
+            ArrowType(
+              ProductType.maybeLabelled(labels.zip(types.map(_._1))),
+              ProductType(resTypes)
+            ),
+            types.map(_._2).flatten ++ resTokens.flatten
+          )
         }
       )(Invalid(_))
@@ -67,13 +67,13 @@ object CompareTypes {
       lf.keys.forall(rf.contains) && compareTypesList(
         lfView.values.toList,
         rfView.filterKeys(lfNEM.keys.contains).values.toList
-      ) == -1.0
+      ) == 1.0
     ) 1.0
     else if (
       rf.keys.forall(lf.contains) && compareTypesList(
         lfView.filterKeys(rfNEM.keys.contains).values.toList,
         rfView.values.toList
-      ) == 1.0
+      ) == -1.0
     ) -1.0
     else NaN
   }
@@ -127,8 +127,8 @@ object CompareTypes {
     case (x: OptionType, y: StreamType) => apply(x.element, y.element)
     case (x: OptionType, y: ArrayType) => apply(x.element, y.element)
     case (x: StreamType, y: StreamType) => apply(x.element, y.element)
-    case (StructType(_, xFields), StructType(_, yFields)) =>
-      compareStructs(xFields, yFields)
+    case (StructType(_, lFields), StructType(_, rFields)) =>
+      compareStructs(lFields, rFields)

     // Products
     case (l: ProductType, r: ProductType) => compareProducts(l, r)
@@ -139,8 +139,8 @@ object CompareTypes {
       val cmpCodom = apply(lcodom, rcodom)

       if (cmpDom == 0 && cmpCodom == 0) 0
-      else if (cmpDom >= 0 && cmpCodom <= 0) -1.0
       else if (cmpDom <= 0 && cmpCodom >= 0) 1.0
+      else if (cmpDom >= 0 && cmpCodom <= 0) -1.0
       else NaN

     case _ =>
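The sign fixes above align compareStructs with the rest of CompareTypes: a struct that keeps every required field (possibly with narrower field types, and possibly with extra fields) is a subtype, so it is accepted where the wider struct is expected. The TypeSpec hunk below pins this down; here is a self-contained sketch of the acceptance relation those tests encode, with illustrative types:

sealed trait Ty
case object U32 extends Ty
case object U64 extends Ty
case object Str extends Ty
final case class Struct(fields: Map[String, Ty]) extends Ty

// accepts(expected, found): can a value of type `found` be used where `expected` is required?
def accepts(expected: Ty, found: Ty): Boolean = (expected, found) match {
  case (U64, U32) => true // a u32 value widens into u64
  case (Struct(ef), Struct(ff)) =>
    // every expected field must be present with an acceptable type; extra fields are fine
    ef.forall { case (name, t) => ff.get(name).exists(f => accepts(t, f)) }
  case (e, f) => e == f
}

// mirrors "structs of scalars": one{field: u64} accepts two{field: u32, other: string}
assert(accepts(Struct(Map("field" -> U64)), Struct(Map("field" -> U32, "other" -> Str))))
assert(!accepts(Struct(Map("field" -> U32, "other" -> Str)), Struct(Map("field" -> U64))))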
@@ -65,15 +65,23 @@ class TypeSpec extends AnyFlatSpec with Matchers {
   }

   "structs of scalars" should "be variant" in {
-    val one: Type = StructType("one", NonEmptyMap.of("field" -> u32))
-    val two: Type = StructType("two", NonEmptyMap.of("field" -> u64, "other" -> string))
-    val three: Type = StructType("three", NonEmptyMap.of("field" -> u32))
+    val one: Type = StructType("one", NonEmptyMap.of("field" -> u64))
+    val two: Type = StructType("two", NonEmptyMap.of("field" -> u32, "other" -> string))
+    val three: Type = StructType("three", NonEmptyMap.of("field" -> u64))

     accepts(one, two) should be(true)
     accepts(two, one) should be(false)
     PartialOrder[Type].eqv(one, three) should be(true)
   }

+  "structs of scalars with literals" should "be variant" in {
+    val one: Type = StructType("one", NonEmptyMap.of("field" -> u64))
+    val two: Type = StructType("two", NonEmptyMap.of("field" -> LiteralType.number, "other" -> string))
+
+    accepts(one, two) should be(true)
+    accepts(two, one) should be(false)
+  }
+
   "streams" should "be accepted as an array, but not vice versa" in {
     val stream: Type = StreamType(bool)
     val array: Type = ArrayType(bool)