feat(compiler): Restrict abilities usage [fixes LNG-208] (#854)

* Refactor

* Refactor

* Refactor

* Remove ScopeRaw

* Refactor, forbid exporting

* Add export checks

* Refactor

* Forbid exporting abilities

* Fix integration tests

* Forbid implicit ability export

* Simplify exports

* Forbid using non data types in collections

* Forbid inappropriate struct field types

* Refactor

* Add export tests

* Add collection tests

* Add struct fields test

* Fixes

* Fix
This commit is contained in:
InversionSpaces 2023-08-24 15:09:39 +02:00 committed by GitHub
parent 3b033852f9
commit 2a0b207633
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 555 additions and 321 deletions

View File

@ -1,3 +1,5 @@
aqua ClosureReturnRename
export lng193Bug export lng193Bug
func getClosure(arg: u16, peer: string) -> u16 -> u16: func getClosure(arg: u16, peer: string) -> u16 -> u16:

View File

@ -1,3 +1,5 @@
aqua ReturnArrow
import "@fluencelabs/aqua-lib/builtin.aqua" import "@fluencelabs/aqua-lib/builtin.aqua"
export callReturnedArrow, callReturnedChainArrow export callReturnedArrow, callReturnedChainArrow

View File

@ -3,10 +3,11 @@ package aqua.lsp
import aqua.parser.lexer.{Ability, LiteralToken, Name, NamedTypeToken, Token} import aqua.parser.lexer.{Ability, LiteralToken, Name, NamedTypeToken, Token}
import aqua.raw.{RawContext, RawPart} import aqua.raw.{RawContext, RawPart}
import aqua.types.{ArrowType, Type} import aqua.types.{ArrowType, Type}
import RawContext.semiRC
import aqua.semantics.header.Picker
import cats.{Monoid, Semigroup} import cats.{Monoid, Semigroup}
import cats.syntax.monoid.* import cats.syntax.monoid.*
import RawContext.semiRC
import aqua.semantics.header.{Picker, PickerOps}
// Context with info that necessary for language server // Context with info that necessary for language server
case class LspContext[S[_]]( case class LspContext[S[_]](
@ -51,31 +52,40 @@ object LspContext {
} }
given [S[_]]: Picker[LspContext[S]] with { given [S[_]]: Picker[LspContext[S]] with {
import aqua.semantics.header.Picker.*
private def ops[S[_]](ctx: LspContext[S]) = PickerOps[RawContext](ctx.raw)
override def blank: LspContext[S] = LspContext[S](Picker[RawContext].blank, Map.empty) override def blank: LspContext[S] = LspContext[S](Picker[RawContext].blank, Map.empty)
override def exports(ctx: LspContext[S]): Option[Map[String, Option[String]]] = ops(ctx).exports override def exports(ctx: LspContext[S]): Map[String, Option[String]] = ctx.raw.exports
override def isAbility(ctx: LspContext[S], name: String): Boolean =
ctx.raw.isAbility(name)
override def funcReturnAbilityOrArrow(ctx: LspContext[S], name: String): Boolean = override def funcReturnAbilityOrArrow(ctx: LspContext[S], name: String): Boolean =
ops(ctx).funcReturnAbilityOrArrow(name) ctx.raw.funcReturnAbilityOrArrow(name)
override def funcNames(ctx: LspContext[S]): List[String] = ops(ctx).funcNames
override def funcAcceptAbility(ctx: LspContext[S], name: String): Boolean =
ctx.raw.funcAcceptAbility(name)
override def funcNames(ctx: LspContext[S]): Set[String] = ctx.raw.funcNames
override def definedAbilityNames(ctx: LspContext[S]): Set[String] =
ctx.raw.definedAbilityNames
override def addPart(ctx: LspContext[S], part: (LspContext[S], RawPart)): LspContext[S] = override def addPart(ctx: LspContext[S], part: (LspContext[S], RawPart)): LspContext[S] =
ctx.copy(raw = ops(ctx).addPart(part._1.raw -> part._2)) ctx.copy(raw = ctx.raw.addPart(part._1.raw -> part._2))
override def setInit(ctx: LspContext[S], ctxInit: Option[LspContext[S]]): LspContext[S] = override def setInit(ctx: LspContext[S], ctxInit: Option[LspContext[S]]): LspContext[S] =
ctx.copy(raw = ops(ctx).setInit(ctxInit.map(_.raw))) ctx.copy(raw = ctx.raw.setInit(ctxInit.map(_.raw)))
override def all(ctx: LspContext[S]): Set[String] = override def all(ctx: LspContext[S]): Set[String] =
ops(ctx).all ctx.raw.all
override def module(ctx: LspContext[S]): Option[String] = ops(ctx).module override def module(ctx: LspContext[S]): Option[String] = ctx.raw.module
override def declares(ctx: LspContext[S]): Set[String] = ops(ctx).declares override def declares(ctx: LspContext[S]): Set[String] = ctx.raw.declares
override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] = override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] =
val prefix = name + "." val prefix = name + "."
ctx.copy( ctx.copy(
raw = ops(ctx).setAbility(name, ctxAb.raw), raw = ctx.raw.setAbility(name, ctxAb.raw),
tokens = ctx.tokens ++ ctxAb.tokens.map(kv => (prefix + kv._1) -> kv._2) tokens = ctx.tokens ++ ctxAb.tokens.map(kv => (prefix + kv._1) -> kv._2)
) )
@ -84,13 +94,13 @@ object LspContext {
name: Option[String], name: Option[String],
declares: Set[String] declares: Set[String]
): LspContext[S] = ): LspContext[S] =
ctx.copy(raw = ops(ctx).setOptModule(name, declares)) ctx.copy(raw = ctx.raw.setOptModule(name, declares))
override def setExports( override def setExports(
ctx: LspContext[S], ctx: LspContext[S],
exports: Map[String, Option[String]] exports: Map[String, Option[String]]
): LspContext[S] = ): LspContext[S] =
ctx.copy(raw = ops(ctx).setExports(exports)) ctx.copy(raw = ctx.raw.setExports(exports))
override def pick( override def pick(
ctx: LspContext[S], ctx: LspContext[S],
@ -107,7 +117,7 @@ object LspContext {
} }
}.getOrElse(ctx.tokens) }.getOrElse(ctx.tokens)
ops(ctx) ctx.raw
.pick(name, rename, declared) .pick(name, rename, declared)
.map(rc => .map(rc =>
ctx.copy( ctx.copy(
@ -122,11 +132,11 @@ object LspContext {
) )
) )
override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ops(ctx).pickHeader) override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ctx.raw.pickHeader)
override def pickDeclared( override def pickDeclared(
ctx: LspContext[S] ctx: LspContext[S]
)(using Semigroup[LspContext[S]]): LspContext[S] = )(using Semigroup[LspContext[S]]): LspContext[S] =
ctx.copy(raw = ops(ctx).pickDeclared) ctx.copy(raw = ctx.raw.pickDeclared)
} }
} }

View File

@ -1,13 +1,15 @@
package aqua.raw package aqua.raw
import aqua.raw.arrow.FuncRaw
import aqua.raw.value.ValueRaw
import aqua.types.{StructType, Type, AbilityType}
import cats.Monoid import cats.Monoid
import cats.Semigroup import cats.Semigroup
import cats.data.Chain import cats.data.Chain
import cats.data.NonEmptyMap import cats.data.NonEmptyMap
import aqua.raw.arrow.FuncRaw
import aqua.raw.value.ValueRaw
import aqua.types.{StructType, Type}
import cats.syntax.monoid.* import cats.syntax.monoid.*
import cats.syntax.option.*
import scala.collection.immutable.SortedMap import scala.collection.immutable.SortedMap
@ -31,7 +33,7 @@ case class RawContext(
init: Option[RawContext] = None, init: Option[RawContext] = None,
module: Option[String] = None, module: Option[String] = None,
declares: Set[String] = Set.empty, declares: Set[String] = Set.empty,
exports: Option[Map[String, Option[String]]] = None, exports: Map[String, Option[String]] = Map.empty,
parts: Chain[(RawContext, RawPart)] = Chain.empty, parts: Chain[(RawContext, RawPart)] = Chain.empty,
abilities: Map[String, RawContext] = Map.empty abilities: Map[String, RawContext] = Map.empty
) { ) {
@ -46,6 +48,13 @@ case class RawContext(
private def collectPartsMap[T](f: PartialFunction[RawPart, T]): Map[String, T] = private def collectPartsMap[T](f: PartialFunction[RawPart, T]): Map[String, T] =
parts.collect { case (_, p) if f.isDefinedAt(p) => p.name -> f(p) }.toList.toMap parts.collect { case (_, p) if f.isDefinedAt(p) => p.name -> f(p) }.toList.toMap
private def all[T](what: RawContext => Map[String, T], prefix: String = ""): Map[String, T] =
abilities
.foldLeft(what(this)) { case (ts, (k, v)) =>
ts ++ v.all(what, k + ".")
}
.map(prefixFirst(prefix, _))
lazy val services: Map[String, ServiceRaw] = collectPartsMap { case srv: ServiceRaw => srv } lazy val services: Map[String, ServiceRaw] = collectPartsMap { case srv: ServiceRaw => srv }
lazy val allServices: Map[String, ServiceRaw] = lazy val allServices: Map[String, ServiceRaw] =
@ -72,15 +81,14 @@ case class RawContext(
c.value c.value
} }
private def all[T](what: RawContext => Map[String, T], prefix: String = ""): Map[String, T] =
abilities
.foldLeft(what(this)) { case (ts, (k, v)) =>
ts ++ v.all(what, k + ".")
}
.map(prefixFirst(prefix, _))
lazy val allValues: Map[String, ValueRaw] = all(_.values) lazy val allValues: Map[String, ValueRaw] = all(_.values)
lazy val definedAbilities: Map[String, AbilityType] =
collectPartsMap { case TypeRaw(_, at: AbilityType) => at }
lazy val allDefinedAbilities: Map[String, AbilityType] =
all(_.definedAbilities)
def `type`(name: String): Option[StructType] = def `type`(name: String): Option[StructType] =
NonEmptyMap NonEmptyMap
.fromMap( .fromMap(
@ -92,7 +100,14 @@ case class RawContext(
) )
.map(StructType(name, _)) .map(StructType(name, _))
override def toString: String = s"module: $module\ndeclares: $declares\nexports: $exports" override def toString: String =
s"""|module: ${module.getOrElse("unnamed")}
|declares: ${declares.mkString(", ")}
|exports: ${exports.map { case (name, rename) =>
rename.fold(name)(name + " as " + _)
}.mkString(", ")}
|parts: ${parts.map { case (_, part) => part.name }.toList.mkString(", ")}
|abilities: ${abilities.keys.mkString(", ")}""".stripMargin
} }
object RawContext { object RawContext {
@ -104,7 +119,7 @@ object RawContext {
x.init.flatMap(xi => y.init.map(xi |+| _)) orElse x.init orElse y.init, x.init.flatMap(xi => y.init.map(xi |+| _)) orElse x.init orElse y.init,
x.module orElse y.module, x.module orElse y.module,
x.declares ++ y.declares, x.declares ++ y.declares,
x.exports.flatMap(xe => y.exports.map(xe ++ _)) orElse x.exports orElse y.exports, x.exports ++ y.exports,
x.parts ++ y.parts, x.parts ++ y.parts,
x.abilities ++ y.abilities x.abilities ++ y.abilities
) )

View File

@ -1,19 +0,0 @@
package aqua.raw
import aqua.raw.arrow.FuncRaw
import aqua.types.{ArrowType, StructType, Type}
import cats.data.NonEmptyMap
import aqua.raw.value.ValueRaw
import scala.collection.immutable.SortedMap
case class ScopeRaw(
name: String,
fieldsAndArrows: NonEmptyMap[String, Type]
) extends RawPart {
lazy val rawPartType: StructType = StructType(name, fieldsAndArrows)
override def rename(s: String): RawPart = copy(name = s)
}

View File

@ -118,11 +118,7 @@ object AquaContext extends Logging {
logger.trace("raw: " + rawContext) logger.trace("raw: " + rawContext)
logger.trace("ctx: " + ctx) logger.trace("ctx: " + ctx)
rawContext.module rawContext.exports
.fold(
// if `module` header is not defined, then export everything defined in rawContext
rawContext.parts.map(_._2).map(_.name).map(_ -> Option.empty[String]).toList.toMap
)(_ => rawContext.exports.getOrElse(Map.empty))
.foldLeft( .foldLeft(
// Module name is what persists // Module name is what persists
blank.copy( blank.copy(

View File

@ -20,11 +20,7 @@ case class FieldTypeExpr[F[_]](name: Name[F], `type`: DataTypeToken[F])
object FieldTypeExpr extends Expr.Leaf { object FieldTypeExpr extends Expr.Leaf {
override val p: Parser[FieldTypeExpr[Span.S]] = override val p: Parser[FieldTypeExpr[Span.S]] =
((Name.p <* ` : `) ~ (Parser ((Name.p <* ` : `) ~ DataTypeToken.`datatypedef`).map { case (name, t) =>
.not(StreamTypeToken.`streamtypedef`)
.withContext(
"Data fields cannot be of stream type (stream is designated by '*')."
) *> DataTypeToken.`datatypedef`)).map { case (name, t) =>
FieldTypeExpr(name, t) FieldTypeExpr(name, t)
} }
} }

View File

@ -2,7 +2,7 @@ package aqua.parser.expr
import aqua.parser.Expr import aqua.parser.Expr
import aqua.parser.lexer.Token.* import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, NamedTypeToken, ValueToken} import aqua.parser.lexer.{NamedTypeToken, ValueToken}
import aqua.parser.lift.LiftParser import aqua.parser.lift.LiftParser
import cats.Comonad import cats.Comonad
import cats.parse.Parser import cats.parse.Parser

View File

@ -3,10 +3,10 @@ package aqua.parser.expr.func
import aqua.parser.Expr import aqua.parser.Expr
import aqua.parser.expr.func.AbilityIdExpr import aqua.parser.expr.func.AbilityIdExpr
import aqua.parser.lexer.Token.* import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, NamedTypeToken, ValueToken} import aqua.parser.lexer.{NamedTypeToken, ValueToken}
import aqua.parser.lift.LiftParser import aqua.parser.lift.LiftParser
import cats.parse.Parser as P import cats.parse.Parser as P
import cats.{Comonad, ~>} import cats.{~>, Comonad}
import aqua.parser.lift.Span import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan} import aqua.parser.lift.Span.{P0ToSpan, PToSpan}

View File

@ -3,7 +3,7 @@ package aqua.parser.expr.func
import aqua.parser.Expr import aqua.parser.Expr
import aqua.parser.expr.func.CallArrowExpr import aqua.parser.expr.func.CallArrowExpr
import aqua.parser.lexer.Token.* import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, CallArrowToken, Name, ValueToken, VarToken} import aqua.parser.lexer.{CallArrowToken, Name, ValueToken, VarToken}
import aqua.parser.lift.{LiftParser, Span} import aqua.parser.lift.{LiftParser, Span}
import aqua.parser.lift.Span.{P0ToSpan, PToSpan} import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
import cats.data.NonEmptyList import cats.data.NonEmptyList

View File

@ -1,12 +1,14 @@
package aqua.parser.head package aqua.parser.head
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, Name}
import aqua.parser.lift.LiftParser
import cats.Comonad import cats.Comonad
import cats.data.NonEmptyList import cats.data.NonEmptyList
import cats.parse.Parser as P import cats.parse.Parser as P
import cats.~> import cats.~>
import cats.syntax.bifunctor.*
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, Name}
import aqua.parser.lift.LiftParser
import aqua.parser.lift.Span import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan} import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
@ -16,7 +18,9 @@ trait FromExpr[F[_]] {
object FromExpr { object FromExpr {
def mapK[F[_], K[_]: Comonad](imports: NonEmptyList[FromExpr.NameOrAbAs[F]])(fk: F ~> K): NonEmptyList[FromExpr.NameOrAbAs[K]] = def mapK[F[_], K[_]: Comonad](
imports: NonEmptyList[FromExpr.NameOrAbAs[F]]
)(fk: F ~> K): NonEmptyList[FromExpr.NameOrAbAs[K]] =
imports.map { imports.map {
case Left((n, nOp)) => Left((n.mapK(fk), nOp.map(_.mapK(fk)))) case Left((n, nOp)) => Left((n.mapK(fk), nOp.map(_.mapK(fk))))
case Right(a, aOp) => Right((a.mapK(fk), aOp.map(_.mapK(fk)))) case Right(a, aOp) => Right((a.mapK(fk), aOp.map(_.mapK(fk))))
@ -28,11 +32,17 @@ object FromExpr {
Name.nameAs.map(Left(_)) | Ability.abAs.map(Right(_)) Name.nameAs.map(Left(_)) | Ability.abAs.map(Right(_))
val importFrom: P[NonEmptyList[NameOrAbAs[Span.S]]] = val importFrom: P[NonEmptyList[NameOrAbAs[Span.S]]] =
comma[NameOrAbAs[Span.S]](nameOrAbAs) <* ` ` <* `from` comma(nameOrAbAs) <* ` ` <* `from`
def show[F[_]](ne: NonEmptyList[NameOrAbAs[F]]): String = def show[F[_]](ne: NonEmptyList[NameOrAbAs[F]]): String =
ne.toList.map(_.fold( ne.toList
non => non._1.value + non._2.map(_.value).fold("")(" as "+_), .map(
non => non._1.value + non._2.map(_.value).fold("")(" as "+_) _.bimap(
)).mkString(", ") _.bimap(_.value, _.map(_.value)),
_.bimap(_.value, _.map(_.value))
).map { case (name, rename) =>
s"$name${rename.fold("")(" as " + _)}"
}
)
.mkString(", ")
} }

View File

@ -43,7 +43,7 @@ case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: Data
object StreamTypeToken { object StreamTypeToken {
val `streamtypedef`: P[StreamTypeToken[Span.S]] = val `streamtypedef`: P[StreamTypeToken[Span.S]] =
((`*`.lift <* P.not(`*`).withContext("Nested streams '**type' is prohibited")) ((`*`.lift <* P.not(`*`).withContext("Nested streams '**type' are prohibited"))
~ DataTypeToken.`withoutstreamdatatypedef`) ~ DataTypeToken.`withoutstreamdatatypedef`)
.map(ud => StreamTypeToken(ud._1, ud._2)) .map(ud => StreamTypeToken(ud._1, ud._2))

View File

@ -3,15 +3,7 @@ package aqua.parser
import aqua.AquaSpec import aqua.AquaSpec
import aqua.parser.expr.{FuncExpr, RootExpr} import aqua.parser.expr.{FuncExpr, RootExpr}
import aqua.parser.expr.func.{ArrowExpr, AssignmentExpr, CallArrowExpr, ClosureExpr, ReturnExpr} import aqua.parser.expr.func.{ArrowExpr, AssignmentExpr, CallArrowExpr, ClosureExpr, ReturnExpr}
import aqua.parser.lexer.{ import aqua.parser.lexer.{CallArrowToken, IntoArrow, NamedTypeToken, PropertyToken, Token, VarToken}
Ability,
CallArrowToken,
IntoArrow,
NamedTypeToken,
PropertyToken,
Token,
VarToken
}
import aqua.types.ScalarType.string import aqua.types.ScalarType.string
import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers import org.scalatest.matchers.should.Matchers

View File

@ -5,7 +5,6 @@ import aqua.AquaSpec.{toNumber, toStr, toVar}
import aqua.parser.expr.ConstantExpr import aqua.parser.expr.ConstantExpr
import aqua.parser.expr.func.AssignmentExpr import aqua.parser.expr.func.AssignmentExpr
import aqua.parser.lexer.{ import aqua.parser.lexer.{
Ability,
CallArrowToken, CallArrowToken,
CollectionToken, CollectionToken,
IntoArrow, IntoArrow,

View File

@ -1,7 +1,7 @@
package aqua.semantics.expr package aqua.semantics.expr
import aqua.parser.expr.AbilityExpr import aqua.parser.expr.AbilityExpr
import aqua.raw.{Raw, ScopeRaw, ServiceRaw, TypeRaw} import aqua.raw.{Raw, ServiceRaw, TypeRaw}
import aqua.parser.lexer.{Name, NamedTypeToken} import aqua.parser.lexer.{Name, NamedTypeToken}
import aqua.raw.{Raw, ServiceRaw} import aqua.raw.{Raw, ServiceRaw}
import aqua.semantics.Prog import aqua.semantics.Prog
@ -29,11 +29,9 @@ class AbilitySem[S[_]](val expr: AbilityExpr[S]) extends AnyVal {
Prog.after_( Prog.after_(
for { for {
defs <- D.purgeDefs(expr.name) defs <- D.purgeDefs(expr.name)
abType = defs.map(fields => AbilityType(expr.name.value, fields)) fields = defs.view.mapValues(d => d.name -> d.`type`).toMap
result <- abType.flatTraverse(t => abilityType <- T.defineAbilityType(expr.name, fields)
T.defineNamedType(expr.name, t) result = abilityType.map(st => TypeRaw(expr.name.value, st))
.map(Option.when(_)(TypeRaw(expr.name.value, t)))
)
} yield result.getOrElse(Raw.error("Ability types unresolved")) } yield result.getOrElse(Raw.error("Ability types unresolved"))
) )
} }

View File

@ -7,8 +7,10 @@ import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.StructType import aqua.types.StructType
import cats.syntax.functor.* import cats.syntax.functor.*
import cats.syntax.applicative.* import cats.syntax.applicative.*
import cats.syntax.traverse.*
import cats.syntax.flatMap.* import cats.syntax.flatMap.*
import cats.Monad import cats.Monad
@ -19,20 +21,12 @@ class DataStructSem[S[_]](val expr: DataStructExpr[S]) extends AnyVal {
T: TypesAlgebra[S, Alg] T: TypesAlgebra[S, Alg]
): Prog[Alg, Raw] = ): Prog[Alg, Raw] =
Prog.after((_: Raw) => Prog.after((_: Raw) =>
D.purgeDefs(expr.name).flatMap { for {
case Some(fields) => defs <- D.purgeDefs(expr.name)
val t = StructType(expr.name.value, fields) fields = defs.view.mapValues(d => d.name -> d.`type`).toMap
T.defineNamedType(expr.name, t).map { structType <- T.defineStructType(expr.name, fields)
case true => result = structType.map(st => TypeRaw(expr.name.value, st))
TypeRaw( } yield result.getOrElse(Raw.error("Data struct types unresolved"))
expr.name.value,
t
): Raw
case false =>
Raw.error("Data struct types unresolved")
}
case None => Raw.error("Data struct types unresolved").pure[Alg]
}
) )
} }

View File

@ -2,22 +2,26 @@ package aqua.semantics.header
import aqua.parser.Ast import aqua.parser.Ast
import aqua.parser.head.* import aqua.parser.head.*
import aqua.parser.lexer.{Ability, Token} import aqua.parser.lexer.{Ability, Name, Token}
import aqua.semantics.header.Picker.* import aqua.semantics.header.Picker.*
import aqua.semantics.{HeaderError, SemanticError} import aqua.semantics.{HeaderError, SemanticError}
import cats.data.* import cats.data.*
import cats.data.Validated.{invalidNec, validNec} import cats.data.Validated.*
import cats.free.Cofree import cats.free.Cofree
import cats.instances.list.* import cats.instances.list.*
import cats.instances.option.* import cats.instances.option.*
import cats.kernel.Semigroup import cats.kernel.Semigroup
import cats.syntax.option.*
import cats.syntax.foldable.* import cats.syntax.foldable.*
import cats.syntax.functor.* import cats.syntax.functor.*
import cats.syntax.semigroup.* import cats.syntax.semigroup.*
import cats.syntax.validated.* import cats.syntax.validated.*
import cats.syntax.bifunctor.*
import cats.syntax.apply.*
import cats.{Comonad, Eval, Monoid} import cats.{Comonad, Eval, Monoid}
class HeaderHandler[S[_]: Comonad, C](implicit class HeaderHandler[S[_]: Comonad, C](using
acm: Monoid[C], acm: Monoid[C],
headMonoid: Monoid[HeaderSem[S, C]], headMonoid: Monoid[HeaderSem[S, C]],
picker: Picker[C] picker: Picker[C]
@ -34,8 +38,27 @@ class HeaderHandler[S[_]: Comonad, C](implicit
Eval.later(parent |+| children.combineAll) Eval.later(parent |+| children.combineAll)
// Error generator with token pointer // Error generator with token pointer
private def error[T](token: Token[S], msg: String): ValidatedNec[SemanticError[S], T] = private def error[T](
invalidNec(HeaderError(token, msg)) token: Token[S],
msg: String
): SemanticError[S] = HeaderError(token, msg)
private def exportFuncChecks(ctx: C, token: Token[S], name: String): ResT[S, Unit] =
Validated.condNec(
!ctx.funcReturnAbilityOrArrow(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it returns an arrow or an ability"
)
) combine Validated.condNec(
!ctx.funcAcceptAbility(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it accepts an ability"
)
)
def sem(imports: Map[String, C], header: Ast.Head[S]): Res[S, C] = { def sem(imports: Map[String, C], header: Ast.Head[S]): Res[S, C] = {
// Resolve a filename from given imports or fail // Resolve a filename from given imports or fail
@ -43,52 +66,43 @@ class HeaderHandler[S[_]: Comonad, C](implicit
imports imports
.get(f.fileValue) .get(f.fileValue)
.map(_.pickDeclared) .map(_.pickDeclared)
.fold[ResAC[S]]( .toValidNec(
error(f.token, "Cannot resolve the import") error(f.token, "Cannot resolve the import")
)(validNec) )
// Get part of the declared context (for import/use ... from ... expressions) // Get part of the declared context (for import/use ... from ... expressions)
def getFrom(f: FromExpr[S], ctx: C): ResAC[S] = def getFrom(f: FromExpr[S], ctx: C): ResAC[S] =
f.imports ctx.pickHeader.validNec |+| f.imports
.map[ResAC[S]]( .map(
_.fold[ResAC[S]]( _.bimap(
{ case (n, rn) => _.bimap(n => (n, n.value), _.map(_.value)),
_.bimap(n => (n, n.value), _.map(_.value))
).merge match {
case ((token, name), rename) =>
ctx ctx
.pick(n.value, rn.map(_.value), ctx.module.nonEmpty) .pick(name, rename, ctx.module.nonEmpty)
.map(validNec) .toValidNec(
.getOrElse(
error( error(
n, token,
s"Imported file `declares ${ctx.declares.mkString(", ")}`, no ${n.value} declared. Try adding `declares ${n.value}` to that file." s"Imported file `declares ${ctx.declares.mkString(", ")}`, no $name declared. Try adding `declares $name` to that file."
)
)
},
{ case (n, rn) =>
ctx
.pick(n.value, rn.map(_.value), ctx.module.nonEmpty)
.map(validNec)
.getOrElse(
error(
n,
s"Imported file `declares ${ctx.declares.mkString(", ")}`, no ${n.value} declared. Try adding `declares ${n.value}` to that file."
) )
) )
} }
) )
) .combineAll
.foldLeft[ResAC[S]](validNec(ctx.pickHeader))(_ |+| _)
// Convert an imported context into a module (ability) // Convert an imported context into a module (ability)
def toModule(ctx: C, tkn: Token[S], rename: Option[Ability[S]]): ResAC[S] = def toModule(ctx: C, tkn: Token[S], rename: Option[Ability[S]]): ResAC[S] =
rename rename
.map(_.value) .map(_.value)
.orElse(ctx.module) .orElse(ctx.module)
.fold[ResAC[S]]( .map(modName => picker.blank.setAbility(modName, ctx))
.toValidNec(
error( error(
tkn, tkn,
s"Used module has no `module` header. Please add `module` header or use ... as ModuleName, or switch to import" s"Used module has no `module` header. Please add `module` header or use ... as ModuleName, or switch to import"
) )
)(modName => validNec(picker.blank.setAbility(modName, ctx))) )
// Handler for every header expression, will be combined later // Handler for every header expression, will be combined later
val onExpr: PartialFunction[HeaderExpr[S], Res[S, C]] = { val onExpr: PartialFunction[HeaderExpr[S], Res[S, C]] = {
@ -110,18 +124,17 @@ class HeaderHandler[S[_]: Comonad, C](implicit
) )
} else } else
( (
declareNames.map(n => n.value -> n) ::: declareCustom.map(a => a.value -> a) declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
).map[ValidatedNec[SemanticError[S], Int]] { case (n, t) => ).map { case (n, t) =>
ctx ctx
.pick(n, None, ctx.module.nonEmpty) .pick(n, None, ctx.module.nonEmpty)
// We just validate, nothing more .toValidNec(
.as(validNec(1))
.getOrElse(
error( error(
t, t,
s"`$n` is expected to be declared, but declaration is not found in the file" s"`$n` is expected to be declared, but declaration is not found in the file"
) )
) )
.void
}.combineAll }.combineAll
.as( .as(
// TODO: why module name and declares is lost? where is it lost? // TODO: why module name and declares is lost? where is it lost?
@ -133,6 +146,7 @@ class HeaderHandler[S[_]: Comonad, C](implicit
case f @ ImportExpr(_) => case f @ ImportExpr(_) =>
// Import everything from a file // Import everything from a file
resolve(f).map(fc => HeaderSem[S, C](fc, (c, _) => validNec(c))) resolve(f).map(fc => HeaderSem[S, C](fc, (c, _) => validNec(c)))
case f @ ImportFromExpr(_, _) => case f @ ImportFromExpr(_, _) =>
// Import, map declarations // Import, map declarations
resolve(f) resolve(f)
@ -165,37 +179,37 @@ class HeaderHandler[S[_]: Comonad, C](implicit
// Nothing there // Nothing there
picker.blank, picker.blank,
(ctx, initCtx) => (ctx, initCtx) =>
pubs
.map(
_.fold[(Token[S], String, Option[String])](
nrn => (nrn._1, nrn._1.value, nrn._2.map(_.value)),
nrn => (nrn._1, nrn._1.value, nrn._2.map(_.value))
)
)
.map { case (token, name, rename) =>
val sumCtx = initCtx |+| ctx val sumCtx = initCtx |+| ctx
if (sumCtx.funcReturnAbilityOrArrow(name)) pubs
error( .map(
token, _.bimap(
s"The function '$name' cannot be exported, because it returns arrow type or ability type" _.bimap(n => (n, n.value), _.map(_.value)),
_.bimap(n => (n, n.value), _.map(_.value))
).merge
) )
else .map { case ((token, name), rename) =>
sumCtx sumCtx
.pick(name, rename, declared = false) .pick(name, rename, declared = false)
.as(Map(name -> rename).validNec) .as(Map(name -> rename))
.getOrElse( .toValid(
error( error(
token, token,
s"File has no $name declaration or import, cannot export, available funcs: ${sumCtx.funcNames s"File has no $name declaration or import, " +
.mkString(", ")}" s"cannot export, available functions: ${sumCtx.funcNames.mkString(", ")}"
) )
) )
.ensure(
error(
token,
s"Can not export '$name' as it is an ability"
)
)(_ => !sumCtx.isAbility(name))
.toValidatedNec <* exportFuncChecks(sumCtx, token, name)
} }
.foldLeft[ResT[S, Map[String, Option[String]]]]( .prepend(validNec(ctx.exports))
validNec(ctx.exports.getOrElse(Map.empty)) .combineAll
)(_ |+| _) .map(ctx.setExports)
.map(expCtx => ctx.setExports(expCtx))
) )
) )
@ -204,7 +218,26 @@ class HeaderHandler[S[_]: Comonad, C](implicit
validNec( validNec(
HeaderSem[S, C]( HeaderSem[S, C](
acm.empty, acm.empty,
(ctx, _) => validNec(ctx.setExports(Map.empty)) (ctx, initCtx) => {
val sumCtx = initCtx |+| ctx
ctx.funcNames.toList
.traverse_(name =>
// TODO: Provide better token for this error
exportFuncChecks(sumCtx, token, name)
)
.combine(
ctx.definedAbilityNames.toList.traverse_(name =>
// TODO: Provide better token for this error
error(token, s"Can not export '$name' as it is an ability ").invalidNec
)
)
.as(
// Export everything
ctx.setExports(
ctx.all.map(_ -> None).toMap
)
)
}
) )
) )
@ -214,7 +247,7 @@ class HeaderHandler[S[_]: Comonad, C](implicit
Cofree Cofree
.cata[Chain, HeaderExpr[S], Res[S, C]](header) { case (expr, children) => .cata[Chain, HeaderExpr[S], Res[S, C]](header) { case (expr, children) =>
onExpr.lift.apply(expr).fold(Eval.later(children.combineAll))(combineAnd(children)(_)) onExpr.lift.apply(expr).fold(Eval.later(children.combineAll))(combineAnd(children))
} }
.value .value
} }

View File

@ -1,7 +1,7 @@
package aqua.semantics.header package aqua.semantics.header
import aqua.raw.{RawContext, RawPart} import aqua.raw.{RawContext, RawPart}
import aqua.types.{AbilityType, ArrowType} import aqua.types.{AbilityType, ArrowType, Type}
import cats.Semigroup import cats.Semigroup
import cats.syntax.semigroup.* import cats.syntax.semigroup.*
@ -9,14 +9,17 @@ import cats.syntax.semigroup.*
trait Picker[A] { trait Picker[A] {
def all(ctx: A): Set[String] def all(ctx: A): Set[String]
def funcNames(ctx: A): List[String] def funcNames(ctx: A): Set[String]
def definedAbilityNames(ctx: A): Set[String]
def blank: A def blank: A
def pick(ctx: A, name: String, rename: Option[String], declared: Boolean): Option[A] def pick(ctx: A, name: String, rename: Option[String], declared: Boolean): Option[A]
def pickDeclared(ctx: A)(implicit semi: Semigroup[A]): A def pickDeclared(ctx: A)(implicit semi: Semigroup[A]): A
def pickHeader(ctx: A): A def pickHeader(ctx: A): A
def module(ctx: A): Option[String] def module(ctx: A): Option[String]
def exports(ctx: A): Option[Map[String, Option[String]]] def exports(ctx: A): Map[String, Option[String]]
def isAbility(ctx: A, name: String): Boolean
def funcReturnAbilityOrArrow(ctx: A, name: String): Boolean def funcReturnAbilityOrArrow(ctx: A, name: String): Boolean
def funcAcceptAbility(ctx: A, name: String): Boolean
def declares(ctx: A): Set[String] def declares(ctx: A): Set[String]
def setAbility(ctx: A, name: String, ctxAb: A): A def setAbility(ctx: A, name: String, ctxAb: A): A
def setModule(ctx: A, name: Option[String], declares: Set[String]): A def setModule(ctx: A, name: Option[String], declares: Set[String]): A
@ -25,19 +28,27 @@ trait Picker[A] {
def addPart(ctx: A, part: (A, RawPart)): A def addPart(ctx: A, part: (A, RawPart)): A
} }
final class PickerOps[A: Picker](p: A) { object Picker {
extension [A: Picker](p: A) {
def blank: A = Picker[A].blank def blank: A = Picker[A].blank
def all: Set[String] = Picker[A].all(p) def all: Set[String] = Picker[A].all(p)
def funcNames: List[String] = Picker[A].funcNames(p) def funcNames: Set[String] = Picker[A].funcNames(p)
def definedAbilityNames: Set[String] = Picker[A].definedAbilityNames(p)
def pick(name: String, rename: Option[String], declared: Boolean): Option[A] = def pick(name: String, rename: Option[String], declared: Boolean): Option[A] =
Picker[A].pick(p, name, rename, declared) Picker[A].pick(p, name, rename, declared)
def pickDeclared(implicit semi: Semigroup[A]): A = Picker[A].pickDeclared(p) def pickDeclared(implicit semi: Semigroup[A]): A = Picker[A].pickDeclared(p)
def pickHeader: A = Picker[A].pickHeader(p) def pickHeader: A = Picker[A].pickHeader(p)
def module: Option[String] = Picker[A].module(p) def module: Option[String] = Picker[A].module(p)
def exports: Option[Map[String, Option[String]]] = Picker[A].exports(p) def exports: Map[String, Option[String]] = Picker[A].exports(p)
def funcReturnAbilityOrArrow(name: String): Boolean = Picker[A].funcReturnAbilityOrArrow(p, name)
def isAbility(name: String): Boolean = Picker[A].isAbility(p, name)
def funcReturnAbilityOrArrow(name: String): Boolean =
Picker[A].funcReturnAbilityOrArrow(p, name)
def funcAcceptAbility(name: String): Boolean = Picker[A].funcAcceptAbility(p, name)
def declares: Set[String] = Picker[A].declares(p) def declares: Set[String] = Picker[A].declares(p)
def setAbility(name: String, ctx: A): A = Picker[A].setAbility(p, name, ctx) def setAbility(name: String, ctx: A): A = Picker[A].setAbility(p, name, ctx)
def setInit(ctx: Option[A]): A = Picker[A].setInit(p, ctx) def setInit(ctx: Option[A]): A = Picker[A].setInit(p, ctx)
@ -51,30 +62,46 @@ final class PickerOps[A: Picker](p: A) {
def setExports(exports: Map[String, Option[String]]): A = def setExports(exports: Map[String, Option[String]]): A =
Picker[A].setExports(p, exports) Picker[A].setExports(p, exports)
} }
object Picker { private def returnsAbilityOrArrow(arrowType: ArrowType): Boolean =
def returnsAbilityOrArrow(arrowType: ArrowType): Boolean = {
arrowType.codomain.toList.exists { arrowType.codomain.toList.exists {
case _: AbilityType => true case _: AbilityType => true
case _: ArrowType => true case _: ArrowType => true
case _ => false case _ => false
} }
private def acceptsAbility(arrowType: ArrowType): Boolean =
arrowType.domain.toList.exists {
case _: AbilityType => true
case _ => false
} }
implicit final def apply[A](implicit ev: Picker[A]): Picker[A] = ev private def isAbilityType(`type`: Type): Boolean =
`type` match {
case _: AbilityType => true
case _ => false
}
implicit final def syntaxPicker[A: Picker](a: A): PickerOps[A] = final def apply[A](using ev: Picker[A]): Picker[A] = ev
new PickerOps[A](a)
given Picker[RawContext] with { given Picker[RawContext] with {
override def blank: RawContext = RawContext.blank override def blank: RawContext = RawContext.blank
override def exports(ctx: RawContext): Option[Map[String, Option[String]]] = ctx.exports override def exports(ctx: RawContext): Map[String, Option[String]] = ctx.exports
override def isAbility(ctx: RawContext, name: String): Boolean =
ctx.types.get(name).exists(isAbilityType)
override def funcReturnAbilityOrArrow(ctx: RawContext, name: String): Boolean = override def funcReturnAbilityOrArrow(ctx: RawContext, name: String): Boolean =
ctx.funcs.get(name).map(_.arrow.`type`).exists(returnsAbilityOrArrow) ctx.funcs.get(name).map(_.arrow.`type`).exists(returnsAbilityOrArrow)
override def funcNames(ctx: RawContext): List[String] = ctx.funcs.keys.toList
override def funcAcceptAbility(ctx: RawContext, name: String): Boolean =
ctx.funcs.get(name).map(_.arrow.`type`).exists(acceptsAbility)
override def funcNames(ctx: RawContext): Set[String] = ctx.funcs.keySet
override def definedAbilityNames(ctx: RawContext): Set[String] = ctx.definedAbilities.keySet
override def addPart(ctx: RawContext, part: (RawContext, RawPart)): RawContext = override def addPart(ctx: RawContext, part: (RawContext, RawPart)): RawContext =
ctx.copy(parts = ctx.parts :+ part) ctx.copy(parts = ctx.parts :+ part)
@ -98,7 +125,7 @@ object Picker {
ctx.copy(module = name, declares = declares) ctx.copy(module = name, declares = declares)
override def setExports(ctx: RawContext, exports: Map[String, Option[String]]): RawContext = override def setExports(ctx: RawContext, exports: Map[String, Option[String]]): RawContext =
ctx.copy(exports = Some(exports)) ctx.copy(exports = exports)
override def pick( override def pick(
ctx: RawContext, ctx: RawContext,

View File

@ -138,7 +138,16 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
case ct @ CollectionToken(_, values) => case ct @ CollectionToken(_, values) =>
for { for {
maybeValuesRaw <- values.traverse(valueToRaw).map(_.sequence) maybeValuesRaw <- values.traverse(valueToRaw).map(_.sequence)
raw = maybeValuesRaw.map(raws => valuesRawChecked <- maybeValuesRaw.flatTraverse(raws =>
raws
.zip(values)
.traverse { case (raw, token) =>
T.ensureTypeIsCollectible(token, raw.`type`)
.map(Option.when(_)(raw))
}
.map(_.sequence)
)
raw = valuesRawChecked.map(raws =>
NonEmptyList NonEmptyList
.fromList(raws) .fromList(raws)
.fold(ValueRaw.Nil) { nonEmpty => .fold(ValueRaw.Nil) { nonEmpty =>

View File

@ -1,6 +1,6 @@
package aqua.semantics.rules.abilities package aqua.semantics.rules.abilities
import aqua.parser.lexer.{Ability, NamedTypeToken, Name, Token, ValueToken} import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken}
import aqua.raw.value.ValueRaw import aqua.raw.value.ValueRaw
import aqua.types.ArrowType import aqua.types.ArrowType
import cats.InjectK import cats.InjectK

View File

@ -2,7 +2,7 @@ package aqua.semantics.rules.abilities
import aqua.raw.{RawContext, ServiceRaw} import aqua.raw.{RawContext, ServiceRaw}
import aqua.raw.value.ValueRaw import aqua.raw.value.ValueRaw
import aqua.parser.lexer.{Ability, NamedTypeToken, Name, Token, ValueToken} import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken}
import aqua.types.ArrowType import aqua.types.ArrowType
import cats.Monoid import cats.Monoid
import cats.data.NonEmptyList import cats.data.NonEmptyList
@ -13,8 +13,7 @@ case class AbilitiesState[S[_]](
abilities: Map[String, RawContext] = Map.empty, abilities: Map[String, RawContext] = Map.empty,
rootServiceIds: Map[String, (ValueToken[S], ValueRaw)] = rootServiceIds: Map[String, (ValueToken[S], ValueRaw)] =
Map.empty[String, (ValueToken[S], ValueRaw)], Map.empty[String, (ValueToken[S], ValueRaw)],
definitions: Map[String, NamedTypeToken[S]] = definitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]]
Map.empty[String, NamedTypeToken[S]]
) { ) {
def purgeArrows: Option[(NonEmptyList[(Name[S], ArrowType)], AbilitiesState[S])] = def purgeArrows: Option[(NonEmptyList[(Name[S], ArrowType)], AbilitiesState[S])] =

View File

@ -1,13 +1,15 @@
package aqua.semantics.rules.definitions package aqua.semantics.rules.definitions
import aqua.parser.lexer.{NamedTypeToken, Name, Token}
import aqua.parser.lexer.{Name, NamedTypeToken, Token}
import aqua.types.{ArrowType, Type} import aqua.types.{ArrowType, Type}
import cats.data.{NonEmptyList, NonEmptyMap} import cats.data.{NonEmptyList, NonEmptyMap}
// Collect and purge arrows/values from structures, services, etc // Collect and purge arrows/values from structures, services, etc
trait DefinitionsAlgebra[S[_], Alg[_]] { trait DefinitionsAlgebra[S[_], Alg[_]] {
def defineDef(name: Name[S], `type`: Type): Alg[Boolean] def defineDef(name: Name[S], `type`: Type): Alg[Boolean]
def purgeDefs(token: NamedTypeToken[S]): Alg[Option[NonEmptyMap[String, Type]]] def purgeDefs(token: NamedTypeToken[S]): Alg[Map[String, DefinitionsState.Def[S]]]
def defineArrow(arrow: Name[S], `type`: ArrowType): Alg[Boolean] def defineArrow(arrow: Name[S], `type`: ArrowType): Alg[Boolean]

View File

@ -7,6 +7,7 @@ import aqua.semantics.rules.abilities.AbilitiesState
import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState} import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState}
import aqua.semantics.rules.types.TypesState import aqua.semantics.rules.types.TypesState
import aqua.types.{ArrowType, Type} import aqua.types.{ArrowType, Type}
import cats.data.{NonEmptyList, NonEmptyMap, State} import cats.data.{NonEmptyList, NonEmptyMap, State}
import monocle.Lens import monocle.Lens
import monocle.macros.GenLens import monocle.macros.GenLens
@ -14,6 +15,7 @@ import cats.syntax.applicative.*
import cats.syntax.apply.* import cats.syntax.apply.*
import cats.syntax.flatMap.* import cats.syntax.flatMap.*
import cats.syntax.functor.* import cats.syntax.functor.*
import cats.syntax.option.*
import scala.collection.immutable.SortedMap import scala.collection.immutable.SortedMap
@ -35,7 +37,14 @@ class DefinitionsInterpreter[S[_], X](implicit
def define(name: Name[S], `type`: Type, defName: String): SX[Boolean] = def define(name: Name[S], `type`: Type, defName: String): SX[Boolean] =
getState.map(_.definitions.get(name.value)).flatMap { getState.map(_.definitions.get(name.value)).flatMap {
case None => case None =>
modify(st => st.copy(definitions = st.definitions.updated(name.value, name -> `type`))) modify(st =>
st.copy(definitions =
st.definitions.updated(
name.value,
DefinitionsState.Def(name, `type`)
)
)
)
.as(true) .as(true)
case Some(_) => case Some(_) =>
report(name, s"Cannot define $defName `${name.value}`, it was already defined above") report(name, s"Cannot define $defName `${name.value}`, it was already defined above")
@ -50,41 +59,31 @@ class DefinitionsInterpreter[S[_], X](implicit
override def purgeDefs( override def purgeDefs(
token: NamedTypeToken[S] token: NamedTypeToken[S]
): SX[Option[NonEmptyMap[String, Type]]] = ): SX[Map[String, DefinitionsState.Def[S]]] =
getState.map(_.definitions).flatMap { defs => getState.map(_.definitions).flatMap { defs =>
NonEmptyMap.fromMap(SortedMap.from(defs.view.mapValues(_._2))) match { val names = defs.view.mapValues(_.name)
case Some(fs) =>
val fields = defs.map { case (n, (tt, _)) =>
n -> tt
}.toList
locations
.addTokenWithFields(token.value, token, fields)
.flatMap { _ =>
modify { st =>
st.copy(definitions = Map.empty)
}.map { _ =>
Some(fs)
}
}
case None => report(token, "Cannot define a data type without fields").as(None) for {
} _ <- locations
.addTokenWithFields(token.value, token, names.toList)
.whenA(defs.nonEmpty)
_ <- modify(_.copy(definitions = Map.empty))
} yield defs
} }
def purgeArrows(token: Token[S]): SX[Option[NonEmptyList[(Name[S], ArrowType)]]] = def purgeArrows(token: Token[S]): SX[Option[NonEmptyList[(Name[S], ArrowType)]]] =
getState.map(_.definitions).flatMap { definitions => getState.map(_.definitions).flatMap { defs =>
val values = definitions.values val arrows = defs.values.collect { case DefinitionsState.Def(name, t: ArrowType) =>
val arrows = NonEmptyList.fromList(values.collect { case (n, at @ ArrowType(_, _)) => name -> t
(n, at) }
}.toList) NonEmptyList.fromList(arrows.toList) match {
arrows match {
case Some(arrs) => case Some(arrs) =>
modify { st => modify { st =>
st.copy(definitions = Map.empty) st.copy(definitions = Map.empty)
}.as(Option[NonEmptyList[(Name[S], ArrowType)]](arrs)) }.as(arrs.some)
case None => case None =>
report(token, "Cannot purge arrows, no arrows provided") report(token, "Cannot purge arrows, no arrows provided")
.as(Option.empty[NonEmptyList[(Name[S], ArrowType)]]) .as(none)
} }
} }
} }

View File

@ -3,6 +3,16 @@ package aqua.semantics.rules.definitions
import aqua.parser.lexer.{Name, Token} import aqua.parser.lexer.{Name, Token}
import aqua.types.Type import aqua.types.Type
import DefinitionsState.Def
case class DefinitionsState[S[_]]( case class DefinitionsState[S[_]](
definitions: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)] definitions: Map[String, Def[S]] = Map.empty[String, Def[S]]
) )
object DefinitionsState {
final case class Def[S[_]](
name: Name[S],
`type`: Type
)
}

View File

@ -2,7 +2,8 @@ package aqua.semantics.rules.types
import aqua.parser.lexer.* import aqua.parser.lexer.*
import aqua.raw.value.{PropertyRaw, ValueRaw} import aqua.raw.value.{PropertyRaw, ValueRaw}
import aqua.types.{ArrowType, StructType, Type} import aqua.types.{AbilityType, ArrowType, NamedType, StructType, Type}
import cats.data.NonEmptyMap import cats.data.NonEmptyMap
import cats.data.NonEmptyList import cats.data.NonEmptyList
@ -14,10 +15,15 @@ trait TypesAlgebra[S[_], Alg[_]] {
def resolveArrowDef(arrowDef: ArrowTypeToken[S]): Alg[Option[ArrowType]] def resolveArrowDef(arrowDef: ArrowTypeToken[S]): Alg[Option[ArrowType]]
def defineNamedType( def defineAbilityType(
name: NamedTypeToken[S], name: NamedTypeToken[S],
`type`: Type fields: Map[String, (Name[S], Type)]
): Alg[Boolean] ): Alg[Option[AbilityType]]
def defineStructType(
name: NamedTypeToken[S],
fields: Map[String, (Name[S], Type)]
): Alg[Option[StructType]]
def defineAlias(name: NamedTypeToken[S], target: Type): Alg[Boolean] def defineAlias(name: NamedTypeToken[S], target: Type): Alg[Boolean]
@ -30,12 +36,18 @@ trait TypesAlgebra[S[_], Alg[_]] {
): Alg[Option[PropertyRaw]] ): Alg[Option[PropertyRaw]]
def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[PropertyRaw]] def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[PropertyRaw]]
def resolveArrow(rootT: Type, op: IntoArrow[S], arguments: List[ValueRaw]): Alg[Option[PropertyRaw]] def resolveArrow(
rootT: Type,
op: IntoArrow[S],
arguments: List[ValueRaw]
): Alg[Option[PropertyRaw]]
def ensureValuesComparable(token: Token[S], left: Type, right: Type): Alg[Boolean] def ensureValuesComparable(token: Token[S], left: Type, right: Type): Alg[Boolean]
def ensureTypeMatches(token: Token[S], expected: Type, givenType: Type): Alg[Boolean] def ensureTypeMatches(token: Token[S], expected: Type, givenType: Type): Alg[Boolean]
def ensureTypeIsCollectible(token: Token[S], givenType: Type): Alg[Boolean]
def ensureTypeOneOf[T <: Type]( def ensureTypeOneOf[T <: Type](
token: Token[S], token: Token[S],
expected: Set[T], expected: Set[T],

View File

@ -54,13 +54,12 @@ class TypesInterpreter[S[_], X](implicit
override def resolveType(token: TypeToken[S]): State[X, Option[Type]] = override def resolveType(token: TypeToken[S]): State[X, Option[Type]] =
getState.map(st => TypesStateHelper.resolveTypeToken(token, st, resolver)).flatMap { getState.map(st => TypesStateHelper.resolveTypeToken(token, st, resolver)).flatMap {
case Some(t) => case Some((typ, tokens)) =>
val (tt, tokens) = t val tokensLocs = tokens.map { case (t, n) => n.value -> t }
val tokensLocs = tokens.map { case (t, n) => locations.pointLocations(tokensLocs).as(typ.some)
n.value -> t case None =>
} // TODO: Give more specific error message
locations.pointLocations(tokensLocs).map(_ => Some(tt)) report(token, s"Unresolved type").as(None)
case None => report(token, s"Unresolved type").as(None)
} }
override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] = override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] =
@ -78,21 +77,61 @@ class TypesInterpreter[S[_], X](implicit
} }
} }
override def defineNamedType( override def defineAbilityType(
name: NamedTypeToken[S], name: NamedTypeToken[S],
`type`: Type fields: Map[String, (Name[S], Type)]
): State[X, Boolean] = ): State[X, Option[AbilityType]] =
getState.map(_.definitions.get(name.value)).flatMap { getState.map(_.definitions.get(name.value)).flatMap {
case Some(n) if n == name => State.pure(true) case Some(_) => report(name, s"Ability `${name.value}` was already defined").as(none)
case Some(_) =>
report(name, s"Type `${name.value}` was already defined").as(false)
case None => case None =>
val types = fields.view.mapValues { case (_, t) => t }.toMap
NonEmptyMap
.fromMap(SortedMap.from(types))
.fold(report(name, s"Ability `${name.value}` has no fields").as(none))(nonEmptyFields =>
val `type` = AbilityType(name.value, nonEmptyFields)
modify { st => modify { st =>
st.copy( st.copy(
strict = st.strict.updated(name.value, `type`), strict = st.strict.updated(name.value, `type`),
definitions = st.definitions.updated(name.value, name) definitions = st.definitions.updated(name.value, name)
) )
}.as(true) }.as(`type`.some)
)
}
override def defineStructType(
name: NamedTypeToken[S],
fields: Map[String, (Name[S], Type)]
): State[X, Option[StructType]] =
getState.map(_.definitions.get(name.value)).flatMap {
case Some(_) => report(name, s"Data `${name.value}` was already defined").as(none)
case None =>
fields.toList.traverse {
case (field, (fieldName, t: DataType)) =>
t match {
case _: StreamType => report(fieldName, s"Field '$field' has stream type").as(none)
case _ => (field -> t).some.pure[ST]
}
case (field, (fieldName, t)) =>
report(
fieldName,
s"Field '$field' has unacceptable for struct field type '$t'"
).as(none)
}.map(_.sequence.map(_.toMap))
.flatMap(
_.map(SortedMap.from)
.flatMap(NonEmptyMap.fromMap)
.fold(
report(name, s"Struct `${name.value}` has no fields").as(none)
)(nonEmptyFields =>
val `type` = StructType(name.value, nonEmptyFields)
modify { st =>
st.copy(
strict = st.strict.updated(name.value, `type`),
definitions = st.definitions.updated(name.value, name)
)
}.as(`type`.some)
)
)
} }
override def defineAlias(name: NamedTypeToken[S], target: Type): State[X, Boolean] = override def defineAlias(name: NamedTypeToken[S], target: Type): State[X, Boolean] =
@ -299,6 +338,16 @@ class TypesInterpreter[S[_], X](implicit
} }
} }
override def ensureTypeIsCollectible(token: Token[S], givenType: Type): State[X, Boolean] =
givenType match {
case _: DataType => true.pure
case _ =>
report(
token,
s"Value of type '$givenType' could not be put into a collection"
).as(false)
}
override def ensureTypeOneOf[T <: Type]( override def ensureTypeOneOf[T <: Type](
token: Token[S], token: Token[S],
expected: Set[T], expected: Set[T],

View File

@ -1,37 +1,14 @@
package aqua.semantics.rules.types package aqua.semantics.rules.types
import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw} import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw}
import aqua.parser.lexer.{ import aqua.parser.lexer.*
ArrayTypeToken, import aqua.types.*
ArrowTypeToken, import aqua.raw.RawContext
BasicTypeToken,
NamedTypeToken,
IntoField,
IntoIndex,
Name,
OptionTypeToken,
PropertyOp,
StreamTypeToken,
Token,
TopBottomToken,
TypeToken
}
import aqua.types.{
ArrayType,
ArrowType,
BottomType,
DataType,
OptionType,
ProductType,
StreamType,
StructType,
TopType,
Type
}
import cats.data.Validated.{Invalid, Valid} import cats.data.Validated.{Invalid, Valid}
import cats.data.{Chain, NonEmptyChain, ValidatedNec} import cats.data.{Chain, NonEmptyChain, ValidatedNec}
import cats.kernel.Monoid import cats.kernel.Monoid
import aqua.raw.RawContext import cats.syntax.option.*
case class TypesState[S[_]]( case class TypesState[S[_]](
fields: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)], fields: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)],
@ -56,7 +33,7 @@ object TypesStateHelper {
): Option[(Type, List[(Token[S], NamedTypeToken[S])])] = ): Option[(Type, List[(Token[S], NamedTypeToken[S])])] =
tt match { tt match {
case TopBottomToken(_, isTop) => case TopBottomToken(_, isTop) =>
Option((if (isTop) TopType else BottomType, Nil)) (if (isTop) TopType else BottomType, Nil).some
case ArrayTypeToken(_, dtt) => case ArrayTypeToken(_, dtt) =>
resolveTypeToken(dtt, state, resolver).collect { case (it: DataType, t) => resolveTypeToken(dtt, state, resolver).collect { case (it: DataType, t) =>
(ArrayType(it), t) (ArrayType(it), t)
@ -71,7 +48,7 @@ object TypesStateHelper {
} }
case ctt: NamedTypeToken[S] => case ctt: NamedTypeToken[S] =>
resolver(state, ctt) resolver(state, ctt)
case btt: BasicTypeToken[S] => Some((btt.value, Nil)) case btt: BasicTypeToken[S] => (btt.value, Nil).some
case ArrowTypeToken(_, args, res) => case ArrowTypeToken(_, args, res) =>
val strictArgs = val strictArgs =
args.map(_._2).map(resolveTypeToken(_, state, resolver)).collect { args.map(_._2).map(resolveTypeToken(_, state, resolver)).collect {

View File

@ -1,50 +1,93 @@
package aqua.semantics package aqua.semantics
import aqua.parser.head.{ExportExpr, FromExpr, HeaderExpr} import aqua.parser.head.{ExportExpr, FromExpr, HeaderExpr, ModuleExpr}
import aqua.parser.lexer.Name import aqua.parser.lexer.{Ability, Name}
import aqua.raw.RawContext import aqua.raw.RawContext
import aqua.raw.arrow.{ArrowRaw, FuncRaw} import aqua.raw.arrow.{ArrowRaw, FuncRaw}
import aqua.raw.ops.RawTag import aqua.raw.ops.RawTag
import aqua.raw.value.VarRaw import aqua.raw.value.VarRaw
import aqua.semantics.header.{HeaderHandler, HeaderSem} import aqua.semantics.header.{HeaderHandler, HeaderSem}
import aqua.types.{ArrowType, NilType, ProductType} import aqua.types.{AbilityType, ArrowType, NilType, ProductType, ScalarType}
import cats.data.{Chain, NonEmptyList, Validated}
import cats.data.{Chain, NonEmptyList, NonEmptyMap, Validated}
import cats.free.Cofree import cats.free.Cofree
import cats.{Eval, Id, Monoid} import cats.{Eval, Id, Monoid}
import cats.syntax.applicative.*
import org.scalatest.Inside import org.scalatest.Inside
import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers import org.scalatest.matchers.should.Matchers
class HeaderSpec extends AnyFlatSpec with Matchers with Inside { class HeaderSpec extends AnyFlatSpec with Matchers with Inside {
"header handler" should "generate an error on exported function that returns arrow or ability" in { given Monoid[RawContext] = RawContext.implicits(RawContext.blank).rawContextMonoid
implicit val rc: Monoid[RawContext] = RawContext.implicits(RawContext.blank).rawContextMonoid
val handler = new HeaderHandler[Id, RawContext]() val handler = new HeaderHandler[Id, RawContext]()
val funcName = "funcName" def exportHeader(funcName: String): Cofree[Chain, HeaderExpr[Id]] = {
val exp: FromExpr.NameOrAbAs[Id] = Left((Name(funcName), None))
val exp: FromExpr.NameOrAbAs[Id] = Left((Name[Id](funcName), None)) /**
val ast = * aqua TestModule
Cofree[Chain, HeaderExpr[Id]](ExportExpr[Id](NonEmptyList.of(exp)), Eval.now(Chain.empty)) * export <funcName>
*/
Cofree(
ModuleExpr(
name = Ability[Id]("TestModule"),
declareAll = None,
declareNames = Nil,
declareCustom = Nil
),
Chain(
Cofree(
ExportExpr(NonEmptyList.of(exp)),
Chain.empty.pure
)
).pure
)
}
def funcCtx(funcName: String, arrowType: ArrowType): RawContext =
RawContext(parts =
Chain.one(
(
RawContext.blank,
FuncRaw(
funcName,
ArrowRaw(arrowType, Nil, RawTag.empty)
)
)
)
)
"header handler" should "generate an error on exported function that returns arrow or ability" in {
val funcName = "funcName"
val ast = exportHeader(funcName)
val retArrowType = ArrowType(NilType, NilType) val retArrowType = ArrowType(NilType, NilType)
val arrowType = ArrowType(NilType, ProductType.apply(retArrowType :: Nil)) val arrowType = ArrowType(NilType, ProductType.apply(retArrowType :: Nil))
val initCtx = RawContext(parts = val initCtx = funcCtx(funcName, arrowType)
Chain.one(
(
RawContext.blank,
FuncRaw(funcName, ArrowRaw(arrowType, VarRaw("", retArrowType) :: Nil, RawTag.empty))
)
)
)
val result = handler.sem(Map.empty, ast).andThen(_.finCtx(initCtx)) val result = handler.sem(Map.empty, ast).andThen(_.finCtx(initCtx))
inside(result) { inside(result) { case Validated.Invalid(errors) =>
case Validated.Invalid(errors) => atLeast(1, errors.toChain.toList) shouldBe a[HeaderError[Id]]
errors.head shouldBe a [HeaderError[Id]] }
}
it should "generate an error on exported function that accepts an ability" in {
val funcName = "funcName"
val ast = exportHeader(funcName)
val abilityType = AbilityType("Ab", NonEmptyMap.of("field" -> ScalarType.i8))
val arrowType = ArrowType(ProductType(abilityType :: Nil), NilType)
val initCtx = funcCtx(funcName, arrowType)
val result = handler.sem(Map.empty, ast).andThen(_.finCtx(initCtx))
inside(result) { case Validated.Invalid(errors) =>
atLeast(1, errors.toChain.toList) shouldBe a[HeaderError[Id]]
} }
} }
} }

View File

@ -586,4 +586,29 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
} }
} }
} }
it should "forbid abilities or streams in struct fields" in {
val scriptAbility =
"""
|ability Ab:
| a: string
|
|data St:
| a: Ab
|""".stripMargin
val scriptStream =
"""
|data St:
| s: *i8
|""".stripMargin
insideSemErrors(scriptAbility) { errors =>
atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]]
}
insideSemErrors(scriptStream) { errors =>
atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]]
}
}
} }

View File

@ -17,7 +17,15 @@ import aqua.semantics.rules.locations.DummyLocationsInterpreter
import aqua.raw.value.{ApplyBinaryOpRaw, LiteralRaw} import aqua.raw.value.{ApplyBinaryOpRaw, LiteralRaw}
import aqua.raw.RawContext import aqua.raw.RawContext
import aqua.types.* import aqua.types.*
import aqua.parser.lexer.{InfixToken, LiteralToken, Name, PrefixToken, ValueToken, VarToken} import aqua.parser.lexer.{
CollectionToken,
InfixToken,
LiteralToken,
Name,
PrefixToken,
ValueToken,
VarToken
}
import aqua.raw.value.ApplyUnaryOpRaw import aqua.raw.value.ApplyUnaryOpRaw
import aqua.parser.lexer.ValueToken.string import aqua.parser.lexer.ValueToken.string
@ -60,6 +68,15 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
def variable(name: String): VarToken[Id] = def variable(name: String): VarToken[Id] =
VarToken[Id](Name[Id](name)) VarToken[Id](Name[Id](name))
def option(value: ValueToken[Id]): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.OptionMode, List(value))
def array(values: ValueToken[Id]*): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.ArrayMode, values.toList)
def stream(values: ValueToken[Id]*): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.StreamMode, values.toList)
def allPairs[A](list: List[A]): List[(A, A)] = for { def allPairs[A](list: List[A]): List[(A, A)] = for {
a <- list a <- list
b <- list b <- list
@ -511,4 +528,40 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
} }
} }
} }
it should "forbid collections with abilities or arrows" in {
val ability = variable("ab")
val abilityType = AbilityType("Ab", NonEmptyMap.of("field" -> ScalarType.i8))
val arrow = variable("arr")
val arrowType = ArrowType(
ProductType(ScalarType.i8 :: Nil),
ProductType(ScalarType.i8 :: Nil)
)
val alg = algebra()
val state = genState(
vars = Map(
ability.name.value -> abilityType,
arrow.name.value -> arrowType
)
)
List(
option(ability),
array(ability),
stream(ability),
option(arrow),
array(arrow),
stream(arrow)
).foreach { coll =>
val (st, res) = alg
.valueToRaw(coll)
.run(state)
.value
res shouldBe None
atLeast(1, st.errors.toList) shouldBe a[RulesViolated[Id]]
}
}
} }

View File

@ -246,6 +246,7 @@ sealed trait NamedType extends Type {
} }
// Struct is an unordered collection of labelled types // Struct is an unordered collection of labelled types
// TODO: Make fields type `DataType`
case class StructType(name: String, fields: NonEmptyMap[String, Type]) case class StructType(name: String, fields: NonEmptyMap[String, Type])
extends DataType with NamedType { extends DataType with NamedType {