feat(compiler): Restrict abilities usage [fixes LNG-208] (#854)

* Refactor

* Refactor

* Refactor

* Remove ScopeRaw

* Refactor, forbid exporting

* Add export checks

* Refactor

* Forbid exporting abilities

* Fix integration tests

* Forbid implicit ability export

* Simplify exports

* Forbid using non data types in collections

* Forbid inappropriate struct field types

* Refactor

* Add export tests

* Add collection tests

* Add struct fields test

* Fixes

* Fix
This commit is contained in:
InversionSpaces 2023-08-24 15:09:39 +02:00 committed by GitHub
parent 3b033852f9
commit 2a0b207633
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 555 additions and 321 deletions

View File

@ -1,3 +1,5 @@
aqua ClosureReturnRename
export lng193Bug
func getClosure(arg: u16, peer: string) -> u16 -> u16:

View File

@ -1,3 +1,5 @@
aqua ReturnArrow
import "@fluencelabs/aqua-lib/builtin.aqua"
export callReturnedArrow, callReturnedChainArrow

View File

@ -3,10 +3,11 @@ package aqua.lsp
import aqua.parser.lexer.{Ability, LiteralToken, Name, NamedTypeToken, Token}
import aqua.raw.{RawContext, RawPart}
import aqua.types.{ArrowType, Type}
import RawContext.semiRC
import aqua.semantics.header.Picker
import cats.{Monoid, Semigroup}
import cats.syntax.monoid.*
import RawContext.semiRC
import aqua.semantics.header.{Picker, PickerOps}
// Context with info that necessary for language server
case class LspContext[S[_]](
@ -51,31 +52,40 @@ object LspContext {
}
given [S[_]]: Picker[LspContext[S]] with {
private def ops[S[_]](ctx: LspContext[S]) = PickerOps[RawContext](ctx.raw)
import aqua.semantics.header.Picker.*
override def blank: LspContext[S] = LspContext[S](Picker[RawContext].blank, Map.empty)
override def exports(ctx: LspContext[S]): Option[Map[String, Option[String]]] = ops(ctx).exports
override def exports(ctx: LspContext[S]): Map[String, Option[String]] = ctx.raw.exports
override def isAbility(ctx: LspContext[S], name: String): Boolean =
ctx.raw.isAbility(name)
override def funcReturnAbilityOrArrow(ctx: LspContext[S], name: String): Boolean =
ops(ctx).funcReturnAbilityOrArrow(name)
override def funcNames(ctx: LspContext[S]): List[String] = ops(ctx).funcNames
ctx.raw.funcReturnAbilityOrArrow(name)
override def funcAcceptAbility(ctx: LspContext[S], name: String): Boolean =
ctx.raw.funcAcceptAbility(name)
override def funcNames(ctx: LspContext[S]): Set[String] = ctx.raw.funcNames
override def definedAbilityNames(ctx: LspContext[S]): Set[String] =
ctx.raw.definedAbilityNames
override def addPart(ctx: LspContext[S], part: (LspContext[S], RawPart)): LspContext[S] =
ctx.copy(raw = ops(ctx).addPart(part._1.raw -> part._2))
ctx.copy(raw = ctx.raw.addPart(part._1.raw -> part._2))
override def setInit(ctx: LspContext[S], ctxInit: Option[LspContext[S]]): LspContext[S] =
ctx.copy(raw = ops(ctx).setInit(ctxInit.map(_.raw)))
ctx.copy(raw = ctx.raw.setInit(ctxInit.map(_.raw)))
override def all(ctx: LspContext[S]): Set[String] =
ops(ctx).all
override def module(ctx: LspContext[S]): Option[String] = ops(ctx).module
override def declares(ctx: LspContext[S]): Set[String] = ops(ctx).declares
ctx.raw.all
override def module(ctx: LspContext[S]): Option[String] = ctx.raw.module
override def declares(ctx: LspContext[S]): Set[String] = ctx.raw.declares
override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] =
val prefix = name + "."
ctx.copy(
raw = ops(ctx).setAbility(name, ctxAb.raw),
raw = ctx.raw.setAbility(name, ctxAb.raw),
tokens = ctx.tokens ++ ctxAb.tokens.map(kv => (prefix + kv._1) -> kv._2)
)
@ -84,13 +94,13 @@ object LspContext {
name: Option[String],
declares: Set[String]
): LspContext[S] =
ctx.copy(raw = ops(ctx).setOptModule(name, declares))
ctx.copy(raw = ctx.raw.setOptModule(name, declares))
override def setExports(
ctx: LspContext[S],
exports: Map[String, Option[String]]
): LspContext[S] =
ctx.copy(raw = ops(ctx).setExports(exports))
ctx.copy(raw = ctx.raw.setExports(exports))
override def pick(
ctx: LspContext[S],
@ -107,7 +117,7 @@ object LspContext {
}
}.getOrElse(ctx.tokens)
ops(ctx)
ctx.raw
.pick(name, rename, declared)
.map(rc =>
ctx.copy(
@ -122,11 +132,11 @@ object LspContext {
)
)
override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ops(ctx).pickHeader)
override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ctx.raw.pickHeader)
override def pickDeclared(
ctx: LspContext[S]
)(using Semigroup[LspContext[S]]): LspContext[S] =
ctx.copy(raw = ops(ctx).pickDeclared)
ctx.copy(raw = ctx.raw.pickDeclared)
}
}

View File

@ -1,13 +1,15 @@
package aqua.raw
import aqua.raw.arrow.FuncRaw
import aqua.raw.value.ValueRaw
import aqua.types.{StructType, Type, AbilityType}
import cats.Monoid
import cats.Semigroup
import cats.data.Chain
import cats.data.NonEmptyMap
import aqua.raw.arrow.FuncRaw
import aqua.raw.value.ValueRaw
import aqua.types.{StructType, Type}
import cats.syntax.monoid.*
import cats.syntax.option.*
import scala.collection.immutable.SortedMap
@ -31,7 +33,7 @@ case class RawContext(
init: Option[RawContext] = None,
module: Option[String] = None,
declares: Set[String] = Set.empty,
exports: Option[Map[String, Option[String]]] = None,
exports: Map[String, Option[String]] = Map.empty,
parts: Chain[(RawContext, RawPart)] = Chain.empty,
abilities: Map[String, RawContext] = Map.empty
) {
@ -46,6 +48,13 @@ case class RawContext(
private def collectPartsMap[T](f: PartialFunction[RawPart, T]): Map[String, T] =
parts.collect { case (_, p) if f.isDefinedAt(p) => p.name -> f(p) }.toList.toMap
private def all[T](what: RawContext => Map[String, T], prefix: String = ""): Map[String, T] =
abilities
.foldLeft(what(this)) { case (ts, (k, v)) =>
ts ++ v.all(what, k + ".")
}
.map(prefixFirst(prefix, _))
lazy val services: Map[String, ServiceRaw] = collectPartsMap { case srv: ServiceRaw => srv }
lazy val allServices: Map[String, ServiceRaw] =
@ -72,15 +81,14 @@ case class RawContext(
c.value
}
private def all[T](what: RawContext => Map[String, T], prefix: String = ""): Map[String, T] =
abilities
.foldLeft(what(this)) { case (ts, (k, v)) =>
ts ++ v.all(what, k + ".")
}
.map(prefixFirst(prefix, _))
lazy val allValues: Map[String, ValueRaw] = all(_.values)
lazy val definedAbilities: Map[String, AbilityType] =
collectPartsMap { case TypeRaw(_, at: AbilityType) => at }
lazy val allDefinedAbilities: Map[String, AbilityType] =
all(_.definedAbilities)
def `type`(name: String): Option[StructType] =
NonEmptyMap
.fromMap(
@ -92,7 +100,14 @@ case class RawContext(
)
.map(StructType(name, _))
override def toString: String = s"module: $module\ndeclares: $declares\nexports: $exports"
override def toString: String =
s"""|module: ${module.getOrElse("unnamed")}
|declares: ${declares.mkString(", ")}
|exports: ${exports.map { case (name, rename) =>
rename.fold(name)(name + " as " + _)
}.mkString(", ")}
|parts: ${parts.map { case (_, part) => part.name }.toList.mkString(", ")}
|abilities: ${abilities.keys.mkString(", ")}""".stripMargin
}
object RawContext {
@ -104,7 +119,7 @@ object RawContext {
x.init.flatMap(xi => y.init.map(xi |+| _)) orElse x.init orElse y.init,
x.module orElse y.module,
x.declares ++ y.declares,
x.exports.flatMap(xe => y.exports.map(xe ++ _)) orElse x.exports orElse y.exports,
x.exports ++ y.exports,
x.parts ++ y.parts,
x.abilities ++ y.abilities
)

View File

@ -1,19 +0,0 @@
package aqua.raw
import aqua.raw.arrow.FuncRaw
import aqua.types.{ArrowType, StructType, Type}
import cats.data.NonEmptyMap
import aqua.raw.value.ValueRaw
import scala.collection.immutable.SortedMap
// NOTE(review): this definition appears in a whole-file deletion hunk — the
// commit removes ScopeRaw entirely ("Remove ScopeRaw" in the message),
// replacing it with AbilityType-based TypeRaw parts elsewhere in this diff.
case class ScopeRaw(
name: String,
// Combined map of the scope's fields and arrows, keyed by member name.
fieldsAndArrows: NonEmptyMap[String, Type]
) extends RawPart {
// The part's type is a StructType assembled directly from the member map.
lazy val rawPartType: StructType = StructType(name, fieldsAndArrows)
// Renaming yields a copy with the new name; members are carried over unchanged.
override def rename(s: String): RawPart = copy(name = s)
}

View File

@ -118,11 +118,7 @@ object AquaContext extends Logging {
logger.trace("raw: " + rawContext)
logger.trace("ctx: " + ctx)
rawContext.module
.fold(
// if `module` header is not defined, then export everything defined in rawContext
rawContext.parts.map(_._2).map(_.name).map(_ -> Option.empty[String]).toList.toMap
)(_ => rawContext.exports.getOrElse(Map.empty))
rawContext.exports
.foldLeft(
// Module name is what persists
blank.copy(

View File

@ -11,7 +11,7 @@ import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
case class FieldTypeExpr[F[_]](name: Name[F], `type`: DataTypeToken[F])
extends Expr[F](FieldTypeExpr, name) {
extends Expr[F](FieldTypeExpr, name) {
override def mapK[K[_]: Comonad](fk: F ~> K): FieldTypeExpr[K] =
copy(name.mapK(fk), `type`.mapK(fk))
@ -20,11 +20,7 @@ case class FieldTypeExpr[F[_]](name: Name[F], `type`: DataTypeToken[F])
object FieldTypeExpr extends Expr.Leaf {
override val p: Parser[FieldTypeExpr[Span.S]] =
((Name.p <* ` : `) ~ (Parser
.not(StreamTypeToken.`streamtypedef`)
.withContext(
"Data fields cannot be of stream type (stream is designated by '*')."
) *> DataTypeToken.`datatypedef`)).map { case (name, t) =>
((Name.p <* ` : `) ~ DataTypeToken.`datatypedef`).map { case (name, t) =>
FieldTypeExpr(name, t)
}
}

View File

@ -2,7 +2,7 @@ package aqua.parser.expr
import aqua.parser.Expr
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, NamedTypeToken, ValueToken}
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
import aqua.parser.lift.LiftParser
import cats.Comonad
import cats.parse.Parser

View File

@ -3,10 +3,10 @@ package aqua.parser.expr.func
import aqua.parser.Expr
import aqua.parser.expr.func.AbilityIdExpr
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, NamedTypeToken, ValueToken}
import aqua.parser.lexer.{NamedTypeToken, ValueToken}
import aqua.parser.lift.LiftParser
import cats.parse.Parser as P
import cats.{Comonad, ~>}
import cats.{~>, Comonad}
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}

View File

@ -3,7 +3,7 @@ package aqua.parser.expr.func
import aqua.parser.Expr
import aqua.parser.expr.func.CallArrowExpr
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, CallArrowToken, Name, ValueToken, VarToken}
import aqua.parser.lexer.{CallArrowToken, Name, ValueToken, VarToken}
import aqua.parser.lift.{LiftParser, Span}
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
import cats.data.NonEmptyList

View File

@ -1,12 +1,14 @@
package aqua.parser.head
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, Name}
import aqua.parser.lift.LiftParser
import cats.Comonad
import cats.data.NonEmptyList
import cats.parse.Parser as P
import cats.~>
import cats.syntax.bifunctor.*
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, Name}
import aqua.parser.lift.LiftParser
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
@ -16,7 +18,9 @@ trait FromExpr[F[_]] {
object FromExpr {
def mapK[F[_], K[_]: Comonad](imports: NonEmptyList[FromExpr.NameOrAbAs[F]])(fk: F ~> K): NonEmptyList[FromExpr.NameOrAbAs[K]] =
def mapK[F[_], K[_]: Comonad](
imports: NonEmptyList[FromExpr.NameOrAbAs[F]]
)(fk: F ~> K): NonEmptyList[FromExpr.NameOrAbAs[K]] =
imports.map {
case Left((n, nOp)) => Left((n.mapK(fk), nOp.map(_.mapK(fk))))
case Right(a, aOp) => Right((a.mapK(fk), aOp.map(_.mapK(fk))))
@ -28,11 +32,17 @@ object FromExpr {
Name.nameAs.map(Left(_)) | Ability.abAs.map(Right(_))
val importFrom: P[NonEmptyList[NameOrAbAs[Span.S]]] =
comma[NameOrAbAs[Span.S]](nameOrAbAs) <* ` ` <* `from`
comma(nameOrAbAs) <* ` ` <* `from`
def show[F[_]](ne: NonEmptyList[NameOrAbAs[F]]): String =
ne.toList.map(_.fold(
non => non._1.value + non._2.map(_.value).fold("")(" as "+_),
non => non._1.value + non._2.map(_.value).fold("")(" as "+_)
)).mkString(", ")
ne.toList
.map(
_.bimap(
_.bimap(_.value, _.map(_.value)),
_.bimap(_.value, _.map(_.value))
).map { case (name, rename) =>
s"$name${rename.fold("")(" as " + _)}"
}
)
.mkString(", ")
}

View File

@ -43,7 +43,7 @@ case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: Data
object StreamTypeToken {
val `streamtypedef`: P[StreamTypeToken[Span.S]] =
((`*`.lift <* P.not(`*`).withContext("Nested streams '**type' is prohibited"))
((`*`.lift <* P.not(`*`).withContext("Nested streams '**type' are prohibited"))
~ DataTypeToken.`withoutstreamdatatypedef`)
.map(ud => StreamTypeToken(ud._1, ud._2))

View File

@ -3,15 +3,7 @@ package aqua.parser
import aqua.AquaSpec
import aqua.parser.expr.{FuncExpr, RootExpr}
import aqua.parser.expr.func.{ArrowExpr, AssignmentExpr, CallArrowExpr, ClosureExpr, ReturnExpr}
import aqua.parser.lexer.{
Ability,
CallArrowToken,
IntoArrow,
NamedTypeToken,
PropertyToken,
Token,
VarToken
}
import aqua.parser.lexer.{CallArrowToken, IntoArrow, NamedTypeToken, PropertyToken, Token, VarToken}
import aqua.types.ScalarType.string
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

View File

@ -5,7 +5,6 @@ import aqua.AquaSpec.{toNumber, toStr, toVar}
import aqua.parser.expr.ConstantExpr
import aqua.parser.expr.func.AssignmentExpr
import aqua.parser.lexer.{
Ability,
CallArrowToken,
CollectionToken,
IntoArrow,

View File

@ -1,7 +1,7 @@
package aqua.semantics.expr
import aqua.parser.expr.AbilityExpr
import aqua.raw.{Raw, ScopeRaw, ServiceRaw, TypeRaw}
import aqua.raw.{Raw, ServiceRaw, TypeRaw}
import aqua.parser.lexer.{Name, NamedTypeToken}
import aqua.raw.{Raw, ServiceRaw}
import aqua.semantics.Prog
@ -29,11 +29,9 @@ class AbilitySem[S[_]](val expr: AbilityExpr[S]) extends AnyVal {
Prog.after_(
for {
defs <- D.purgeDefs(expr.name)
abType = defs.map(fields => AbilityType(expr.name.value, fields))
result <- abType.flatTraverse(t =>
T.defineNamedType(expr.name, t)
.map(Option.when(_)(TypeRaw(expr.name.value, t)))
)
fields = defs.view.mapValues(d => d.name -> d.`type`).toMap
abilityType <- T.defineAbilityType(expr.name, fields)
result = abilityType.map(st => TypeRaw(expr.name.value, st))
} yield result.getOrElse(Raw.error("Ability types unresolved"))
)
}

View File

@ -7,8 +7,10 @@ import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.StructType
import cats.syntax.functor.*
import cats.syntax.applicative.*
import cats.syntax.traverse.*
import cats.syntax.flatMap.*
import cats.Monad
@ -19,20 +21,12 @@ class DataStructSem[S[_]](val expr: DataStructExpr[S]) extends AnyVal {
T: TypesAlgebra[S, Alg]
): Prog[Alg, Raw] =
Prog.after((_: Raw) =>
D.purgeDefs(expr.name).flatMap {
case Some(fields) =>
val t = StructType(expr.name.value, fields)
T.defineNamedType(expr.name, t).map {
case true =>
TypeRaw(
expr.name.value,
t
): Raw
case false =>
Raw.error("Data struct types unresolved")
}
case None => Raw.error("Data struct types unresolved").pure[Alg]
}
for {
defs <- D.purgeDefs(expr.name)
fields = defs.view.mapValues(d => d.name -> d.`type`).toMap
structType <- T.defineStructType(expr.name, fields)
result = structType.map(st => TypeRaw(expr.name.value, st))
} yield result.getOrElse(Raw.error("Data struct types unresolved"))
)
}

View File

@ -2,22 +2,26 @@ package aqua.semantics.header
import aqua.parser.Ast
import aqua.parser.head.*
import aqua.parser.lexer.{Ability, Token}
import aqua.parser.lexer.{Ability, Name, Token}
import aqua.semantics.header.Picker.*
import aqua.semantics.{HeaderError, SemanticError}
import cats.data.*
import cats.data.Validated.{invalidNec, validNec}
import cats.data.Validated.*
import cats.free.Cofree
import cats.instances.list.*
import cats.instances.option.*
import cats.kernel.Semigroup
import cats.syntax.option.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.semigroup.*
import cats.syntax.validated.*
import cats.syntax.bifunctor.*
import cats.syntax.apply.*
import cats.{Comonad, Eval, Monoid}
class HeaderHandler[S[_]: Comonad, C](implicit
class HeaderHandler[S[_]: Comonad, C](using
acm: Monoid[C],
headMonoid: Monoid[HeaderSem[S, C]],
picker: Picker[C]
@ -34,8 +38,27 @@ class HeaderHandler[S[_]: Comonad, C](implicit
Eval.later(parent |+| children.combineAll)
// Error generator with token pointer
private def error[T](token: Token[S], msg: String): ValidatedNec[SemanticError[S], T] =
invalidNec(HeaderError(token, msg))
private def error[T](
token: Token[S],
msg: String
): SemanticError[S] = HeaderError(token, msg)
private def exportFuncChecks(ctx: C, token: Token[S], name: String): ResT[S, Unit] =
Validated.condNec(
!ctx.funcReturnAbilityOrArrow(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it returns an arrow or an ability"
)
) combine Validated.condNec(
!ctx.funcAcceptAbility(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it accepts an ability"
)
)
def sem(imports: Map[String, C], header: Ast.Head[S]): Res[S, C] = {
// Resolve a filename from given imports or fail
@ -43,52 +66,43 @@ class HeaderHandler[S[_]: Comonad, C](implicit
imports
.get(f.fileValue)
.map(_.pickDeclared)
.fold[ResAC[S]](
.toValidNec(
error(f.token, "Cannot resolve the import")
)(validNec)
)
// Get part of the declared context (for import/use ... from ... expressions)
def getFrom(f: FromExpr[S], ctx: C): ResAC[S] =
f.imports
.map[ResAC[S]](
_.fold[ResAC[S]](
{ case (n, rn) =>
ctx.pickHeader.validNec |+| f.imports
.map(
_.bimap(
_.bimap(n => (n, n.value), _.map(_.value)),
_.bimap(n => (n, n.value), _.map(_.value))
).merge match {
case ((token, name), rename) =>
ctx
.pick(n.value, rn.map(_.value), ctx.module.nonEmpty)
.map(validNec)
.getOrElse(
.pick(name, rename, ctx.module.nonEmpty)
.toValidNec(
error(
n,
s"Imported file `declares ${ctx.declares.mkString(", ")}`, no ${n.value} declared. Try adding `declares ${n.value}` to that file."
token,
s"Imported file `declares ${ctx.declares.mkString(", ")}`, no $name declared. Try adding `declares $name` to that file."
)
)
},
{ case (n, rn) =>
ctx
.pick(n.value, rn.map(_.value), ctx.module.nonEmpty)
.map(validNec)
.getOrElse(
error(
n,
s"Imported file `declares ${ctx.declares.mkString(", ")}`, no ${n.value} declared. Try adding `declares ${n.value}` to that file."
)
)
}
)
}
)
.foldLeft[ResAC[S]](validNec(ctx.pickHeader))(_ |+| _)
.combineAll
// Convert an imported context into a module (ability)
def toModule(ctx: C, tkn: Token[S], rename: Option[Ability[S]]): ResAC[S] =
rename
.map(_.value)
.orElse(ctx.module)
.fold[ResAC[S]](
.map(modName => picker.blank.setAbility(modName, ctx))
.toValidNec(
error(
tkn,
s"Used module has no `module` header. Please add `module` header or use ... as ModuleName, or switch to import"
)
)(modName => validNec(picker.blank.setAbility(modName, ctx)))
)
// Handler for every header expression, will be combined later
val onExpr: PartialFunction[HeaderExpr[S], Res[S, C]] = {
@ -110,18 +124,17 @@ class HeaderHandler[S[_]: Comonad, C](implicit
)
} else
(
declareNames.map(n => n.value -> n) ::: declareCustom.map(a => a.value -> a)
).map[ValidatedNec[SemanticError[S], Int]] { case (n, t) =>
declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
).map { case (n, t) =>
ctx
.pick(n, None, ctx.module.nonEmpty)
// We just validate, nothing more
.as(validNec(1))
.getOrElse(
.toValidNec(
error(
t,
s"`$n` is expected to be declared, but declaration is not found in the file"
)
)
.void
}.combineAll
.as(
// TODO: why module name and declares is lost? where is it lost?
@ -133,6 +146,7 @@ class HeaderHandler[S[_]: Comonad, C](implicit
case f @ ImportExpr(_) =>
// Import everything from a file
resolve(f).map(fc => HeaderSem[S, C](fc, (c, _) => validNec(c)))
case f @ ImportFromExpr(_, _) =>
// Import, map declarations
resolve(f)
@ -165,37 +179,37 @@ class HeaderHandler[S[_]: Comonad, C](implicit
// Nothing there
picker.blank,
(ctx, initCtx) =>
val sumCtx = initCtx |+| ctx
pubs
.map(
_.fold[(Token[S], String, Option[String])](
nrn => (nrn._1, nrn._1.value, nrn._2.map(_.value)),
nrn => (nrn._1, nrn._1.value, nrn._2.map(_.value))
)
_.bimap(
_.bimap(n => (n, n.value), _.map(_.value)),
_.bimap(n => (n, n.value), _.map(_.value))
).merge
)
.map { case (token, name, rename) =>
val sumCtx = initCtx |+| ctx
if (sumCtx.funcReturnAbilityOrArrow(name))
error(
token,
s"The function '$name' cannot be exported, because it returns arrow type or ability type"
)
else
sumCtx
.pick(name, rename, declared = false)
.as(Map(name -> rename).validNec)
.getOrElse(
error(
token,
s"File has no $name declaration or import, cannot export, available funcs: ${sumCtx.funcNames
.mkString(", ")}"
)
.map { case ((token, name), rename) =>
sumCtx
.pick(name, rename, declared = false)
.as(Map(name -> rename))
.toValid(
error(
token,
s"File has no $name declaration or import, " +
s"cannot export, available functions: ${sumCtx.funcNames.mkString(", ")}"
)
)
.ensure(
error(
token,
s"Can not export '$name' as it is an ability"
)
)(_ => !sumCtx.isAbility(name))
.toValidatedNec <* exportFuncChecks(sumCtx, token, name)
}
.foldLeft[ResT[S, Map[String, Option[String]]]](
validNec(ctx.exports.getOrElse(Map.empty))
)(_ |+| _)
.map(expCtx => ctx.setExports(expCtx))
.prepend(validNec(ctx.exports))
.combineAll
.map(ctx.setExports)
)
)
@ -204,7 +218,26 @@ class HeaderHandler[S[_]: Comonad, C](implicit
validNec(
HeaderSem[S, C](
acm.empty,
(ctx, _) => validNec(ctx.setExports(Map.empty))
(ctx, initCtx) => {
val sumCtx = initCtx |+| ctx
ctx.funcNames.toList
.traverse_(name =>
// TODO: Provide better token for this error
exportFuncChecks(sumCtx, token, name)
)
.combine(
ctx.definedAbilityNames.toList.traverse_(name =>
// TODO: Provide better token for this error
error(token, s"Can not export '$name' as it is an ability ").invalidNec
)
)
.as(
// Export everything
ctx.setExports(
ctx.all.map(_ -> None).toMap
)
)
}
)
)
@ -214,7 +247,7 @@ class HeaderHandler[S[_]: Comonad, C](implicit
Cofree
.cata[Chain, HeaderExpr[S], Res[S, C]](header) { case (expr, children) =>
onExpr.lift.apply(expr).fold(Eval.later(children.combineAll))(combineAnd(children)(_))
onExpr.lift.apply(expr).fold(Eval.later(children.combineAll))(combineAnd(children))
}
.value
}

View File

@ -1,7 +1,7 @@
package aqua.semantics.header
import aqua.raw.{RawContext, RawPart}
import aqua.types.{AbilityType, ArrowType}
import aqua.types.{AbilityType, ArrowType, Type}
import cats.Semigroup
import cats.syntax.semigroup.*
@ -9,14 +9,17 @@ import cats.syntax.semigroup.*
trait Picker[A] {
def all(ctx: A): Set[String]
def funcNames(ctx: A): List[String]
def funcNames(ctx: A): Set[String]
def definedAbilityNames(ctx: A): Set[String]
def blank: A
def pick(ctx: A, name: String, rename: Option[String], declared: Boolean): Option[A]
def pickDeclared(ctx: A)(implicit semi: Semigroup[A]): A
def pickHeader(ctx: A): A
def module(ctx: A): Option[String]
def exports(ctx: A): Option[Map[String, Option[String]]]
def exports(ctx: A): Map[String, Option[String]]
def isAbility(ctx: A, name: String): Boolean
def funcReturnAbilityOrArrow(ctx: A, name: String): Boolean
def funcAcceptAbility(ctx: A, name: String): Boolean
def declares(ctx: A): Set[String]
def setAbility(ctx: A, name: String, ctxAb: A): A
def setModule(ctx: A, name: Option[String], declares: Set[String]): A
@ -25,56 +28,80 @@ trait Picker[A] {
def addPart(ctx: A, part: (A, RawPart)): A
}
final class PickerOps[A: Picker](p: A) {
def blank: A = Picker[A].blank
def all: Set[String] = Picker[A].all(p)
def funcNames: List[String] = Picker[A].funcNames(p)
def pick(name: String, rename: Option[String], declared: Boolean): Option[A] =
Picker[A].pick(p, name, rename, declared)
def pickDeclared(implicit semi: Semigroup[A]): A = Picker[A].pickDeclared(p)
def pickHeader: A = Picker[A].pickHeader(p)
def module: Option[String] = Picker[A].module(p)
def exports: Option[Map[String, Option[String]]] = Picker[A].exports(p)
def funcReturnAbilityOrArrow(name: String): Boolean = Picker[A].funcReturnAbilityOrArrow(p, name)
def declares: Set[String] = Picker[A].declares(p)
def setAbility(name: String, ctx: A): A = Picker[A].setAbility(p, name, ctx)
def setInit(ctx: Option[A]): A = Picker[A].setInit(p, ctx)
def addPart(part: (A, RawPart)): A = Picker[A].addPart(p, part)
def setModule(name: String, declares: Set[String]): A =
Picker[A].setModule(p, Some(name), declares)
def setOptModule(name: Option[String], declares: Set[String]): A =
Picker[A].setModule(p, name, declares)
def setExports(exports: Map[String, Option[String]]): A =
Picker[A].setExports(p, exports)
}
object Picker {
def returnsAbilityOrArrow(arrowType: ArrowType): Boolean = {
extension [A: Picker](p: A) {
def blank: A = Picker[A].blank
def all: Set[String] = Picker[A].all(p)
def funcNames: Set[String] = Picker[A].funcNames(p)
def definedAbilityNames: Set[String] = Picker[A].definedAbilityNames(p)
def pick(name: String, rename: Option[String], declared: Boolean): Option[A] =
Picker[A].pick(p, name, rename, declared)
def pickDeclared(implicit semi: Semigroup[A]): A = Picker[A].pickDeclared(p)
def pickHeader: A = Picker[A].pickHeader(p)
def module: Option[String] = Picker[A].module(p)
def exports: Map[String, Option[String]] = Picker[A].exports(p)
def isAbility(name: String): Boolean = Picker[A].isAbility(p, name)
def funcReturnAbilityOrArrow(name: String): Boolean =
Picker[A].funcReturnAbilityOrArrow(p, name)
def funcAcceptAbility(name: String): Boolean = Picker[A].funcAcceptAbility(p, name)
def declares: Set[String] = Picker[A].declares(p)
def setAbility(name: String, ctx: A): A = Picker[A].setAbility(p, name, ctx)
def setInit(ctx: Option[A]): A = Picker[A].setInit(p, ctx)
def addPart(part: (A, RawPart)): A = Picker[A].addPart(p, part)
def setModule(name: String, declares: Set[String]): A =
Picker[A].setModule(p, Some(name), declares)
def setOptModule(name: Option[String], declares: Set[String]): A =
Picker[A].setModule(p, name, declares)
def setExports(exports: Map[String, Option[String]]): A =
Picker[A].setExports(p, exports)
}
private def returnsAbilityOrArrow(arrowType: ArrowType): Boolean =
arrowType.codomain.toList.exists {
case _: AbilityType => true
case _: ArrowType => true
case _ => false
}
}
implicit final def apply[A](implicit ev: Picker[A]): Picker[A] = ev
private def acceptsAbility(arrowType: ArrowType): Boolean =
arrowType.domain.toList.exists {
case _: AbilityType => true
case _ => false
}
implicit final def syntaxPicker[A: Picker](a: A): PickerOps[A] =
new PickerOps[A](a)
private def isAbilityType(`type`: Type): Boolean =
`type` match {
case _: AbilityType => true
case _ => false
}
final def apply[A](using ev: Picker[A]): Picker[A] = ev
given Picker[RawContext] with {
override def blank: RawContext = RawContext.blank
override def exports(ctx: RawContext): Option[Map[String, Option[String]]] = ctx.exports
override def exports(ctx: RawContext): Map[String, Option[String]] = ctx.exports
override def isAbility(ctx: RawContext, name: String): Boolean =
ctx.types.get(name).exists(isAbilityType)
override def funcReturnAbilityOrArrow(ctx: RawContext, name: String): Boolean =
ctx.funcs.get(name).map(_.arrow.`type`).exists(returnsAbilityOrArrow)
override def funcNames(ctx: RawContext): List[String] = ctx.funcs.keys.toList
override def funcAcceptAbility(ctx: RawContext, name: String): Boolean =
ctx.funcs.get(name).map(_.arrow.`type`).exists(acceptsAbility)
override def funcNames(ctx: RawContext): Set[String] = ctx.funcs.keySet
override def definedAbilityNames(ctx: RawContext): Set[String] = ctx.definedAbilities.keySet
override def addPart(ctx: RawContext, part: (RawContext, RawPart)): RawContext =
ctx.copy(parts = ctx.parts :+ part)
@ -98,7 +125,7 @@ object Picker {
ctx.copy(module = name, declares = declares)
override def setExports(ctx: RawContext, exports: Map[String, Option[String]]): RawContext =
ctx.copy(exports = Some(exports))
ctx.copy(exports = exports)
override def pick(
ctx: RawContext,

View File

@ -138,7 +138,16 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
case ct @ CollectionToken(_, values) =>
for {
maybeValuesRaw <- values.traverse(valueToRaw).map(_.sequence)
raw = maybeValuesRaw.map(raws =>
valuesRawChecked <- maybeValuesRaw.flatTraverse(raws =>
raws
.zip(values)
.traverse { case (raw, token) =>
T.ensureTypeIsCollectible(token, raw.`type`)
.map(Option.when(_)(raw))
}
.map(_.sequence)
)
raw = valuesRawChecked.map(raws =>
NonEmptyList
.fromList(raws)
.fold(ValueRaw.Nil) { nonEmpty =>

View File

@ -1,6 +1,6 @@
package aqua.semantics.rules.abilities
import aqua.parser.lexer.{Ability, NamedTypeToken, Name, Token, ValueToken}
import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken}
import aqua.raw.value.ValueRaw
import aqua.types.ArrowType
import cats.InjectK

View File

@ -2,7 +2,7 @@ package aqua.semantics.rules.abilities
import aqua.raw.{RawContext, ServiceRaw}
import aqua.raw.value.ValueRaw
import aqua.parser.lexer.{Ability, NamedTypeToken, Name, Token, ValueToken}
import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken}
import aqua.types.ArrowType
import cats.Monoid
import cats.data.NonEmptyList
@ -13,8 +13,7 @@ case class AbilitiesState[S[_]](
abilities: Map[String, RawContext] = Map.empty,
rootServiceIds: Map[String, (ValueToken[S], ValueRaw)] =
Map.empty[String, (ValueToken[S], ValueRaw)],
definitions: Map[String, NamedTypeToken[S]] =
Map.empty[String, NamedTypeToken[S]]
definitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]]
) {
def purgeArrows: Option[(NonEmptyList[(Name[S], ArrowType)], AbilitiesState[S])] =

View File

@ -1,13 +1,15 @@
package aqua.semantics.rules.definitions
import aqua.parser.lexer.{NamedTypeToken, Name, Token}
import aqua.parser.lexer.{Name, NamedTypeToken, Token}
import aqua.types.{ArrowType, Type}
import cats.data.{NonEmptyList, NonEmptyMap}
// Collect and purge arrows/values from structures, services, etc
trait DefinitionsAlgebra[S[_], Alg[_]] {
def defineDef(name: Name[S], `type`: Type): Alg[Boolean]
def purgeDefs(token: NamedTypeToken[S]): Alg[Option[NonEmptyMap[String, Type]]]
def purgeDefs(token: NamedTypeToken[S]): Alg[Map[String, DefinitionsState.Def[S]]]
def defineArrow(arrow: Name[S], `type`: ArrowType): Alg[Boolean]

View File

@ -7,6 +7,7 @@ import aqua.semantics.rules.abilities.AbilitiesState
import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState}
import aqua.semantics.rules.types.TypesState
import aqua.types.{ArrowType, Type}
import cats.data.{NonEmptyList, NonEmptyMap, State}
import monocle.Lens
import monocle.macros.GenLens
@ -14,6 +15,7 @@ import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.option.*
import scala.collection.immutable.SortedMap
@ -35,7 +37,14 @@ class DefinitionsInterpreter[S[_], X](implicit
def define(name: Name[S], `type`: Type, defName: String): SX[Boolean] =
getState.map(_.definitions.get(name.value)).flatMap {
case None =>
modify(st => st.copy(definitions = st.definitions.updated(name.value, name -> `type`)))
modify(st =>
st.copy(definitions =
st.definitions.updated(
name.value,
DefinitionsState.Def(name, `type`)
)
)
)
.as(true)
case Some(_) =>
report(name, s"Cannot define $defName `${name.value}`, it was already defined above")
@ -50,41 +59,31 @@ class DefinitionsInterpreter[S[_], X](implicit
override def purgeDefs(
token: NamedTypeToken[S]
): SX[Option[NonEmptyMap[String, Type]]] =
): SX[Map[String, DefinitionsState.Def[S]]] =
getState.map(_.definitions).flatMap { defs =>
NonEmptyMap.fromMap(SortedMap.from(defs.view.mapValues(_._2))) match {
case Some(fs) =>
val fields = defs.map { case (n, (tt, _)) =>
n -> tt
}.toList
locations
.addTokenWithFields(token.value, token, fields)
.flatMap { _ =>
modify { st =>
st.copy(definitions = Map.empty)
}.map { _ =>
Some(fs)
}
}
val names = defs.view.mapValues(_.name)
case None => report(token, "Cannot define a data type without fields").as(None)
}
for {
_ <- locations
.addTokenWithFields(token.value, token, names.toList)
.whenA(defs.nonEmpty)
_ <- modify(_.copy(definitions = Map.empty))
} yield defs
}
def purgeArrows(token: Token[S]): SX[Option[NonEmptyList[(Name[S], ArrowType)]]] =
getState.map(_.definitions).flatMap { definitions =>
val values = definitions.values
val arrows = NonEmptyList.fromList(values.collect { case (n, at @ ArrowType(_, _)) =>
(n, at)
}.toList)
arrows match {
getState.map(_.definitions).flatMap { defs =>
val arrows = defs.values.collect { case DefinitionsState.Def(name, t: ArrowType) =>
name -> t
}
NonEmptyList.fromList(arrows.toList) match {
case Some(arrs) =>
modify { st =>
st.copy(definitions = Map.empty)
}.as(Option[NonEmptyList[(Name[S], ArrowType)]](arrs))
}.as(arrs.some)
case None =>
report(token, "Cannot purge arrows, no arrows provided")
.as(Option.empty[NonEmptyList[(Name[S], ArrowType)]])
.as(none)
}
}
}

View File

@ -3,6 +3,16 @@ package aqua.semantics.rules.definitions
import aqua.parser.lexer.{Name, Token}
import aqua.types.Type
import DefinitionsState.Def
case class DefinitionsState[S[_]](
definitions: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)]
definitions: Map[String, Def[S]] = Map.empty[String, Def[S]]
)
object DefinitionsState {
final case class Def[S[_]](
name: Name[S],
`type`: Type
)
}

View File

@ -2,7 +2,8 @@ package aqua.semantics.rules.types
import aqua.parser.lexer.*
import aqua.raw.value.{PropertyRaw, ValueRaw}
import aqua.types.{ArrowType, StructType, Type}
import aqua.types.{AbilityType, ArrowType, NamedType, StructType, Type}
import cats.data.NonEmptyMap
import cats.data.NonEmptyList
@ -14,10 +15,15 @@ trait TypesAlgebra[S[_], Alg[_]] {
def resolveArrowDef(arrowDef: ArrowTypeToken[S]): Alg[Option[ArrowType]]
def defineNamedType(
def defineAbilityType(
name: NamedTypeToken[S],
`type`: Type
): Alg[Boolean]
fields: Map[String, (Name[S], Type)]
): Alg[Option[AbilityType]]
def defineStructType(
name: NamedTypeToken[S],
fields: Map[String, (Name[S], Type)]
): Alg[Option[StructType]]
def defineAlias(name: NamedTypeToken[S], target: Type): Alg[Boolean]
@ -30,12 +36,18 @@ trait TypesAlgebra[S[_], Alg[_]] {
): Alg[Option[PropertyRaw]]
def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[PropertyRaw]]
def resolveArrow(rootT: Type, op: IntoArrow[S], arguments: List[ValueRaw]): Alg[Option[PropertyRaw]]
def resolveArrow(
rootT: Type,
op: IntoArrow[S],
arguments: List[ValueRaw]
): Alg[Option[PropertyRaw]]
def ensureValuesComparable(token: Token[S], left: Type, right: Type): Alg[Boolean]
def ensureTypeMatches(token: Token[S], expected: Type, givenType: Type): Alg[Boolean]
def ensureTypeIsCollectible(token: Token[S], givenType: Type): Alg[Boolean]
def ensureTypeOneOf[T <: Type](
token: Token[S],
expected: Set[T],

View File

@ -54,13 +54,12 @@ class TypesInterpreter[S[_], X](implicit
override def resolveType(token: TypeToken[S]): State[X, Option[Type]] =
getState.map(st => TypesStateHelper.resolveTypeToken(token, st, resolver)).flatMap {
case Some(t) =>
val (tt, tokens) = t
val tokensLocs = tokens.map { case (t, n) =>
n.value -> t
}
locations.pointLocations(tokensLocs).map(_ => Some(tt))
case None => report(token, s"Unresolved type").as(None)
case Some((typ, tokens)) =>
val tokensLocs = tokens.map { case (t, n) => n.value -> t }
locations.pointLocations(tokensLocs).as(typ.some)
case None =>
// TODO: Give more specific error message
report(token, s"Unresolved type").as(None)
}
override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] =
@ -78,21 +77,61 @@ class TypesInterpreter[S[_], X](implicit
}
}
override def defineNamedType(
override def defineAbilityType(
name: NamedTypeToken[S],
`type`: Type
): State[X, Boolean] =
fields: Map[String, (Name[S], Type)]
): State[X, Option[AbilityType]] =
getState.map(_.definitions.get(name.value)).flatMap {
case Some(n) if n == name => State.pure(true)
case Some(_) =>
report(name, s"Type `${name.value}` was already defined").as(false)
case Some(_) => report(name, s"Ability `${name.value}` was already defined").as(none)
case None =>
modify { st =>
st.copy(
strict = st.strict.updated(name.value, `type`),
definitions = st.definitions.updated(name.value, name)
val types = fields.view.mapValues { case (_, t) => t }.toMap
NonEmptyMap
.fromMap(SortedMap.from(types))
.fold(report(name, s"Ability `${name.value}` has no fields").as(none))(nonEmptyFields =>
val `type` = AbilityType(name.value, nonEmptyFields)
modify { st =>
st.copy(
strict = st.strict.updated(name.value, `type`),
definitions = st.definitions.updated(name.value, name)
)
}.as(`type`.some)
)
}
override def defineStructType(
name: NamedTypeToken[S],
fields: Map[String, (Name[S], Type)]
): State[X, Option[StructType]] =
getState.map(_.definitions.get(name.value)).flatMap {
case Some(_) => report(name, s"Data `${name.value}` was already defined").as(none)
case None =>
fields.toList.traverse {
case (field, (fieldName, t: DataType)) =>
t match {
case _: StreamType => report(fieldName, s"Field '$field' has stream type").as(none)
case _ => (field -> t).some.pure[ST]
}
case (field, (fieldName, t)) =>
report(
fieldName,
s"Field '$field' has unacceptable for struct field type '$t'"
).as(none)
}.map(_.sequence.map(_.toMap))
.flatMap(
_.map(SortedMap.from)
.flatMap(NonEmptyMap.fromMap)
.fold(
report(name, s"Struct `${name.value}` has no fields").as(none)
)(nonEmptyFields =>
val `type` = StructType(name.value, nonEmptyFields)
modify { st =>
st.copy(
strict = st.strict.updated(name.value, `type`),
definitions = st.definitions.updated(name.value, name)
)
}.as(`type`.some)
)
)
}.as(true)
}
override def defineAlias(name: NamedTypeToken[S], target: Type): State[X, Boolean] =
@ -299,6 +338,16 @@ class TypesInterpreter[S[_], X](implicit
}
}
// Validates that a value of `givenType` may be placed inside a collection
// literal (option / array / stream). Only `DataType`s are collectible;
// anything else (e.g. arrows, abilities) is reported as an error at `token`
// and the check yields false.
override def ensureTypeIsCollectible(token: Token[S], givenType: Type): State[X, Boolean] =
givenType match {
// Plain data types are always allowed in collections.
case _: DataType => true.pure
// Non-data types (arrow/ability/etc.) are rejected with a semantic error.
case _ =>
report(
token,
s"Value of type '$givenType' could not be put into a collection"
).as(false)
}
override def ensureTypeOneOf[T <: Type](
token: Token[S],
expected: Set[T],

View File

@ -1,37 +1,14 @@
package aqua.semantics.rules.types
import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw}
import aqua.parser.lexer.{
ArrayTypeToken,
ArrowTypeToken,
BasicTypeToken,
NamedTypeToken,
IntoField,
IntoIndex,
Name,
OptionTypeToken,
PropertyOp,
StreamTypeToken,
Token,
TopBottomToken,
TypeToken
}
import aqua.types.{
ArrayType,
ArrowType,
BottomType,
DataType,
OptionType,
ProductType,
StreamType,
StructType,
TopType,
Type
}
import aqua.parser.lexer.*
import aqua.types.*
import aqua.raw.RawContext
import cats.data.Validated.{Invalid, Valid}
import cats.data.{Chain, NonEmptyChain, ValidatedNec}
import cats.kernel.Monoid
import aqua.raw.RawContext
import cats.syntax.option.*
case class TypesState[S[_]](
fields: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)],
@ -56,7 +33,7 @@ object TypesStateHelper {
): Option[(Type, List[(Token[S], NamedTypeToken[S])])] =
tt match {
case TopBottomToken(_, isTop) =>
Option((if (isTop) TopType else BottomType, Nil))
(if (isTop) TopType else BottomType, Nil).some
case ArrayTypeToken(_, dtt) =>
resolveTypeToken(dtt, state, resolver).collect { case (it: DataType, t) =>
(ArrayType(it), t)
@ -71,7 +48,7 @@ object TypesStateHelper {
}
case ctt: NamedTypeToken[S] =>
resolver(state, ctt)
case btt: BasicTypeToken[S] => Some((btt.value, Nil))
case btt: BasicTypeToken[S] => (btt.value, Nil).some
case ArrowTypeToken(_, args, res) =>
val strictArgs =
args.map(_._2).map(resolveTypeToken(_, state, resolver)).collect {

View File

@ -1,50 +1,93 @@
package aqua.semantics
import aqua.parser.head.{ExportExpr, FromExpr, HeaderExpr}
import aqua.parser.lexer.Name
import aqua.parser.head.{ExportExpr, FromExpr, HeaderExpr, ModuleExpr}
import aqua.parser.lexer.{Ability, Name}
import aqua.raw.RawContext
import aqua.raw.arrow.{ArrowRaw, FuncRaw}
import aqua.raw.ops.RawTag
import aqua.raw.value.VarRaw
import aqua.semantics.header.{HeaderHandler, HeaderSem}
import aqua.types.{ArrowType, NilType, ProductType}
import cats.data.{Chain, NonEmptyList, Validated}
import aqua.types.{AbilityType, ArrowType, NilType, ProductType, ScalarType}
import cats.data.{Chain, NonEmptyList, NonEmptyMap, Validated}
import cats.free.Cofree
import cats.{Eval, Id, Monoid}
import cats.syntax.applicative.*
import org.scalatest.Inside
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class HeaderSpec extends AnyFlatSpec with Matchers with Inside {
"header handler" should "generate an error on exported function that returns arrow or ability" in {
implicit val rc: Monoid[RawContext] = RawContext.implicits(RawContext.blank).rawContextMonoid
given Monoid[RawContext] = RawContext.implicits(RawContext.blank).rawContextMonoid
val handler = new HeaderHandler[Id, RawContext]()
val handler = new HeaderHandler[Id, RawContext]()
val funcName = "funcName"
def exportHeader(funcName: String): Cofree[Chain, HeaderExpr[Id]] = {
val exp: FromExpr.NameOrAbAs[Id] = Left((Name(funcName), None))
val exp: FromExpr.NameOrAbAs[Id] = Left((Name[Id](funcName), None))
val ast =
Cofree[Chain, HeaderExpr[Id]](ExportExpr[Id](NonEmptyList.of(exp)), Eval.now(Chain.empty))
/**
* aqua TestModule
* export <funcName>
*/
Cofree(
ModuleExpr(
name = Ability[Id]("TestModule"),
declareAll = None,
declareNames = Nil,
declareCustom = Nil
),
Chain(
Cofree(
ExportExpr(NonEmptyList.of(exp)),
Chain.empty.pure
)
).pure
)
}
val retArrowType = ArrowType(NilType, NilType)
val arrowType = ArrowType(NilType, ProductType.apply(retArrowType :: Nil))
val initCtx = RawContext(parts =
def funcCtx(funcName: String, arrowType: ArrowType): RawContext =
RawContext(parts =
Chain.one(
(
RawContext.blank,
FuncRaw(funcName, ArrowRaw(arrowType, VarRaw("", retArrowType) :: Nil, RawTag.empty))
FuncRaw(
funcName,
ArrowRaw(arrowType, Nil, RawTag.empty)
)
)
)
)
"header handler" should "generate an error on exported function that returns arrow or ability" in {
val funcName = "funcName"
val ast = exportHeader(funcName)
val retArrowType = ArrowType(NilType, NilType)
val arrowType = ArrowType(NilType, ProductType.apply(retArrowType :: Nil))
val initCtx = funcCtx(funcName, arrowType)
val result = handler.sem(Map.empty, ast).andThen(_.finCtx(initCtx))
inside(result) {
case Validated.Invalid(errors) =>
errors.head shouldBe a [HeaderError[Id]]
inside(result) { case Validated.Invalid(errors) =>
atLeast(1, errors.toChain.toList) shouldBe a[HeaderError[Id]]
}
}
// Exporting a function that takes an ability as an argument must be
// rejected by the header handler (abilities cannot cross the export boundary).
it should "generate an error on exported function that accepts an ability" in {
val funcName = "funcName"
// Header: `aqua TestModule` + `export funcName` (built by the shared fixture).
val ast = exportHeader(funcName)
// Ability with a single scalar field, used as the function's sole argument.
val abilityType = AbilityType("Ab", NonEmptyMap.of("field" -> ScalarType.i8))
val arrowType = ArrowType(ProductType(abilityType :: Nil), NilType)
val initCtx = funcCtx(funcName, arrowType)
val result = handler.sem(Map.empty, ast).andThen(_.finCtx(initCtx))
// Expect at least one HeaderError among the accumulated validation errors.
inside(result) { case Validated.Invalid(errors) =>
atLeast(1, errors.toChain.toList) shouldBe a[HeaderError[Id]]
}
}
}

View File

@ -586,4 +586,29 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
}
}
}
// Struct (`data`) fields must be plain data types: declaring a field of an
// ability type or of a stream type must produce a semantic error.
it should "forbid abilities or streams in struct fields" in {
// Struct with an ability-typed field — invalid.
val scriptAbility =
"""
|ability Ab:
| a: string
|
|data St:
| a: Ab
|""".stripMargin
// Struct with a stream-typed (`*i8`) field — invalid.
val scriptStream =
"""
|data St:
| s: *i8
|""".stripMargin
insideSemErrors(scriptAbility) { errors =>
atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]]
}
insideSemErrors(scriptStream) { errors =>
atLeast(1, errors.toChain.toList) shouldBe a[RulesViolated[Span.S]]
}
}
}

View File

@ -17,7 +17,15 @@ import aqua.semantics.rules.locations.DummyLocationsInterpreter
import aqua.raw.value.{ApplyBinaryOpRaw, LiteralRaw}
import aqua.raw.RawContext
import aqua.types.*
import aqua.parser.lexer.{InfixToken, LiteralToken, Name, PrefixToken, ValueToken, VarToken}
import aqua.parser.lexer.{
CollectionToken,
InfixToken,
LiteralToken,
Name,
PrefixToken,
ValueToken,
VarToken
}
import aqua.raw.value.ApplyUnaryOpRaw
import aqua.parser.lexer.ValueToken.string
@ -60,6 +68,15 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
// Test fixture: a variable reference token with the given name.
def variable(name: String): VarToken[Id] =
VarToken[Id](Name[Id](name))
// Test fixture: an option-collection literal (`?[...]`) wrapping one value.
def option(value: ValueToken[Id]): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.OptionMode, List(value))
// Test fixture: an array-collection literal (`[...]`) of the given values.
def array(values: ValueToken[Id]*): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.ArrayMode, values.toList)
// Test fixture: a stream-collection literal (`*[...]`) of the given values.
def stream(values: ValueToken[Id]*): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.StreamMode, values.toList)
def allPairs[A](list: List[A]): List[(A, A)] = for {
a <- list
b <- list
@ -511,4 +528,40 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
}
}
}
// Collection literals (option/array/stream) may hold only data-typed values:
// putting an ability- or arrow-typed variable inside any collection mode must
// fail with RulesViolated and yield no raw value.
it should "forbid collections with abilities or arrows" in {
// An ability-typed variable `ab`.
val ability = variable("ab")
val abilityType = AbilityType("Ab", NonEmptyMap.of("field" -> ScalarType.i8))
// An arrow-typed (i8 -> i8) variable `arr`.
val arrow = variable("arr")
val arrowType = ArrowType(
ProductType(ScalarType.i8 :: Nil),
ProductType(ScalarType.i8 :: Nil)
)
val alg = algebra()
// Seed the interpreter state so both variables resolve to their types.
val state = genState(
vars = Map(
ability.name.value -> abilityType,
arrow.name.value -> arrowType
)
)
// Every collection mode × every forbidden element type must be rejected.
List(
option(ability),
array(ability),
stream(ability),
option(arrow),
array(arrow),
stream(arrow)
).foreach { coll =>
val (st, res) = alg
.valueToRaw(coll)
.run(state)
.value
// No raw value is produced and at least one rules violation is reported.
res shouldBe None
atLeast(1, st.errors.toList) shouldBe a[RulesViolated[Id]]
}
}
}

View File

@ -246,6 +246,7 @@ sealed trait NamedType extends Type {
}
// Struct is an unordered collection of labelled types
// TODO: Make fields type `DataType`
case class StructType(name: String, fields: NonEmptyMap[String, Type])
extends DataType with NamedType {