checking types for their validity

DieMyst 2023-11-06 17:41:10 +07:00
parent a1576efad9
commit 4231c2f55c
7 changed files with 61 additions and 43 deletions

View File

@@ -67,4 +67,3 @@ rewrite.imports.sort = ascii
 rewrite.imports.groups = [
   ["aqua\\..*"]
 ]
-#runner.dialect = scala3

View File

@@ -1,2 +1,7 @@
-func arr(strs: []string) -> []string
-    <- strs
+data A:
+    b: []*string
+
+func test():
+    inner: *string
+    arr = [inner]
+    a = A(b = arr)

View File

@@ -43,8 +43,8 @@ case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: Data
 object StreamTypeToken {

   val `streamtypedef`: P[StreamTypeToken[Span.S]] =
-    ((`*`.lift <* P.not(`*`).withContext("Nested streams '**type' are prohibited"))
-      ~ DataTypeToken.`withoutstreamdatatypedef`)
+    (`*`.lift
+      ~ DataTypeToken.`datatypedef`)
       .map(ud => StreamTypeToken(ud._1, ud._2))

 }
@@ -60,7 +60,7 @@ case class OptionTypeToken[F[_]: Comonad](override val unit: F[Unit], data: Data

 object OptionTypeToken {

   val `optiontypedef`: P[OptionTypeToken[Span.S]] =
-    (`?`.lift ~ DataTypeToken.`withoutstreamdatatypedef`).map(ud => OptionTypeToken(ud._1, ud._2))
+    (`?`.lift ~ DataTypeToken.`datatypedef`).map(ud => OptionTypeToken(ud._1, ud._2))

 }
@@ -156,19 +156,12 @@ object ArrowTypeToken {
 object DataTypeToken {

   val `arraytypedef`: P[ArrayTypeToken[Span.S]] =
-    (`[]`.lift ~ `withoutstreamdatatypedef`).map(ud => ArrayTypeToken(ud._1, ud._2))
+    (`[]`.lift ~ `datatypedef`).map(ud => ArrayTypeToken(ud._1, ud._2))

   val `topbottomdef`: P[TopBottomToken[Span.S]] =
     `⊥`.lift.map(TopBottomToken(_, isTop = false)) |
       `⊤`.lift.map(TopBottomToken(_, isTop = true))

-  def `withoutstreamdatatypedef`: P[DataTypeToken[Span.S]] =
-    P.oneOf(
-      P.defer(`topbottomdef`) :: P.defer(`arraytypedef`) :: P.defer(
-        OptionTypeToken.`optiontypedef`
-      ) :: BasicTypeToken.`basictypedef` :: NamedTypeToken.dotted :: Nil
-    )
-
   def `datatypedef`: P[DataTypeToken[Span.S]] =
     P.oneOf(
       P.defer(`topbottomdef`) :: P.defer(`arraytypedef`) :: P.defer(

View File

@@ -1,15 +1,15 @@
 package aqua.semantics.expr.func

-import aqua.raw.ops.DeclareStreamTag
 import aqua.parser.expr.func.DeclareStreamExpr
 import aqua.raw.Raw
+import aqua.raw.ops.DeclareStreamTag
 import aqua.raw.value.VarRaw
 import aqua.semantics.Prog
 import aqua.semantics.rules.names.NamesAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
 import aqua.types.{ArrayType, OptionType, StreamType}
+
 import cats.Monad
-import cats.data.Chain
 import cats.syntax.applicative.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*

View File

@@ -5,13 +5,11 @@ import aqua.semantics.Levenshtein
 import aqua.semantics.rules.StackInterpreter
 import aqua.semantics.rules.locations.LocationsAlgebra
 import aqua.semantics.rules.report.ReportAlgebra
+import aqua.semantics.rules.types.TypesChecker
 import aqua.types.{ArrowType, StreamType, Type}

 import cats.data.{OptionT, State}
 import cats.syntax.all.*
-import cats.syntax.applicative.*
-import cats.syntax.flatMap.*
-import cats.syntax.functor.*

 import monocle.Lens
 import monocle.macros.GenLens
@@ -102,9 +100,9 @@ class NamesInterpreter[S[_], X](using
           case false => report.error(name, "This name was already defined in the scope").as(false)
         }
       case None =>
-        mapStackHeadM(report.error(name, "Cannot define a variable in the root scope").as(false))(
-          fr => (fr.addName(name, `type`) -> true).pure
-        ) <* locations.addToken(name.value, name)
+        TypesChecker.checkType(name, `type`) >> mapStackHeadM(
+          report.error(name, "Cannot define a variable in the root scope").as(false)
+        )(fr => (fr.addName(name, `type`) -> true).pure) <* locations.addToken(name.value, name)
     }

   override def derive(name: Name[S], `type`: Type, derivedFrom: Set[String]): State[X, Boolean] =

View File

@@ -0,0 +1,28 @@
+package aqua.semantics.rules.types
+
+import aqua.parser.lexer.Token
+import aqua.semantics.rules.report.ReportAlgebra
+import aqua.types.{BoxType, StreamType, Type}
+
+import cats.data.State
+
+object TypesChecker {
+  // check if this type can exist in aqua code
+  def checkType[S[_], X](name: Token[S], `type`: Type)(using report: ReportAlgebra[S, State[X, *]]): State[X, Unit] = {
+    `type` match {
+      case StreamType(StreamType(_)) =>
+        report.error(name, "Cannot define stream of streams")
+      case b: BoxType =>
+        b.element match {
+          case _: StreamType =>
+            report.error(name, "Cannot define type. Stream cannot be in a boxed type")
+          case bt: BoxType =>
+            checkType(name, bt)
+          case _ =>
+            State.pure(())
+        }
+      case t =>
+        State.pure(())
+    }
+  }
+}
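
A minimal sketch (editor's illustration, not part of the commit) of the type shapes this checker is meant to accept or flag. It assumes the aqua.types constructors ArrayType, OptionType and StreamType seen elsewhere in this diff, plus ScalarType for element types; exact signatures are inferred from their use here.

// Editor's sketch: expected behaviour of TypesChecker.checkType on a few type shapes.
// ScalarType.string is assumed to be one of the predefined scalar types in aqua.types.
import aqua.types.{ArrayType, OptionType, ScalarType, StreamType}

object TypesCheckerShapes {
  val plainArray   = ArrayType(ScalarType.string)                         // []string   - passes
  val nestedStream = StreamType(StreamType(ScalarType.string))            // **string   - "Cannot define stream of streams"
  val streamInBox  = ArrayType(StreamType(ScalarType.string))             // []*string  - stream inside a boxed type, flagged
  val deeplyBoxed  = OptionType(ArrayType(StreamType(ScalarType.string))) // ?[]*string - flagged via the recursive BoxType case
}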

View File

@@ -1,34 +1,25 @@
 package aqua.semantics.rules.types

 import aqua.parser.lexer.*
-import aqua.raw.value.{
-  FunctorRaw,
-  IntoArrowRaw,
-  IntoCopyRaw,
-  IntoFieldRaw,
-  IntoIndexRaw,
-  PropertyRaw,
-  ValueRaw
-}
-import aqua.semantics.rules.locations.LocationsAlgebra
+import aqua.raw.value.*
 import aqua.semantics.rules.StackInterpreter
+import aqua.semantics.rules.locations.LocationsAlgebra
 import aqua.semantics.rules.report.ReportAlgebra
 import aqua.semantics.rules.types.TypesStateHelper.{TypeResolution, TypeResolutionError}
 import aqua.types.*

+import cats.Applicative
+import cats.data.*
 import cats.data.Validated.{Invalid, Valid}
-import cats.data.{Chain, NonEmptyList, NonEmptyMap, OptionT, State}
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
-import cats.syntax.functor.*
-import cats.syntax.traverse.*
 import cats.syntax.foldable.*
-import cats.{~>, Applicative}
+import cats.syntax.functor.*
 import cats.syntax.option.*
+import cats.syntax.traverse.*

 import monocle.Lens
 import monocle.macros.GenLens

 import scala.collection.immutable.SortedMap

 class TypesInterpreter[S[_], X](using
@@ -45,6 +36,9 @@ class TypesInterpreter[S[_], X](using

   type ST[A] = State[X, A]

+  private def defineType(name: NamedTypeToken[S], `type`: Type): State[X, Unit] =
+    TypesChecker.checkType(name, `type`) >> modify(_.defineType(name, `type`))
+
   override def getType(name: String): State[X, Option[Type]] =
     getState.map(st => st.strict.get(name))

@@ -97,7 +91,7 @@ class TypesInterpreter[S[_], X](using
       nonEmptyFields =>
         val `type` = AbilityType(name.value, nonEmptyFields)
-        modify(_.defineType(name, `type`)).as(`type`.some)
+        defineType(name, `type`).as(`type`.some)
     )
   }
@@ -131,7 +125,7 @@ class TypesInterpreter[S[_], X](using
       ).semiflatMap(nonEmptyArrows =>
         val `type` = ServiceType(name.value, nonEmptyArrows)
-        modify(_.defineType(name, `type`)).as(`type`)
+        defineType(name, `type`).as(`type`)
       ).value
   )
@@ -144,8 +138,9 @@ class TypesInterpreter[S[_], X](using
         case (field, (fieldName, t: DataType)) =>
           t match {
             case _: StreamType =>
-              report.error(fieldName, s"Field '$field' has stream type").as(none)
-            case _ => (field -> t).some.pure[ST]
+              report.error(fieldName, s"Field '$field' has stream type. This is forbidden for data structures.").as(none)
+            case _ =>
+              TypesChecker.checkType(fieldName, t) >> (field -> t).some.pure[ST]
           }
         case (field, (fieldName, t)) =>
           report
@@ -163,7 +158,7 @@ class TypesInterpreter[S[_], X](using
         )(nonEmptyFields =>
           val `type` = StructType(name.value, nonEmptyFields)
-          modify(_.defineType(name, `type`)).as(`type`.some)
+          defineType(name, `type`).as(`type`.some)
         )
       )
     )
@@ -173,7 +168,7 @@ class TypesInterpreter[S[_], X](using
       case Some(n) if n == name => State.pure(false)
       case Some(_) => report.error(name, s"Type `${name.value}` was already defined").as(false)
       case None =>
-        modify(_.defineType(name, target))
+        defineType(name, target)
           .productL(locations.addToken(name.value, name))
           .as(true)
     }