StackInterpreter factored out

dmitry 2021-03-15 11:45:27 +03:00
parent 5306514e92
commit b7306e1fa5
19 changed files with 206 additions and 133 deletions
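This commit pulls the State/Lens plumbing that AbilitiesInterpreter carried inline (getState, setState, modify, report and the stack-head helpers) into a reusable StackInterpreter base class, switches the lens machinery from shapeless to Monocle (GenLens), and gives several tokens a Comonad-backed value accessor so call sites can write .value instead of .name.extract. Below is a minimal sketch of how a concrete interpreter plugs into the new base class; MyOp, MyState, MyFrame and rememberName are hypothetical stand-ins, not part of the commit (the real users are AbilitiesInterpreter and the NamesInterpreter stub in the diff):

import aqua.ast.algebra.{ReportError, StackInterpreter}
import aqua.parser.lexer.{Name, Token}
import cats.data.State
import cats.syntax.functor._
import cats.~>
import monocle.Lens
import monocle.macros.GenLens

// Hypothetical algebra, state and stack frame, mirroring AbilitiesState/AbilityStackFrame.
sealed trait MyOp[F[_], A]
case class MyFrame[F[_]](token: Token[F], names: Set[String] = Set.empty)
case class MyState[F[_]](stack: List[MyFrame[F]] = Nil)

class MyInterpreter[F[_], X](implicit lens: Lens[X, MyState[F]], error: ReportError[F, X])
    extends StackInterpreter[F, X, MyState[F], MyFrame[F]](GenLens[MyState[F]](_.stack))
    with (MyOp[F, *] ~> State[X, *]) {

  // Update the innermost frame, or report an error when no scope is open.
  private def rememberName(n: Name[F]): State[X, Boolean] =
    mapStackHead(report(n, "No scope is open").as(false))(frame =>
      frame.copy(names = frame.names + n.value) -> true
    )

  override def apply[A](op: MyOp[F, A]): State[X, A] = ???
}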

View File

@@ -2,6 +2,7 @@ val dottyVersion = "2.13.5"
//val dottyVersion = "3.0.0-RC1"
val catsV = "2.4.2"
val monocleV = "3.0.0-M3"
lazy val root = project
.in(file("."))
@@ -11,10 +12,12 @@ lazy val root = project
scalaVersion := dottyVersion,
mainClass in (Compile, run) := Some("aqua.Main"),
libraryDependencies ++= Seq(
"org.typelevel" %% "cats-effect" % "3.0.0-RC2",
"org.typelevel" %% "cats-parse" % "0.3.1",
"org.typelevel" %% "cats-free" % catsV,
"com.chuusai" %% "shapeless" % "2.3.3"
"org.typelevel" %% "cats-effect" % "3.0.0-RC2",
"org.typelevel" %% "cats-parse" % "0.3.1",
"org.typelevel" %% "cats-free" % catsV,
"com.chuusai" %% "shapeless" % "2.3.3",
"com.github.julien-truffaut" %% "monocle-core" % monocleV,
"com.github.julien-truffaut" %% "monocle-macro" % monocleV
),
libraryDependencies += "org.scalatest" %% "scalatest" % "3.2.5" % Test
)

View File

@@ -1,5 +1,6 @@
package aqua.ast
import aqua.ast.algebra.types.ArrowType
import cats.free.Free
case class Gen(log: String) {
@@ -8,4 +9,6 @@ case class Gen(log: String) {
object Gen {
def noop = new Gen("noop")
case class Arrow(`type`: ArrowType, gen: Gen)
}

View File

@@ -0,0 +1,46 @@
package aqua.ast.algebra
import aqua.parser.lexer.Token
import cats.data.State
import monocle.Lens
import cats.syntax.functor._
abstract class StackInterpreter[F[_], X, St, Fr](stackLens: Lens[St, List[Fr]])(implicit
lens: Lens[X, St],
error: ReportError[F, X]
) {
type S[A] = State[X, A]
protected def getState: S[St] = State.get.map(lens.get)
protected def setState(st: St): S[Unit] = State.modify(s => lens.replace(st)(s))
protected def mapStackHead[A](ifStackEmpty: S[A])(f: Fr => (Fr, A)): S[A] =
getState.map(stackLens.get).flatMap {
case h :: tail =>
val (updated, result) = f(h)
modify(stackLens.replace(updated :: tail)).as(result)
case Nil =>
ifStackEmpty
}
protected def mapStackHeadE[A](
ifStackEmpty: S[A]
)(f: Fr => Either[(Token[F], String, A), (Fr, A)]): S[A] =
getState.map(stackLens.get).flatMap {
case h :: tail =>
f(h) match {
case Right((updated, result)) =>
modify(stackLens.replace(updated :: tail)).as(result)
case Left((tkn, hint, result)) =>
report(tkn, hint).as(result)
}
case Nil =>
ifStackEmpty
}
protected def report(t: Token[F], hint: String): S[Unit] =
State.modify(error(_, t, hint))
protected def modify(f: St => St): S[Unit] =
State.modify(s => lens.modify(f)(s))
}

View File

@@ -1,27 +1,20 @@
package aqua.ast.algebra.abilities
import aqua.ast.algebra.ReportError
import aqua.ast.algebra.{ReportError, StackInterpreter}
import aqua.ast.algebra.types.ArrowType
import aqua.parser.lexer.{Name, Token, Value}
import cats.data.{NonEmptyList, NonEmptyMap, State}
import cats.{~>, Comonad}
import shapeless.Lens
import cats.~>
import cats.syntax.functor._
import cats.syntax.comonad._
import monocle.Lens
import monocle.macros.GenLens
class AbilitiesInterpreter[F[_]: Comonad, X](implicit lens: Lens[X, AbState[F]], error: ReportError[F, X])
extends (AbilityOp[F, *] ~> State[X, *]) {
class AbilitiesInterpreter[F[_], X](implicit lens: Lens[X, AbilitiesState[F]], error: ReportError[F, X])
extends StackInterpreter[F, X, AbilitiesState[F], AbilityStackFrame[F]](GenLens[AbilitiesState[F]](_.stack))
with (AbilityOp[F, *] ~> State[X, *]) {
type S[A] = State[X, A]
private def getState: S[AbState[F]] = State.get.map(lens.get)
private def setState(st: AbState[F]): S[Unit] = State.modify(s => lens.set(s)(st))
private def report(t: Token[F], hint: String): S[Unit] =
State.modify(error(_, t, hint))
private def modify(f: AbState[F] => AbState[F]): S[Unit] =
State.modify(s => lens.set(s)(f(lens.get(s))))
private def getService(name: String): S[Option[NonEmptyMap[String, ArrowType]]] =
getState.map(_.services.get(name))
override def apply[A](fa: AbilityOp[F, A]): State[X, A] =
(fa match {
@@ -40,9 +33,9 @@ class AbilitiesInterpreter[F[_]: Comonad, X](implicit lens: Lens[X, AbState[F]],
}
case ga: GetArrow[F] =>
getState.map(_.services.get(ga.name.name.extract)).flatMap {
getService(ga.name.value).flatMap {
case Some(arrows) =>
arrows(ga.arrow.name.extract)
arrows(ga.arrow.value)
.fold(
report(
ga.arrow,
@@ -54,53 +47,44 @@ class AbilitiesInterpreter[F[_]: Comonad, X](implicit lens: Lens[X, AbState[F]],
}
case s: SetServiceId[F] =>
getState.map(_.services.get(s.name.name.extract)).flatMap {
getService(s.name.value).flatMap {
case Some(_) =>
getState.map(_.stack).flatMap {
case h :: tail =>
modify(_.copy(stack = h.copy(serviceIds = h.serviceIds.updated(s.name.name.extract, s.id)) :: tail))
.as(true)
mapStackHead(
report(s.name, "Trying to set service ID while out of the scope").as(false)
)(h => h.copy(serviceIds = h.serviceIds.updated(s.name.value, s.id)) -> true)
case _ =>
report(s.name, "Trying to set service ID while out of the scope").as(false)
}
case None =>
report(s.name, "Service with this name is not registered, can't set its ID").as(false)
}
case da: DefineArrow[F] =>
getState.map(_.stack).flatMap {
case h :: tail =>
h.arrows.get(da.arrow.name.extract) match {
case Some(_) => report(da.arrow, "Arrow with this name was already defined above").as(false)
case None =>
modify(
_.copy(stack =
h.copy(arrows = h.arrows.updated(da.arrow.name.extract, da.arrow -> da.`type`)) :: tail
)
).as(true)
}
case _ =>
report(da.arrow, "No abilities definition scope is found").as(false)
}
mapStackHeadE(
report(da.arrow, "No abilities definition scope is found").as(false)
)(h =>
h.arrows.get(da.arrow.value) match {
case Some(_) => Left((da.arrow, "Arrow with this name was already defined above", false))
case None =>
Right(h.copy(arrows = h.arrows.updated(da.arrow.value, da.arrow -> da.`type`)) -> true)
}
)
case ds: DefineService[F] =>
getState.map(_.services.get(ds.name.name.extract)).flatMap {
getService(ds.name.value).flatMap {
case Some(_) => report(ds.name, "Service with this name was already defined").as(false)
case None => modify(s => s.copy(services = s.services.updated(ds.name.name.extract, ds.arrows))).as(true)
case None => modify(s => s.copy(services = s.services.updated(ds.name.value, ds.arrows))).as(true)
}
}).asInstanceOf[State[X, A]]
}
case class AbState[F[_]](
stack: List[AbScope[F]] = Nil,
case class AbilitiesState[F[_]](
stack: List[AbilityStackFrame[F]] = Nil,
services: Map[String, NonEmptyMap[String, ArrowType]] = Map.empty
) {
def beginScope(token: Token[F]): AbState[F] = copy[F](AbScope[F](token) :: stack)
def endScope: AbState[F] = copy[F](stack.tail)
def beginScope(token: Token[F]): AbilitiesState[F] = copy[F](AbilityStackFrame[F](token) :: stack)
def endScope: AbilitiesState[F] = copy[F](stack.tail)
def purgeArrows: Option[(NonEmptyList[(Name[F], ArrowType)], AbState[F])] =
def purgeArrows: Option[(NonEmptyList[(Name[F], ArrowType)], AbilitiesState[F])] =
stack match {
case sc :: tail =>
NonEmptyList.fromList(sc.arrows.values.toList).map(_ -> copy[F](sc.copy(arrows = Map.empty) :: tail))
@@ -108,7 +92,7 @@ case class AbState[F[_]](
}
}
case class AbScope[F[_]](
case class AbilityStackFrame[F[_]](
token: Token[F],
arrows: Map[String, (Name[F], ArrowType)] = Map.empty[String, (Name[F], ArrowType)],
serviceIds: Map[String, Value[F]] = Map.empty[String, Value[F]]

View File

@@ -0,0 +1,18 @@
package aqua.ast.algebra.names
import aqua.ast.algebra.{ReportError, StackInterpreter}
import cats.data.State
import cats.~>
import monocle.Lens
import monocle.macros.GenLens
class NamesInterpreter[F[_], X](implicit lens: Lens[X, NamesState[F]], error: ReportError[F, X])
extends StackInterpreter[F, X, NamesState[F], NamesFrame[F]](GenLens[NamesState[F]](_.stack))
with (NameOp[F, *] ~> State[X, *]) {
override def apply[A](fa: NameOp[F, A]): State[X, A] = ???
}
case class NamesState[F[_]](stack: List[NamesFrame[F]])
case class NamesFrame[F[_]]()

View File

@@ -8,19 +8,17 @@ import aqua.parser.lexer.Token._
import aqua.parser.lift.LiftParser
import cats.Comonad
import cats.parse.Parser
import cats.syntax.comonad._
import cats.syntax.functor._
case class DataStructExpr[F[_]](name: CustomTypeToken[F]) extends Expr[F] {
def program[Alg[_]](implicit
N: NamesAlgebra[F, Alg],
T: TypesAlgebra[F, Alg],
F: Comonad[F]
T: TypesAlgebra[F, Alg]
): Prog[Alg, Gen] =
Prog after T
.purgeFields()
.map(_.map(kv => kv._1.name.extract -> kv._2).toNem)
.map(_.map(kv => kv._1.value -> kv._2).toNem)
.flatMap(T.defineDataType(name, _))
.as(Gen("Data struct created"))

View File

@@ -12,7 +12,6 @@ import cats.Comonad
import cats.free.Free
import cats.parse.Parser
import cats.syntax.apply._
import cats.syntax.comonad._
import cats.syntax.flatMap._
import cats.syntax.functor._
@@ -22,15 +21,14 @@ case class ServiceExpr[F[_]](name: Ability[F], id: Option[Value[F]]) extends Exp
A: AbilitiesAlgebra[F, Alg],
N: NamesAlgebra[F, Alg],
T: TypesAlgebra[F, Alg],
V: ValuesAlgebra[F, Alg],
F: Comonad[F]
V: ValuesAlgebra[F, Alg]
): Prog[Alg, Gen] =
Prog.around(
A.beginScope(name),
(_: Unit, body: Gen) =>
(A.purgeArrows(name) <* A.endScope()).flatMap {
case Some(nel) =>
A.defineService(name, nel.map(kv => kv._1.name.extract -> kv._2).toNem) >>
A.defineService(name, nel.map(kv => kv._1.value -> kv._2).toNem) >>
id.fold(Free.pure[Alg, Gen](Gen.noop))(idV =>
V.ensureIsString(idV) >> A.setServiceId(name, idV) as Gen.noop
)

View File

@@ -29,7 +29,7 @@ object Types {
def resolveTypeToken[F[_]: Comonad](strict: Map[String, Type], tt: TypeToken[F]): Option[Type] =
tt match {
case ArrayTypeToken(dtt) =>
case ArrayTypeToken(_, dtt) =>
resolveTypeToken(strict, dtt).collect {
case it: DataType => ArrayType(it)
}

View File

@@ -40,10 +40,10 @@ case class DefAlias[F[_], L](alias: CustomTypeToken[F], target: TypeToken[F], co
object DefType {
def `dname`[F[_]: LiftParser]: P[CustomTypeToken[F]] =
def `dname`[F[_]: LiftParser: Comonad]: P[CustomTypeToken[F]] =
`data` *> ` ` *> CustomTypeToken.ct[F] <* ` `.? <* `:` <* ` \n+`
def `dataname`[F[_]: LiftParser](indent: String): P[(Name[F], DataTypeToken[F])] =
def `dataname`[F[_]: LiftParser: Comonad](indent: String): P[(Name[F], DataTypeToken[F])] =
(Name.p[F] <* ` : `) ~ `datatypedef`
def `deftype`[F[_]: LiftParser: Comonad]: P[DefType[F, HNil]] =
@@ -54,7 +54,7 @@ object DefType {
object DefFunc {
def `funcname`[F[_]: LiftParser]: P[Name[F]] = ` `.?.with1 *> `func` *> ` ` *> Name.p <* ` `.?
def `funcname`[F[_]: LiftParser: Comonad]: P[Name[F]] = ` `.?.with1 *> `func` *> ` ` *> Name.p <* ` `.?
def `funcargs`[F[_]: LiftParser: Comonad]: P[List[(String, F[String], TypeToken[F])]] =
`(` *> comma0((`name`.lift <* ` : `) ~ `typedef`).map(_.map(kv => (kv._1.extract, kv._1, kv._2))) <* `)`
@@ -75,13 +75,14 @@ object DefFunc {
object DefService {
// TODO use name's [F] for ArrowName
def `funcdef`[F[_]: LiftParser]: P[(String, ArrowTypeToken[F])] =
def `funcdef`[F[_]: LiftParser: Comonad]: P[(String, ArrowTypeToken[F])] =
(`name` <* ` : `) ~ ArrowTypeToken.`arrowdef`
def `servicename`[F[_]: LiftParser]: P[Ability[F]] = `service` *> ` ` *> Ability.ab[F] <* ` `.? <* `:` <* ` \n+`
def `servicename`[F[_]: LiftParser: Comonad]: P[Ability[F]] =
`service` *> ` ` *> Ability.ab[F] <* ` `.? <* `:` <* ` \n+`
// TODO switch to funchead?
def `defservice`[F[_]: LiftParser]: P[DefService[F, HNil]] =
def `defservice`[F[_]: LiftParser: Comonad]: P[DefService[F, HNil]] =
(`servicename` ~ indented(_ => `funcdef`, "").map(_.toNem)).map {
case (n, f) => DefService(n, f, HNil)
}
@@ -89,7 +90,7 @@ object DefService {
object DefAlias {
def `defalias`[F[_]: LiftParser]: P[DefAlias[F, HNil]] =
def `defalias`[F[_]: LiftParser: Comonad]: P[DefAlias[F, HNil]] =
((`alias` *> ` ` *> CustomTypeToken.ct[F] <* ` : `) ~ `typedef`).map {
case (ct, t) => DefAlias(ct, t, HNil)
}

View File

@@ -3,16 +3,19 @@ package aqua.parser.lexer
import aqua.parser.lexer.Token._
import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser._
import cats.Functor
import cats.Comonad
import cats.parse.{Parser => P}
import cats.syntax.functor._
import cats.syntax.comonad._
case class Ability[F[_]](name: F[String]) extends Token[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
case class Ability[F[_]: Comonad](name: F[String]) extends Token[F] {
override def as[T](v: T): F[T] = name.as(v)
def value: String = name.extract
}
object Ability {
def ab[F[_]: LiftParser]: P[Ability[F]] =
def ab[F[_]: LiftParser: Comonad]: P[Ability[F]] =
`Class`.lift.map(Ability(_))
}

View File

@@ -3,12 +3,13 @@ package aqua.parser.lexer
import aqua.parser.lift.LiftParser
import cats.parse.{Parser => P}
import Token._
import cats.Comonad
case class Arg[F[_]](name: Name[F], `type`: TypeToken[F])
object Arg {
def p[F[_]: LiftParser]: P[Arg[F]] =
def p[F[_]: LiftParser: Comonad]: P[Arg[F]] =
((Name.p[F] <* ` : `) ~ TypeToken.`typedef`[F]).map {
case (name, t) => Arg(name, t)
}

View File

@@ -3,28 +3,30 @@ package aqua.parser.lexer
import Token._
import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser._
import cats.Functor
import cats.{Comonad, Functor}
import cats.data.NonEmptyList
import cats.parse.{Parser => P}
import cats.syntax.functor._
import cats.syntax.comonad._
sealed trait LambdaOp[F[_]] extends Token[F]
case class IntoField[F[_]](name: F[String]) extends LambdaOp[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
case class IntoField[F[_]: Comonad](name: F[String]) extends LambdaOp[F] {
override def as[T](v: T): F[T] = name.as(v)
def value: String = name.extract
}
case class IntoArray[F[_]](f: F[Unit]) extends LambdaOp[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = f.as(v)
case class IntoArray[F[_]: Functor](override val unit: F[Unit]) extends LambdaOp[F] {
override def as[T](v: T): F[T] = unit.as(v)
}
object LambdaOp {
private def parseField[F[_]: LiftParser]: P[LambdaOp[F]] = (`.` *> `name`).lift.map(IntoField(_))
private def parseArr[F[_]: LiftParser]: P[LambdaOp[F]] = `*`.lift.map(IntoArray(_))
private def parseOp[F[_]: LiftParser]: P[LambdaOp[F]] = P.oneOf(parseField.backtrack :: parseArr :: Nil)
private def parseField[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] = (`.` *> `name`).lift.map(IntoField(_))
private def parseArr[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] = `*`.lift.map(IntoArray(_))
private def parseOp[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] = P.oneOf(parseField.backtrack :: parseArr :: Nil)
def ops[F[_]: LiftParser]: P[NonEmptyList[LambdaOp[F]]] =
def ops[F[_]: LiftParser: Comonad]: P[NonEmptyList[LambdaOp[F]]] =
parseOp.rep
}

View File

@@ -3,16 +3,19 @@ package aqua.parser.lexer
import aqua.parser.lexer.Token._
import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser._
import cats.Functor
import cats.Comonad
import cats.parse.{Parser => P}
import cats.syntax.functor._
import cats.syntax.comonad._
case class Name[F[_]](name: F[String]) extends Token[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
case class Name[F[_]: Comonad](name: F[String]) extends Token[F] {
override def as[T](v: T): F[T] = name.as(v)
def value: String = name.extract
}
object Name {
def p[F[_]: LiftParser]: P[Name[F]] =
def p[F[_]: LiftParser: Comonad]: P[Name[F]] =
`name`.lift.map(Name(_))
}

View File

@@ -1,13 +1,12 @@
package aqua.parser.lexer
import cats.Functor
import cats.data.NonEmptyList
import cats.parse.{Accumulator0, Parser => P, Parser0 => P0}
trait Token[F[_]] {
def as[T](v: T)(implicit F: Functor[F]): F[T]
def as[T](v: T): F[T]
def unit(implicit F: Functor[F]): F[Unit] = as(())
def unit: F[Unit] = as(())
}
object Token {

View File

@@ -4,33 +4,37 @@ import aqua.ast.algebra.types.ScalarType
import aqua.parser.lexer.Token._
import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser._
import cats.Functor
import cats.{Comonad, Functor}
import cats.parse.{Parser => P}
import cats.syntax.functor._
import cats.syntax.comonad._
sealed trait TypeToken[F[_]] extends Token[F]
sealed trait DataTypeToken[F[_]] extends TypeToken[F]
// TODO add F[Unit]
case class ArrayTypeToken[F[_]](data: DataTypeToken[F]) extends DataTypeToken[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = data.as(v)
case class ArrayTypeToken[F[_]: Comonad](override val unit: F[Unit], data: DataTypeToken[F]) extends DataTypeToken[F] {
override def as[T](v: T): F[T] = unit.as(v)
}
case class CustomTypeToken[F[_]](name: F[String]) extends DataTypeToken[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
case class CustomTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken[F] {
override def as[T](v: T): F[T] = name.as(v)
def value: String = name.extract
}
object CustomTypeToken {
def ct[F[_]: LiftParser]: P[CustomTypeToken[F]] = `Class`.lift.map(CustomTypeToken(_))
def ct[F[_]: LiftParser: Comonad]: P[CustomTypeToken[F]] = `Class`.lift.map(CustomTypeToken(_))
}
case class BasicTypeToken[F[_]](value: F[ScalarType]) extends DataTypeToken[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = value.as(v)
case class BasicTypeToken[F[_]: Comonad](scalarType: F[ScalarType]) extends DataTypeToken[F] {
override def as[T](v: T): F[T] = scalarType.as(v)
def value: ScalarType = scalarType.extract
}
object BasicTypeToken {
def `basictypedef`[F[_]: LiftParser]: P[BasicTypeToken[F]] =
def `basictypedef`[F[_]: LiftParser: Comonad]: P[BasicTypeToken[F]] =
P.oneOf(
ScalarType.all.map(n => P.string(n.name).as(n)).toList
)
@@ -43,9 +47,12 @@ sealed trait ArrowDef[F[_]] {
def resType: Option[DataTypeToken[F]]
}
case class ArrowTypeToken[F[_]](point: F[Unit], args: List[DataTypeToken[F]], res: Option[DataTypeToken[F]])
extends TypeToken[F] with ArrowDef[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = point.as(v)
case class ArrowTypeToken[F[_]: Comonad](
override val unit: F[Unit],
args: List[DataTypeToken[F]],
res: Option[DataTypeToken[F]]
) extends TypeToken[F] with ArrowDef[F] {
override def as[T](v: T): F[T] = unit.as(v)
override def argTypes: List[TypeToken[F]] = args
@@ -54,7 +61,7 @@ case class ArrowTypeToken[F[_]](point: F[Unit], args: List[DataTypeToken[F]], re
object ArrowTypeToken {
def `arrowdef`[F[_]: LiftParser]: P[ArrowTypeToken[F]] =
def `arrowdef`[F[_]: LiftParser: Comonad]: P[ArrowTypeToken[F]] =
(comma0(DataTypeToken.`datatypedef`).with1 ~ `->`.lift ~
(DataTypeToken.`datatypedef`
.map(Some(_)) | P.string("()").as(None))).map {
@@ -70,16 +77,16 @@ case class AquaArrowType[F[_]](args: List[TypeToken[F]], res: Option[DataTypeTok
object DataTypeToken {
def `arraytypedef`[F[_]: LiftParser]: P[ArrayTypeToken[F]] =
(P.string("[]") *> `datatypedef`[F]).map(ArrayTypeToken(_))
def `arraytypedef`[F[_]: LiftParser: Comonad]: P[ArrayTypeToken[F]] =
(P.string("[]").lift ~ `datatypedef`[F]).map(ud => ArrayTypeToken(ud._1, ud._2))
def `datatypedef`[F[_]: LiftParser]: P[DataTypeToken[F]] =
def `datatypedef`[F[_]: LiftParser: Comonad]: P[DataTypeToken[F]] =
P.oneOf(P.defer(`arraytypedef`[F]) :: BasicTypeToken.`basictypedef`[F] :: CustomTypeToken.ct[F] :: Nil)
}
object TypeToken {
def `typedef`[F[_]: LiftParser]: P[TypeToken[F]] =
def `typedef`[F[_]: LiftParser: Comonad]: P[TypeToken[F]] =
P.oneOf(ArrowTypeToken.`arrowdef`.backtrack :: DataTypeToken.`datatypedef` :: Nil)
}

View File

@@ -12,22 +12,24 @@ import cats.syntax.comonad._
sealed trait Value[F[_]] extends Token[F]
case class VarLambda[F[_]](name: Name[F], lambda: List[LambdaOp[F]] = Nil) extends Value[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = name.as(v)
override def as[T](v: T): F[T] = name.as(v)
}
case class Literal[F[_]](value: F[String], ts: LiteralType) extends Value[F] {
override def as[T](v: T)(implicit F: Functor[F]): F[T] = value.as(v)
case class Literal[F[_]: Comonad](valueToken: F[String], ts: LiteralType) extends Value[F] {
override def as[T](v: T): F[T] = valueToken.as(v)
def value: String = valueToken.extract
}
object Value {
val notLambdaSymbols = Set(' ', ',', '\n', ')', ':')
def varLambda[F[_]: LiftParser]: P[VarLambda[F]] =
def varLambda[F[_]: LiftParser: Comonad]: P[VarLambda[F]] =
(Name.p[F] ~ LambdaOp.ops[F].?).map {
case (n, l) => VarLambda(n, l.fold[List[LambdaOp[F]]](Nil)(_.toList))
}
def bool[F[_]: LiftParser: Functor]: P[Literal[F]] =
def bool[F[_]: LiftParser: Functor: Comonad]: P[Literal[F]] =
P.oneOf(
("true" :: "false" :: Nil)
.map(t => P.string(t).lift.map(fu => Literal(fu.as(t), LiteralType.bool)))
@@ -41,12 +43,12 @@ object Value {
}
)
def float[F[_]: LiftParser]: P[Literal[F]] =
def float[F[_]: LiftParser: Comonad]: P[Literal[F]] =
(P.char('-').?.with1 ~ (Numbers.nonNegativeIntString <* P.char('.')) ~ Numbers.nonNegativeIntString).string.lift
.map(Literal(_, LiteralType.float))
// TODO make more sophisticated escaping/unescaping
def string[F[_]: LiftParser]: P[Literal[F]] =
def string[F[_]: LiftParser: Comonad]: P[Literal[F]] =
(`"` *> P.charsWhile0(_ != '"') <* `"`).string.lift
.map(Literal(_, LiteralType.string))

View File

@@ -12,8 +12,8 @@ class LambdaOpSpec extends AnyFlatSpec with Matchers with EitherValues {
"lambda ops" should "parse" in {
val opsP = (s: String) => LambdaOp.ops[Id].parseAll(s).right.value
opsP(".field") should be(NonEmptyList.of(IntoField("field")))
opsP(".field.sub") should be(NonEmptyList.of(IntoField("field"), IntoField("sub")))
opsP(".field") should be(NonEmptyList.of(IntoField[Id]("field")))
opsP(".field.sub") should be(NonEmptyList.of(IntoField[Id]("field"), IntoField[Id]("sub")))
opsP(".field*.sub") should be(NonEmptyList.of(IntoField[Id]("field"), IntoArray[Id](()), IntoField[Id]("sub")))
}

View File

@@ -44,7 +44,7 @@ class TypeSpec extends AnyFlatSpec with Matchers with EitherValues {
ArrowTypeToken.`arrowdef`.parseAll("[]Absolutely, u32 -> B").right.value should be(
ArrowTypeToken[Id](
(),
ArrayTypeToken(CustomTypeToken[Id]("Absolutely")) :: (u32: BasicTypeToken[Id]) :: Nil,
ArrayTypeToken[Id]((), CustomTypeToken[Id]("Absolutely")) :: (u32: BasicTypeToken[Id]) :: Nil,
Some(CustomTypeToken[Id]("B"))
)
)
@@ -52,10 +52,12 @@ class TypeSpec extends AnyFlatSpec with Matchers with EitherValues {
}
"Array type" should "parse" in {
TypeToken.`typedef`.parseAll("[]Something") should be(Right(ArrayTypeToken(CustomTypeToken[Id]("Something"))))
TypeToken.`typedef`.parseAll("[]u32") should be(Right(ArrayTypeToken(u32: BasicTypeToken[Id])))
TypeToken.`typedef`.parseAll("[]Something") should be(
Right(ArrayTypeToken[Id]((), CustomTypeToken[Id]("Something")))
)
TypeToken.`typedef`.parseAll("[]u32") should be(Right(ArrayTypeToken[Id]((), u32: BasicTypeToken[Id])))
TypeToken.`typedef`.parseAll("[][]u32") should be(
Right(ArrayTypeToken[Id](ArrayTypeToken[Id](u32: BasicTypeToken[Id])))
Right(ArrayTypeToken[Id]((), ArrayTypeToken[Id]((), u32: BasicTypeToken[Id])))
)
}

View File

@@ -5,39 +5,42 @@ import org.scalatest.EitherValues
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
import cats.Id
class ValueSpec extends AnyFlatSpec with Matchers with EitherValues {
"var getter" should "parse" in {
Value.`value`.parseAll("varname").right.value should be(VarLambda(Name("varname"), Nil))
Value.`value`.parseAll("varname.field").right.value should be(VarLambda(Name("varname"), IntoField("field") :: Nil))
Value.`value`.parseAll("varname").right.value should be(VarLambda(Name[Id]("varname"), Nil))
Value.`value`.parseAll("varname.field").right.value should be(
VarLambda(Name[Id]("varname"), IntoField[Id]("field") :: Nil)
)
Value.`value`.parseAll("varname.field.sub").right.value should be(
VarLambda(Name("varname"), IntoField("field") :: IntoField("sub") :: Nil)
VarLambda(Name[Id]("varname"), IntoField[Id]("field") :: IntoField[Id]("sub") :: Nil)
)
}
"literals" should "parse" in {
Value.`value`.parseAll("true").right.value should be(Literal("true", LiteralType.bool))
Value.`value`.parseAll("false").right.value should be(Literal("false", LiteralType.bool))
Value.`value`.parseAll("true").right.value should be(Literal[Id]("true", LiteralType.bool))
Value.`value`.parseAll("false").right.value should be(Literal[Id]("false", LiteralType.bool))
Value.`value`.parseAll("1").right.value should be(Literal("1", LiteralType.number))
Value.`value`.parseAll("1111").right.value should be(Literal("1111", LiteralType.number))
Value.`value`.parseAll("1").right.value should be(Literal[Id]("1", LiteralType.number))
Value.`value`.parseAll("1111").right.value should be(Literal[Id]("1111", LiteralType.number))
Value.`value`.parseAll("-1543").right.value should be(Literal("-1543", LiteralType.signed))
Value.`value`.parseAll("-1543").right.value should be(Literal[Id]("-1543", LiteralType.signed))
Value.`value`.parseAll("1.0").right.value should be(Literal("1.0", LiteralType.float))
Value.`value`.parseAll("1.23").right.value should be(Literal("1.23", LiteralType.float))
Value.`value`.parseAll("-1.23").right.value should be(Literal("-1.23", LiteralType.float))
Value.`value`.parseAll("1.0").right.value should be(Literal[Id]("1.0", LiteralType.float))
Value.`value`.parseAll("1.23").right.value should be(Literal[Id]("1.23", LiteralType.float))
Value.`value`.parseAll("-1.23").right.value should be(Literal[Id]("-1.23", LiteralType.float))
Value.`value`.parseAll("\"some crazy string\"").right.value should be(
Literal("\"some crazy string\"", LiteralType.string)
Literal[Id]("\"some crazy string\"", LiteralType.string)
)
// This does not work :(
// Value.`value`.parseAll("\"some crazy string with escaped \\\" quote\"").right.value should be(
// Literal("\"some crazy string with escaped \\\" quote\"", BasicType.string)
// )
Value.`value`.parse("\"just string\" ").right.value should be(
(" ", Literal("\"just string\"", LiteralType.string))
(" ", Literal[Id]("\"just string\"", LiteralType.string))
)
}