Migration to Scala 3 (#228)

Dima 2021-08-06 13:33:58 +03:00 committed by GitHub
parent ee67d038ad
commit 4ccac9bf0e
31 changed files with 198 additions and 181 deletions

View File

@@ -1,6 +1,4 @@
-val dottyVersion = "2.13.5"
-//val dottyVersion = "3.0.0"
+val dottyVersion = "3.0.1"
 scalaVersion := dottyVersion
@@ -8,16 +6,14 @@ val baseAquaVersion = settingKey[String]("base aqua version")
 val catsV = "2.6.1"
 val catsParseV = "0.3.4"
-val monocleV = "3.0.0-M5"
+val monocleV = "3.0.0-M6"
 val scalaTestV = "3.2.9"
-val fs2V = "3.0.4"
+val fs2V = "3.0.6"
-val catsEffectV = "3.1.1"
+val catsEffectV = "3.2.1"
 val airframeLogV = "21.5.4"
 val log4catsV = "2.1.1"
-val enumeratumV = "1.6.1" // Scala3 issue: https://github.com/lloydmeta/enumeratum/issues/300
 val slf4jV = "1.7.30"
-val declineV = "2.0.0-RC1" // Scala3 issue: https://github.com/bkirwi/decline/issues/260
-val declineEnumV = "1.3.0"
+val declineV = "2.1.0"
 val airframeLog = "org.wvlet.airframe" %% "airframe-log" % airframeLogV
 val catsEffect = "org.typelevel" %% "cats-effect" % catsEffectV
@@ -36,7 +32,17 @@ val commons = Seq(
     airframeLog,
     "org.scalatest" %% "scalatest" % scalaTestV % Test
   ),
-  addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.11.3" cross CrossVersion.full)
+  scalacOptions ++= {
+    Seq(
+      "-encoding",
+      "UTF-8",
+      "-feature",
+      "-language:implicitConversions",
+      "-unchecked",
+      "-Ykind-projector"
+      // "-Xfatal-warnings"
+    )
+  }
 )
 commons
@@ -52,10 +58,8 @@ lazy val cli = project
       "com.monovore" %% "decline-effect" % declineV,
       catsEffect,
       fs2Io,
      "org.typelevel" %% "log4cats-slf4j" % log4catsV,
-      "com.beachape" %% "enumeratum" % enumeratumV,
-      "org.slf4j" % "slf4j-jdk14" % slf4jV,
-      "com.monovore" %% "decline-enumeratum" % declineEnumV
+      "org.slf4j" % "slf4j-jdk14" % slf4jV
     )
   )
   .dependsOn(compiler, `backend-air`, `backend-ts`, `backend-js`)
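
Note: the dropped kind-projector compiler plugin is covered by the new `-Ykind-projector` flag, which makes Scala 3 accept the plugin's `*` placeholder syntax natively, so type signatures such as `InjectK[TypeOp[F, *], Alg]` elsewhere in this commit keep compiling. A minimal sketch of what the flag enables (illustrative names, not from this repository):

    // With -Ykind-projector, `*` in a type position is shorthand for a type lambda,
    // so this alias is equivalent to `[A] =>> Either[String, A]`.
    type Result = Either[String, *]

    def ok[A](a: A): Result[A] = Right(a)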

View File

@@ -8,12 +8,12 @@ import cats.data.Validated.{Invalid, Valid}
 import cats.data.{NonEmptyList, Validated, ValidatedNel}
 import cats.effect.ExitCode
 import cats.effect.std.Console
-import cats.syntax.functor._
+import cats.syntax.functor.*
-import cats.syntax.traverse._
+import cats.syntax.traverse.*
 import cats.{Comonad, Functor}
 import com.monovore.decline.Opts.help
-import com.monovore.decline.enumeratum._
 import com.monovore.decline.{Opts, Visibility}
+import wvlet.log.{LogLevel => WLogLevel}
 import java.nio.file.Path
@@ -25,8 +25,22 @@ object AppOps {
   val versionOpt: Opts[Unit] =
     Opts.flag("version", help = "Show version", "v", Visibility.Partial)
-  val logLevelOpt: Opts[LogLevel] =
-    Opts.option[LogLevel]("log-level", help = "Set log level").withDefault(LogLevel.Info)
+  val logLevelOpt: Opts[WLogLevel] =
+    Opts.option[String]("log-level", help = "Set log level").withDefault("info").mapValidated {
+      str =>
+        Validated.fromEither(toLogLevel(str))
+    }
+  def toLogLevel(logLevel: String): Either[NonEmptyList[String], WLogLevel] = {
+    LogLevel.stringToLogLevel
+      .get(logLevel.toLowerCase)
+      .toRight(
+        NonEmptyList(
+          "log-level could be only 'all', 'trace', 'debug', 'info', 'warn', 'error', 'off'",
+          Nil
+        )
+      )
+  }
   def checkPath: Path => ValidatedNel[String, Path] = { p =>
     Validated
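
With enumeratum and decline-enumeratum gone (both blocked on Scala 3 at the time, per the issue links removed from build.sbt), the `--log-level` flag is now read as a plain `String` and validated against `LogLevel.stringToLogLevel`. A hedged usage sketch of the new option, wrapping it in a throwaway decline `Command` (the command name and header are illustrative):

    import com.monovore.decline.Command

    // a disposable command wrapping just this option, to show the validation path
    val cmd = Command("aqua", "log level demo")(AppOps.logLevelOpt)

    cmd.parse(Seq("--log-level", "debug")) // Right(DEBUG), resolved via LogLevel.stringToLogLevel
    cmd.parse(Seq("--log-level", "loud"))  // Left(help carrying the "log-level could be only ..." message)
    cmd.parse(Seq.empty)                   // Right(INFO), from the "info" default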

View File

@@ -59,7 +59,7 @@ object AquaCli extends IOApp with LogSupport {
         constantOpts[Id]
       ).mapN {
         case (input, imports, output, toAir, toJs, noRelay, noXor, h, v, logLevel, constants) =>
-          WLogger.setDefaultLogLevel(LogLevel.toLogLevel(logLevel))
+          WLogger.setDefaultLogLevel(logLevel)
           WLogger.setDefaultFormatter(CustomLogFormatter)
           implicit val aio: AquaIO[F] = new AquaFilesIO[F]

View File

@@ -1,30 +1,16 @@
 package aqua
-import enumeratum._
 import wvlet.log.{LogLevel => WLogLevel}
-sealed trait LogLevel extends EnumEntry with EnumEntry.Lowercase
-object LogLevel extends Enum[LogLevel] {
-  case object Debug extends LogLevel
-  case object Trace extends LogLevel
-  case object Info extends LogLevel
-  case object Off extends LogLevel
-  case object Warn extends LogLevel
-  case object Error extends LogLevel
-  case object All extends LogLevel
-  val values = findValues
-  def toLogLevel(logLevel: LogLevel): WLogLevel = {
-    logLevel match {
-      case LogLevel.Debug => WLogLevel.DEBUG
-      case LogLevel.Trace => WLogLevel.TRACE
-      case LogLevel.Info => WLogLevel.INFO
-      case LogLevel.Off => WLogLevel.OFF
-      case LogLevel.Warn => WLogLevel.WARN
-      case LogLevel.Error => WLogLevel.ERROR
-      case LogLevel.All => WLogLevel.ALL
-    }
-  }
+object LogLevel {
+  val stringToLogLevel: Map[String, WLogLevel] = Map(
+    ("debug" -> WLogLevel.DEBUG),
+    ("trace" -> WLogLevel.TRACE),
+    ("info" -> WLogLevel.INFO),
+    ("off" -> WLogLevel.OFF),
+    ("warn" -> WLogLevel.WARN),
+    ("error" -> WLogLevel.ERROR),
+    ("all" -> WLogLevel.ALL)
+  )
 }

View File

@@ -28,9 +28,9 @@ object Test extends IOApp.Simple {
       )
       .map {
         case Validated.Invalid(errs) =>
-          errs.map(System.err.println)
+          errs.map(System.err.println): Unit
         case Validated.Valid(res) =>
-          res.map(println)
+          res.map(println): Unit
       }
   }
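
The `: Unit` ascriptions look like a way to mark the discarded `map` results as intentional, presumably to keep discarded-value warnings quiet under the stricter compiler options introduced above; that motivation is an assumption, not stated in the commit. The pattern itself is just:

    val errs = List("error one", "error two")
    // the List[Unit] produced by map is deliberately thrown away
    errs.map(System.err.println): Unit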

View File

@@ -49,7 +49,7 @@ class AquaFileSources[F[_]: AquaIO: Monad](sourcesPath: Path, importFrom: List[P
     from: FileModuleId,
     imp: String
   ): F[ValidatedNec[AquaFileError, FileModuleId]] = {
-    Validated.fromEither(Try(Paths.get(imp)).toEither.leftMap(FileSystemError)) match {
+    Validated.fromEither(Try(Paths.get(imp)).toEither.leftMap(FileSystemError.apply)) match {
       case Validated.Valid(importP) =>
         filesIO
           .resolve(importP, from.file.getParent +: importFrom)
@@ -106,7 +106,7 @@ class AquaFileSources[F[_]: AquaIO: Monad](sourcesPath: Path, importFrom: List[P
           ac.sourceId.file,
           targetPath,
           compiled.suffix
-        ).leftMap(FileSystemError)
+        ).leftMap(FileSystemError.apply)
          .map { target =>
            filesIO
              .writeFile(

View File

@@ -33,7 +33,9 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
         .compile
         .last
         .map(
-          _.fold((EmptyFileError(file): AquaFileError).asLeft[String])(_.left.map(FileSystemError))
+          _.fold((EmptyFileError(file): AquaFileError).asLeft[String])(
+            _.left.map(FileSystemError.apply)
+          )
         )
     )
@@ -49,13 +51,13 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
       EitherT(
         Concurrent[F].attempt(p.toFile.isFile.pure[F])
       )
-        .leftMap[AquaFileError](FileSystemError)
+        .leftMap[AquaFileError](FileSystemError.apply)
         .recover({ case _ => false })
         .flatMap {
           case true =>
             EitherT(
               Concurrent[F].attempt(p.toAbsolutePath.normalize().pure[F])
-            ).leftMap[AquaFileError](FileSystemError)
+            ).leftMap[AquaFileError](FileSystemError.apply)
           case false =>
             findFirstF(in.tail, notFound)
         }
@@ -86,7 +88,7 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
           } else {
             Right(f :: Nil)
           }
-        }.toEither.leftMap[AquaFileError](FileSystemError).flatMap(identity)
+        }.toEither.leftMap[AquaFileError](FileSystemError.apply).flatMap(identity)
       )
         .leftMap(NonEmptyChain.one)
         .pure[F]
@@ -98,7 +100,7 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
         .fromTry(
           Try(Chain.one(f.toPath.toAbsolutePath.normalize()))
         )
-        .leftMap(FileSystemError)
+        .leftMap(FileSystemError.apply)
        .leftMap(NonEmptyChain.one)
        .pure[F]
      case f if f.isDirectory =>
@@ -112,10 +114,10 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
   }
   private def deleteIfExists(file: Path): EitherT[F, AquaFileError, Boolean] =
-    Files[F].deleteIfExists(file).attemptT.leftMap(FileSystemError)
+    Files[F].deleteIfExists(file).attemptT.leftMap(FileSystemError.apply)
   private def createDirectories(path: Path): EitherT[F, AquaFileError, Path] =
-    Files[F].createDirectories(path).attemptT.leftMap(FileSystemError)
+    Files[F].createDirectories(path).attemptT.leftMap(FileSystemError.apply)
   // Writes to a file, creates directories if they do not exist
   override def writeFile(file: Path, content: String): EitherT[F, AquaFileError, Unit] =
@@ -128,7 +130,7 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
         .attempt
         .compile
         .last
-        .map(_.getOrElse(Right()))
+        .map(_.getOrElse(Right(())))
       )
       .leftMap(FileWriteError(file, _))
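
The recurring `FileSystemError` → `FileSystemError.apply` change follows from a Scala 3 rule: case-class companion objects no longer extend the corresponding `FunctionN`, so the bare companion can't be passed where a function is expected and `apply` has to be named explicitly. Similarly, `Right()` relied on Scala 2's unit-argument adaptation and has to become `Right(())`. A hedged, self-contained illustration with a hypothetical error type:

    final case class FileSystemError(err: Throwable) extends Exception(err)

    val failed: Either[Throwable, String] = Left(new RuntimeException("boom"))

    failed.left.map(FileSystemError.apply) // compiles on both Scala 2 and 3
    // failed.left.map(FileSystemError)    // compiled on Scala 2, rejected on Scala 3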

View File

@@ -63,7 +63,7 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad](
           )
         case Validated.Invalid(errs) =>
           Validated.invalid[NonEmptyChain[Err], Chain[AquaModule[I, Err, Body]]](errs).pure[F]
-      }.map(_.map(_.foldLeft(Modules[I, Err, Body]())(_.add(_, export = true))))
+      }.map(_.map(_.foldLeft(Modules[I, Err, Body]())(_.add(_, toExport = true))))
   def loadModule(imp: I): F[ValidatedNec[Err, AquaModule[I, Err, Ast[S]]]] =
     sources

View File

@@ -30,7 +30,7 @@ object Linker extends LogSupport {
     else {
       val folded = canHandle.foldLeft(proc) { case (acc, m) =>
         val importKeys = m.dependsOn.keySet
-        debug(m.id + " dependsOn " + importKeys)
+        debug(s"${m.id} dependsOn $importKeys")
         val deps: T => T =
           importKeys.map(acc).foldLeft[T => T](identity) { case (fAcc, f) =>
             debug("COMBINING ONE TIME ")

View File

@@ -9,7 +9,7 @@ case class Modules[I, E, T](
   exports: Set[I] = Set.empty[I]
 ) {
-  def add(aquaModule: AquaModule[I, E, T], export: Boolean = false): Modules[I, E, T] =
+  def add(aquaModule: AquaModule[I, E, T], toExport: Boolean = false): Modules[I, E, T] =
     if (loaded.contains(aquaModule.id)) this
     else
       copy(
@@ -20,7 +20,7 @@ case class Modules[I, E, T](
         case (deps, (moduleId, err)) =>
           deps.updatedWith(moduleId)(_.fold(NonEmptyChain.one(err))(_.append(err)).some)
        },
-        exports = if (export) exports + aquaModule.id else exports
+        exports = if (toExport) exports + aquaModule.id else exports
      )
   def isResolved: Boolean = dependsOn.isEmpty
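
The renames `export` → `toExport` here (and `exportContext`, `exportTo` further down in this commit) are forced by Scala 3, where `export` is a hard keyword that introduces forwarders and can no longer be used as a parameter or pattern name. A small illustrative sketch of its new meaning (names are hypothetical):

    class Printer {
      def show(s: String): Unit = println(s)
    }

    object Logging {
      private val printer = new Printer
      export printer.show // Logging.show now forwards to printer.show
    }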

View File

@@ -18,7 +18,7 @@ class LinkerSpec extends AnyFlatSpec with Matchers {
             Map("mod2" -> "unresolved mod2 in mod1"),
             _ ++ " | mod1"
           ),
-          export = true
+          toExport = true
         )
     withMod1.isResolved should be(false)

View File

@@ -5,9 +5,9 @@ import aqua.model.func.{ArgsCall, FuncCallable, FuncModel}
 import aqua.types.{ProductType, Type}
 import cats.Monoid
 import cats.data.NonEmptyMap
-import cats.syntax.apply._
+import cats.syntax.apply.*
-import cats.syntax.functor._
+import cats.syntax.functor.*
-import cats.syntax.monoid._
+import cats.syntax.monoid.*
 import wvlet.log.LogSupport
 import scala.collection.immutable.SortedMap
@@ -20,34 +20,37 @@ case class AquaContext(
   // TODO: merge this with abilities, when have ability resolution variance
   services: Map[String, ServiceModel]
 ) {
+  private def prefixFirst[T](prefix: String, pair: (String, T)): (String, T) =
+    (prefix + pair._1, pair._2)
   def allTypes(prefix: String = ""): Map[String, Type] =
     abilities
       .foldLeft(types) { case (ts, (k, v)) =>
         ts ++ v.allTypes(k + ".")
       }
-      .map(_.swap.map(prefix + _).swap)
+      .map(prefixFirst(prefix, _))
   def allFuncs(prefix: String = ""): Map[String, FuncCallable] =
     abilities
       .foldLeft(funcs) { case (ts, (k, v)) =>
         ts ++ v.allFuncs(k + ".")
       }
-      .map(_.swap.map(prefix + _).swap)
+      .map(prefixFirst(prefix, _))
   def allValues(prefix: String = ""): Map[String, ValueModel] =
     abilities
      .foldLeft(values) { case (ts, (k, v)) =>
        ts ++ v.allValues(k + ".")
      }
-      .map(_.swap.map(prefix + _).swap)
+      .map(prefixFirst(prefix, _))
   def allServices(prefix: String = ""): Map[String, ServiceModel] =
     abilities
      .foldLeft(services) { case (ts, (k, v)) =>
        ts ++ v.allServices(k + ".")
      }
-      .map(_.swap.map(prefix + _).swap)
+      .map(prefixFirst(prefix, _))
   def `type`(name: String): Option[ProductType] =
     NonEmptyMap
@@ -118,7 +121,7 @@ object AquaContext extends LogSupport {
   ): AquaContext =
     sm.models
       .foldLeft((init, Monoid.empty[AquaContext])) {
-        case ((ctx, export), c: ConstantModel) =>
+        case ((ctx, exportContext), c: ConstantModel) =>
           val add =
             Monoid
              .empty[AquaContext]
@@ -126,17 +129,17 @@ object AquaContext extends LogSupport {
                 if (c.allowOverrides && ctx.values.contains(c.name)) ctx.values
                 else ctx.values.updated(c.name, c.value.resolveWith(ctx.values))
              )
-          (ctx |+| add, export |+| add)
+          (ctx |+| add, exportContext |+| add)
-        case ((ctx, export), func: FuncModel) =>
+        case ((ctx, exportContext), func: FuncModel) =>
           val fr = func.capture(ctx.funcs, ctx.values)
           val add =
             Monoid.empty[AquaContext].copy(funcs = ctx.funcs.updated(func.name, fr))
-          (ctx |+| add, export |+| add)
+          (ctx |+| add, exportContext |+| add)
-        case ((ctx, export), t: TypeModel) =>
+        case ((ctx, exportContext), t: TypeModel) =>
           val add =
             Monoid.empty[AquaContext].copy(types = ctx.types.updated(t.name, t.`type`))
-          (ctx |+| add, export |+| add)
+          (ctx |+| add, exportContext |+| add)
-        case ((ctx, export), m: ServiceModel) =>
+        case ((ctx, exportContext), m: ServiceModel) =>
           val add =
             Monoid
               .empty[AquaContext]
@@ -146,7 +149,7 @@ object AquaContext extends LogSupport {
               ),
               services = ctx.services.updated(m.name, m)
             )
-          (ctx |+| add, export |+| add)
+          (ctx |+| add, exportContext |+| add)
         case (ce, _) => ce
       }
       ._2

View File

@@ -2,7 +2,7 @@ package aqua.model.func
 import aqua.model.{ValueModel, VarModel}
 import aqua.types.{ArrowType, DataType}
-import cats.syntax.functor._
+import cats.syntax.functor.*
 /**
  * Wraps argument definitions of a function, along with values provided when this function is called
@@ -33,7 +33,7 @@ object ArgsCall {
     argPrefix: String = "arg",
     retName: String = "init_call_res"
   ): (ArgsDef, Call, Option[Call.Export]) = {
-    val argNamesTypes = arrow.args.zipWithIndex.map(iv => iv.map(i => argPrefix + i).swap)
+    val argNamesTypes = arrow.args.zipWithIndex.map { case (t, i) => (argPrefix + i, t) }
     val argsDef = ArgsDef(argNamesTypes.map {
       case (a, t: DataType) => ArgDef.Data(a, t)

View File

@@ -6,8 +6,9 @@ import cats.Eval
 import cats.data.Chain
 import cats.free.Cofree
 import cats.kernel.Semigroup
-import cats.syntax.apply._
+import cats.syntax.apply.*
-import cats.syntax.functor._
+import cats.syntax.functor.*
+import cats.instances.tuple.*
 case class FuncOp(tree: Cofree[Chain, RawTag]) extends Model {
   def head: RawTag = tree.head
@@ -22,19 +23,19 @@ case class FuncOp(tree: Cofree[Chain, RawTag]) extends Model {
     Cofree.cata(tree)(folder)
   def definesVarNames: Eval[Set[String]] = cata[Set[String]] {
-    case (CallArrowTag(_, Call(_, Some(export))), acc) =>
-      Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
-    case (CallServiceTag(_, _, Call(_, Some(export))), acc) =>
-      Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
-    case (NextTag(export), acc) => Eval.later(acc.foldLeft(Set(export))(_ ++ _))
+    case (CallArrowTag(_, Call(_, Some(exportTo))), acc) =>
+      Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
+    case (CallServiceTag(_, _, Call(_, Some(exportTo))), acc) =>
+      Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
+    case (NextTag(exportTo), acc) => Eval.later(acc.foldLeft(Set(exportTo))(_ ++ _))
     case (_, acc) => Eval.later(acc.foldLeft(Set.empty[String])(_ ++ _))
   }
   def exportsVarNames: Eval[Set[String]] = cata[Set[String]] {
-    case (CallArrowTag(_, Call(_, Some(export))), acc) =>
-      Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
-    case (CallServiceTag(_, _, Call(_, Some(export))), acc) =>
-      Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
+    case (CallArrowTag(_, Call(_, Some(exportTo))), acc) =>
+      Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
+    case (CallServiceTag(_, _, Call(_, Some(exportTo))), acc) =>
+      Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
     case (_, acc) => Eval.later(acc.foldLeft(Set.empty[String])(_ ++ _))
   }
@@ -106,9 +107,9 @@ object FuncOp {
       cf.tail
         .map(_.foldLeft[(A, Chain[Tree])]((headA, head.tailForced)) {
           case ((aggrA, aggrTail), child) =>
-            traverseA(child, aggrA)(f).value.map(aggrTail.append)
+            traverseA(child, aggrA)(f).value match { case (a, tree) => (a, aggrTail.append(tree)) }
        })
-      .map(_.map(ch => head.copy(tail = Eval.now(ch))))
+      .map { case (a, ch) => (a, head.copy(tail = Eval.now(ch))) }
   }
   // Semigroup for foldRight processing

View File

@@ -10,7 +10,7 @@ import wvlet.log.LogSupport
 // Can be heavily optimized by caching parent cursors, not just list of zippers
 case class RawCursor(tree: NonEmptyList[ChainZipper[FuncOp.Tree]])
-    extends ChainCursor[RawCursor, FuncOp.Tree](RawCursor) with LogSupport {
+    extends ChainCursor[RawCursor, FuncOp.Tree](RawCursor.apply) with LogSupport {
   def tag: RawTag = current.head
   def parentTag: Option[RawTag] = parent.map(_.head)

View File

@@ -90,7 +90,7 @@ class TopologySpec extends AnyFlatSpec with Matchers {
   }
   "topology resolver" should "build return path in par if there are exported variables" in {
-    val export = Some(Call.Export("result", ScalarType.string))
+    val exportTo = Some(Call.Export("result", ScalarType.string))
     val result = VarModel("result", ScalarType.string)
     val init = on(
@@ -101,7 +101,7 @@ class TopologySpec extends AnyFlatSpec with Matchers {
         on(
           otherPeer,
          otherRelay :: Nil,
-          callTag(1, export)
+          callTag(1, exportTo)
        ),
        callTag(2)
      ),
@@ -117,7 +117,7 @@ class TopologySpec extends AnyFlatSpec with Matchers {
       MakeRes.seq(
         through(relay),
        through(otherRelay),
-        callRes(1, otherPeer, export),
+        callRes(1, otherPeer, exportTo),
        through(otherRelay),
        through(relay),
        // we should return to a caller to continue execution

View File

@@ -6,7 +6,7 @@ import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser._
 import cats.data.{Chain, Validated, ValidatedNec}
 import cats.free.Cofree
-import cats.parse.{Parser0 => P0}
+import cats.parse.Parser0 as P0
 import cats.{Comonad, Eval}
 case class Ast[F[_]](head: Ast.Head[F], tree: Ast.Tree[F]) {
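
Most import rewrites in this commit follow the new Scala 3 spellings: `as` for renaming (instead of `{X => Y}`) and `*` for wildcards (instead of `_`). The Scala 2 forms still compile on 3.0, which is why mixed files like this one remain valid. For reference, the two styles side by side:

    // Scala 2 spellings (still accepted by Scala 3.0):
    //   import cats.parse.{Parser0 => P0}
    //   import cats.syntax.functor._
    // Scala 3 spellings used throughout this commit:
    import cats.parse.Parser0 as P0
    import cats.syntax.functor.*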

View File

@@ -1,16 +1,16 @@
 package aqua.parser
 import aqua.parser.Ast.Tree
-import aqua.parser.lexer.Token._
+import aqua.parser.lexer.Token
+import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
-import aqua.parser.lift.LiftParser._
+import aqua.parser.lift.LiftParser.*
+import cats.data.Chain.:==
 import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
 import cats.free.Cofree
-import cats.parse.{Parser => P}
+import cats.parse.Parser as P
-import cats.syntax.comonad._
+import cats.syntax.comonad.*
 import cats.{Comonad, Eval}
-import Chain.:==
-import aqua.parser.lexer.Token
 abstract class Expr[F[_]](val companion: Expr.Companion, val token: Token[F]) {
@@ -102,9 +102,9 @@ object Expr {
   case class Acc[F[_]](
     block: Option[(F[String], Tree[F])] = None,
-    window: Chain[(F[String], Tree[F])] = Chain.empty,
-    currentChildren: Chain[Ast.Tree[F]] = Chain.empty,
-    error: Chain[ParserError[F]] = Chain.empty
+    window: Chain[(F[String], Tree[F])] = Chain.empty[(F[String], Tree[F])],
+    currentChildren: Chain[Ast.Tree[F]] = Chain.empty[Ast.Tree[F]],
+    error: Chain[ParserError[F]] = Chain.empty[ParserError[F]]
   )
   // converts list of expressions to a tree
@@ -119,7 +119,7 @@ object Expr {
     val initialIndent = lHead._1.extract.length
     // recursively creating a tree
     // moving a window on a list depending on the nesting of the code
-    val acc = exprs.foldLeft(
+    val acc = exprs.foldLeft[Acc[F]](
       Acc[F]()
     ) {
       case (acc, (indent, currentExpr)) if acc.error.isEmpty =>

View File

@@ -1,13 +1,14 @@
 package aqua.parser.lexer
-import aqua.parser.lexer.Token._
+import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
-import aqua.parser.lift.LiftParser._
+import aqua.parser.lift.LiftParser.*
 import cats.data.NonEmptyList
-import cats.parse.{Numbers, Parser => P, Parser0 => P0}
+import cats.parse.{Numbers, Parser as P, Parser0 as P0}
-import cats.syntax.comonad._
+import cats.syntax.comonad.*
-import cats.syntax.functor._
+import cats.syntax.functor.*
 import cats.{Comonad, Functor}
+import scala.language.postfixOps
 sealed trait LambdaOp[F[_]] extends Token[F]
@@ -34,10 +35,11 @@ object LambdaOp {
   private def parseArr[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] = `*`.lift.map(IntoArray(_))
-  private val intP0: P0[Int] = Numbers.nonNegativeIntString.map(_.toInt).?.map(_.getOrElse(0))
+  private val nonNegativeIntP0: P0[Int] =
+    Numbers.nonNegativeIntString.map(_.toInt).?.map(_.getOrElse(0))
   private def parseIdx[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] =
-    ((`!`: P[Unit]) *> intP0).lift.map(IntoIndex(_))
+    (exclamation *> nonNegativeIntP0).lift.map(IntoIndex(_))
   private def parseOp[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] =
     P.oneOf(parseField.backtrack :: parseArr :: parseIdx :: Nil)

View File

@@ -64,7 +64,7 @@ object Token {
   val `.` : P[Unit] = P.char('.')
   val `"` : P[Unit] = P.char('"')
   val `*` : P[Unit] = P.char('*')
-  val `!` : P[Unit] = P.char('!')
+  val exclamation : P[Unit] = P.char('!')
   val `[]` : P[Unit] = P.string("[]")
   val `` : P[Unit] = P.char('')
   val `⊥` : P[Unit] = P.char('⊥')

View File

@@ -2,14 +2,14 @@ package aqua.parser
 import aqua.AquaSpec
 import aqua.parser.Ast.parser
-import aqua.parser.expr._
+import aqua.parser.expr.*
-import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, EqOp, Token}
+import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, EqOp, Literal, Token, VarLambda}
 import aqua.parser.lift.LiftParser.Implicits.idLiftParser
-import aqua.types.ScalarType._
+import aqua.types.ScalarType.*
 import cats.Id
 import cats.data.Chain
 import cats.free.Cofree
-import cats.syntax.foldable._
+import cats.syntax.foldable.*
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
@@ -161,12 +161,17 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
     // Local service
     qTree.d() shouldBe ServiceExpr(toAb("Local"), Some(toStr("local")))
     qTree.d() shouldBe ArrowTypeExpr("gt", toArrowType(Nil, Some(scToBt(bool))))
-    qTree.d() shouldBe FuncExpr("tryGen", Nil, Some(scToBt(bool)), Some("v"))
+    qTree.d() shouldBe FuncExpr("tryGen", Nil, Some(scToBt(bool)), Some("v": VarLambda[Id]))
     qTree.d() shouldBe OnExpr(toStr("deeper"), List(toStr("deep")))
     qTree.d() shouldBe CallArrowExpr(Some("v"), Some(toAb("Local")), "gt", Nil)
     qTree.d() shouldBe ReturnExpr(toVar("v"))
     // genC function
-    qTree.d() shouldBe FuncExpr("genC", List(toArgSc("val", string)), Some(boolSc), Some("two"))
+    qTree.d() shouldBe FuncExpr(
+      "genC",
+      List(toArgSc("val", string)),
+      Some(boolSc),
+      Some("two": VarLambda[Id])
+    )
     qTree.d() shouldBe CallArrowExpr(Some("one"), Some(toAb("Local")), "gt", List())
     qTree.d() shouldBe OnExpr(toStr("smth"), List(toStr("else")))
     qTree.d() shouldBe CallArrowExpr(Some("two"), None, "tryGen", List())

View File

@@ -10,7 +10,7 @@ import org.scalatest.matchers.should.Matchers
 class LambdaOpSpec extends AnyFlatSpec with Matchers with EitherValues {
   "lambda ops" should "parse" in {
-    val opsP = (s: String) => LambdaOp.ops[Id].parseAll(s).right.value
+    val opsP = (s: String) => LambdaOp.ops[Id].parseAll(s).value
     opsP(".field") should be(NonEmptyList.of(IntoField[Id]("field")))
     opsP(".field.sub") should be(NonEmptyList.of(IntoField[Id]("field"), IntoField[Id]("sub")))

View File

@@ -8,31 +8,31 @@ import org.scalatest.matchers.should.Matchers
 class TokenSpec extends AnyFlatSpec with Matchers with EitherValues {
   "\\n token" should "be parsed" in {
-    ` \n`.parseAll("\n") should be('right)
+    ` \n`.parseAll("\n").isRight should be(true)
-    ` \n`.parseAll(" \n") should be('right)
+    ` \n`.parseAll(" \n").isRight should be(true)
-    ` \n`.parseAll(" \n") should be('right)
+    ` \n`.parseAll(" \n").isRight should be(true)
-    ` \n`.parseAll(" \n") should be('right)
+    ` \n`.parseAll(" \n").isRight should be(true)
-    ` \n`.parseAll("--comment\n") should be('right)
+    ` \n`.parseAll("--comment\n").isRight should be(true)
-    ` \n`.parseAll(" --comment\n") should be('right)
+    ` \n`.parseAll(" --comment\n").isRight should be(true)
-    ` \n`.parseAll(" --comment\n") should be('right)
+    ` \n`.parseAll(" --comment\n").isRight should be(true)
-    ` \n`.parseAll(" --comment with many words\n") should be('right)
+    ` \n`.parseAll(" --comment with many words\n").isRight should be(true)
-    ` \n`.parseAll(" --comment with many words \n") should be('right)
+    ` \n`.parseAll(" --comment with many words \n").isRight should be(true)
-    ` \n`.parse(" --comment with many words \n").right.value should be(("", ()))
+    ` \n`.parse(" --comment with many words \n").value should be(("", ()))
-    ` \n`.parse(" --comment with many words \n ").right.value should be((" ", ()))
+    ` \n`.parse(" --comment with many words \n ").value should be((" ", ()))
   }
   "\\n* token" should "match the same strings" in {
-    ` \n+`.parseAll("\n") should be('right)
+    ` \n+`.parseAll("\n").isRight should be(true)
-    ` \n+`.parseAll(" \n") should be('right)
+    ` \n+`.parseAll(" \n").isRight should be(true)
-    ` \n+`.parseAll(" \n") should be('right)
+    ` \n+`.parseAll(" \n").isRight should be(true)
-    ` \n+`.parseAll(" \n") should be('right)
+    ` \n+`.parseAll(" \n").isRight should be(true)
-    ` \n+`.parseAll("--comment\n") should be('right)
+    ` \n+`.parseAll("--comment\n").isRight should be(true)
-    ` \n+`.parseAll(" --comment\n") should be('right)
+    ` \n+`.parseAll(" --comment\n").isRight should be(true)
-    ` \n+`.parseAll(" --comment\n") should be('right)
+    ` \n+`.parseAll(" --comment\n").isRight should be(true)
-    ` \n+`.parseAll(" --comment with many words\n") should be('right)
+    ` \n+`.parseAll(" --comment with many words\n").isRight should be(true)
-    ` \n+`.parseAll(" --comment with many words \n") should be('right)
+    ` \n+`.parseAll(" --comment with many words \n").isRight should be(true)
-    ` \n+`.parse(" --comment with many words \n").right.value should be(("", ()))
+    ` \n+`.parse(" --comment with many words \n").value should be(("", ()))
-    ` \n+`.parse(" --comment with many words \n ").right.value should be((" ", ()))
+    ` \n+`.parse(" --comment with many words \n ").value should be((" ", ()))
   }
   "\\n* token" should "match multi-line comments" in {
@@ -41,7 +41,7 @@ class TokenSpec extends AnyFlatSpec with Matchers with EitherValues {
         |
         | -- line 3
         | -- line 4
-        |""".stripMargin).right.value should be(())
+        |""".stripMargin).value should be(())
   }
 }
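
The assertion rewrites in these spec files follow ScalaTest's current recommendations: the reflection-based symbol matcher `be('right)` becomes an explicit `.isRight should be(true)`, and `EitherValues`' deprecated `.right.value` becomes `.value` directly on the `Either`. A hedged, self-contained illustration (the spec class name is hypothetical):

    import org.scalatest.EitherValues
    import org.scalatest.flatspec.AnyFlatSpec
    import org.scalatest.matchers.should.Matchers

    class EitherAssertionsSpec extends AnyFlatSpec with Matchers with EitherValues {
      "either assertions" should "avoid symbols and .right.value" in {
        val parsed: Either[String, Int] = Right(42)
        parsed.isRight should be(true) // was: parsed should be('right)
        parsed.value should be(42)     // was: parsed.right.value should be(42)
      }
    }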

View File

@@ -15,37 +15,37 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
   implicit def strToBt(st: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](st)
   "Basic type" should "parse" in {
-    BasicTypeToken.`basictypedef`.parseAll("u32").right.value should be(u32: BasicTypeToken[Id])
+    BasicTypeToken.`basictypedef`.parseAll("u32").value should be(u32: BasicTypeToken[Id])
-    BasicTypeToken.`basictypedef`.parseAll("()") should be('left)
+    BasicTypeToken.`basictypedef`.parseAll("()").isLeft should be(true)
   }
   "Arrow type" should "parse" in {
-    ArrowTypeToken.`arrowdef`.parseAll("-> B").right.value should be(
+    ArrowTypeToken.`arrowdef`.parseAll("-> B").value should be(
       ArrowTypeToken[Id]((), Nil, Some(CustomTypeToken[Id]("B")))
     )
-    ArrowTypeToken.`arrowdef`.parseAll("A -> B").right.value should be(
+    ArrowTypeToken.`arrowdef`.parseAll("A -> B").value should be(
       ArrowTypeToken[Id]((), CustomTypeToken[Id]("A") :: Nil, Some(CustomTypeToken[Id]("B")))
     )
-    ArrowTypeToken.`arrowWithNames`.parseAll("(a: A) -> B").right.value should be(
+    ArrowTypeToken.`arrowWithNames`.parseAll("(a: A) -> B").value should be(
       ArrowTypeToken[Id]((), CustomTypeToken[Id]("A") :: Nil, Some(CustomTypeToken[Id]("B")))
     )
-    ArrowTypeToken.`arrowdef`.parseAll("u32 -> Boo").right.value should be(
+    ArrowTypeToken.`arrowdef`.parseAll("u32 -> Boo").value should be(
      ArrowTypeToken[Id]((), (u32: BasicTypeToken[Id]) :: Nil, Some(CustomTypeToken[Id]("Boo")))
    )
-    TypeToken.`typedef`.parseAll("u32 -> ()").right.value should be(
+    TypeToken.`typedef`.parseAll("u32 -> ()").value should be(
      ArrowTypeToken[Id]((), (u32: BasicTypeToken[Id]) :: Nil, None)
    )
-    ArrowTypeToken.`arrowdef`.parseAll("A, u32 -> B").right.value should be(
+    ArrowTypeToken.`arrowdef`.parseAll("A, u32 -> B").value should be(
      ArrowTypeToken[Id](
        (),
        CustomTypeToken[Id]("A") :: (u32: BasicTypeToken[Id]) :: Nil,
        Some(CustomTypeToken[Id]("B"))
      )
    )
-    ArrowTypeToken.`arrowdef`.parseAll("[]Absolutely, u32 -> B").right.value should be(
+    ArrowTypeToken.`arrowdef`.parseAll("[]Absolutely, u32 -> B").value should be(
      ArrowTypeToken[Id](
        (),
        ArrayTypeToken[Id]((), CustomTypeToken[Id]("Absolutely")) :: (u32: BasicTypeToken[

View File

@@ -10,36 +10,36 @@ import cats.Id
 class ValueSpec extends AnyFlatSpec with Matchers with EitherValues {
   "var getter" should "parse" in {
-    Value.`value`.parseAll("varname").right.value should be(VarLambda(Name[Id]("varname"), Nil))
+    Value.`value`.parseAll("varname").value should be(VarLambda(Name[Id]("varname"), Nil))
-    Value.`value`.parseAll("varname.field").right.value should be(
+    Value.`value`.parseAll("varname.field").value should be(
      VarLambda(Name[Id]("varname"), IntoField[Id]("field") :: Nil)
    )
-    Value.`value`.parseAll("varname.field.sub").right.value should be(
+    Value.`value`.parseAll("varname.field.sub").value should be(
      VarLambda(Name[Id]("varname"), IntoField[Id]("field") :: IntoField[Id]("sub") :: Nil)
    )
   }
   "literals" should "parse" in {
-    Value.`value`.parseAll("true").right.value should be(Literal[Id]("true", LiteralType.bool))
+    Value.`value`.parseAll("true").value should be(Literal[Id]("true", LiteralType.bool))
-    Value.`value`.parseAll("false").right.value should be(Literal[Id]("false", LiteralType.bool))
+    Value.`value`.parseAll("false").value should be(Literal[Id]("false", LiteralType.bool))
-    Value.`value`.parseAll("1").right.value should be(Literal[Id]("1", LiteralType.number))
+    Value.`value`.parseAll("1").value should be(Literal[Id]("1", LiteralType.number))
-    Value.`value`.parseAll("1111").right.value should be(Literal[Id]("1111", LiteralType.number))
+    Value.`value`.parseAll("1111").value should be(Literal[Id]("1111", LiteralType.number))
-    Value.`value`.parseAll("-1543").right.value should be(Literal[Id]("-1543", LiteralType.signed))
+    Value.`value`.parseAll("-1543").value should be(Literal[Id]("-1543", LiteralType.signed))
-    Value.`value`.parseAll("1.0").right.value should be(Literal[Id]("1.0", LiteralType.float))
+    Value.`value`.parseAll("1.0").value should be(Literal[Id]("1.0", LiteralType.float))
-    Value.`value`.parseAll("1.23").right.value should be(Literal[Id]("1.23", LiteralType.float))
+    Value.`value`.parseAll("1.23").value should be(Literal[Id]("1.23", LiteralType.float))
-    Value.`value`.parseAll("-1.23").right.value should be(Literal[Id]("-1.23", LiteralType.float))
+    Value.`value`.parseAll("-1.23").value should be(Literal[Id]("-1.23", LiteralType.float))
-    Value.`value`.parseAll("\"some crazy string\"").right.value should be(
+    Value.`value`.parseAll("\"some crazy string\"").value should be(
      Literal[Id]("\"some crazy string\"", LiteralType.string)
    )
     // This does not work :(
-    // Value.`value`.parseAll("\"some crazy string with escaped \\\" quote\"").right.value should be(
+    // Value.`value`.parseAll("\"some crazy string with escaped \\\" quote\"").value should be(
     //   Literal("\"some crazy string with escaped \\\" quote\"", BasicType.string)
     // )
-    Value.`value`.parse("\"just string\" ").right.value should be(
+    Value.`value`.parse("\"just string\" ").value should be(
      (" ", Literal[Id]("\"just string\"", LiteralType.string))
    )
   }

View File

@@ -1 +1 @@
-sbt.version=1.5.2
+sbt.version=1.5.5

View File

@@ -57,7 +57,7 @@ object Semantics extends LogSupport {
     ast.cata(folder[F, Alg[F, *]]).value
   def interpret[F[_]](free: Free[Alg[F, *], Model]): State[CompilerState[F], Model] = {
-    import monocle.macros.syntax.all._
+    import monocle.syntax.all._
     implicit val re: ReportError[F, CompilerState[F]] =
       (st: CompilerState[F], token: Token[F], hint: String) =>

View File

@@ -51,7 +51,7 @@ object NamesState {
   def init[F[_]](context: AquaContext): NamesState[F] =
     NamesState(
-      rootArrows = context.allFuncs().map(_.map(_.arrowType)),
-      constants = context.allValues().map(_.map(_.lastType))
+      rootArrows = context.allFuncs().map { case (s, fc) => (s, fc.arrowType) },
+      constants = context.allValues().map { case (s, vm) => (s, vm.lastType) }
     )
 }

View File

@@ -20,10 +20,10 @@ case class DefineAlias[F[_]](name: CustomTypeToken[F], target: Type) extends Typ
 case class ResolveLambda[F[_]](root: Type, ops: List[LambdaOp[F]])
     extends TypeOp[F, List[LambdaModel]]
-case class EnsureTypeMatches[F[_]](token: Token[F], expected: Type, given: Type)
+case class EnsureTypeMatches[F[_]](token: Token[F], expected: Type, givenType: Type)
     extends TypeOp[F, Boolean]
 case class ExpectNoExport[F[_]](token: Token[F]) extends TypeOp[F, Unit]
-case class CheckArgumentsNum[F[_]](token: Token[F], expected: Int, given: Int)
+case class CheckArgumentsNum[F[_]](token: Token[F], expected: Int, givenNum: Int)
     extends TypeOp[F, Boolean]

View File

@@ -33,14 +33,14 @@ class TypesAlgebra[F[_], Alg[_]](implicit T: InjectK[TypeOp[F, *], Alg]) {
   def resolveLambda(root: Type, ops: List[LambdaOp[F]]): Free[Alg, List[LambdaModel]] =
     Free.liftInject[Alg](ResolveLambda(root, ops))
-  def ensureTypeMatches(token: Token[F], expected: Type, given: Type): Free[Alg, Boolean] =
-    Free.liftInject[Alg](EnsureTypeMatches[F](token, expected, given))
+  def ensureTypeMatches(token: Token[F], expected: Type, givenType: Type): Free[Alg, Boolean] =
+    Free.liftInject[Alg](EnsureTypeMatches[F](token, expected, givenType))
   def expectNoExport(token: Token[F]): Free[Alg, Unit] =
     Free.liftInject[Alg](ExpectNoExport[F](token))
-  def checkArgumentsNumber(token: Token[F], expected: Int, given: Int): Free[Alg, Boolean] =
-    Free.liftInject[Alg](CheckArgumentsNum(token, expected, given))
+  def checkArgumentsNumber(token: Token[F], expected: Int, givenNum: Int): Free[Alg, Boolean] =
+    Free.liftInject[Alg](CheckArgumentsNum(token, expected, givenNum))
 }
 object TypesAlgebra {
object TypesAlgebra { object TypesAlgebra {

View File

@@ -99,9 +99,9 @@ class TypesInterpreter[F[_], X](implicit lens: Lens[X, TypesState[F]], error: Re
     case etm: EnsureTypeMatches[F] =>
       // TODO in case of two literals, check for types intersection?
-      if (etm.expected.acceptsValueOf(etm.`given`)) State.pure(true)
+      if (etm.expected.acceptsValueOf(etm.givenType)) State.pure(true)
       else
-        report(etm.token, s"Types mismatch, expected: ${etm.expected}, given: ${etm.`given`}")
+        report(etm.token, s"Types mismatch, expected: ${etm.expected}, given: ${etm.givenType}")
          .as(false)
     case ene: ExpectNoExport[F] =>
@@ -111,11 +11,11 @@ class TypesInterpreter[F[_], X](implicit lens: Lens[X, TypesState[F]], error: Re
       ).as(())
     case ca: CheckArgumentsNum[F] =>
-      if (ca.expected == ca.given) State.pure(true)
+      if (ca.expected == ca.givenNum) State.pure(true)
       else
        report(
          ca.token,
-          s"Number of arguments doesn't match the function type, expected: ${ca.expected}, given: ${ca.`given`}"
+          s"Number of arguments doesn't match the function type, expected: ${ca.expected}, given: ${ca.givenNum}"
        ).as(false)
 }
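
The `given` → `givenType` / `givenNum` renames in the last three files mirror the `export` case: `given` is a Scala 3 keyword that declares contextual instances, so it can no longer serve as a field or parameter name without backticks. A minimal hedged sketch of its new role (hypothetical type class):

    trait Show[A] {
      def show(a: A): String
    }

    // `given` now declares a contextual instance, resolved by `using` parameters
    given Show[Int] = (a: Int) => a.toString

    def render[A](a: A)(using s: Show[A]): String = s.show(a)
    // render(42) == "42"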
} }