Migration to Scala 3 (#228)

Authored by Dima on 2021-08-06 13:33:58 +03:00, committed by GitHub
parent ee67d038ad
commit 4ccac9bf0e
31 changed files with 198 additions and 181 deletions


@ -1,6 +1,4 @@
val dottyVersion = "2.13.5"
//val dottyVersion = "3.0.0"
val dottyVersion = "3.0.1"
scalaVersion := dottyVersion
@ -8,16 +6,14 @@ val baseAquaVersion = settingKey[String]("base aqua version")
val catsV = "2.6.1"
val catsParseV = "0.3.4"
val monocleV = "3.0.0-M5"
val monocleV = "3.0.0-M6"
val scalaTestV = "3.2.9"
val fs2V = "3.0.4"
val catsEffectV = "3.1.1"
val fs2V = "3.0.6"
val catsEffectV = "3.2.1"
val airframeLogV = "21.5.4"
val log4catsV = "2.1.1"
val enumeratumV = "1.6.1" // Scala3 issue: https://github.com/lloydmeta/enumeratum/issues/300
val slf4jV = "1.7.30"
val declineV = "2.0.0-RC1" // Scala3 issue: https://github.com/bkirwi/decline/issues/260
val declineEnumV = "1.3.0"
val declineV = "2.1.0"
val airframeLog = "org.wvlet.airframe" %% "airframe-log" % airframeLogV
val catsEffect = "org.typelevel" %% "cats-effect" % catsEffectV
@ -36,7 +32,17 @@ val commons = Seq(
airframeLog,
"org.scalatest" %% "scalatest" % scalaTestV % Test
),
addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.11.3" cross CrossVersion.full)
scalacOptions ++= {
Seq(
"-encoding",
"UTF-8",
"-feature",
"-language:implicitConversions",
"-unchecked",
"-Ykind-projector"
// "-Xfatal-warnings"
)
}
)
commons
@ -52,10 +58,8 @@ lazy val cli = project
"com.monovore" %% "decline-effect" % declineV,
catsEffect,
fs2Io,
"org.typelevel" %% "log4cats-slf4j" % log4catsV,
"com.beachape" %% "enumeratum" % enumeratumV,
"org.slf4j" % "slf4j-jdk14" % slf4jV,
"com.monovore" %% "decline-enumeratum" % declineEnumV
"org.typelevel" %% "log4cats-slf4j" % log4catsV,
"org.slf4j" % "slf4j-jdk14" % slf4jV
)
)
.dependsOn(compiler, `backend-air`, `backend-ts`, `backend-js`)
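
Note on the scalacOptions change above: the kind-projector compiler plugin is dropped because Scala 3 ships equivalent support behind the -Ykind-projector flag. A minimal sketch of the syntax that flag keeps compiling; the alias names below are made up for illustration only.

// Hypothetical aliases, not from this repo; compiled with -Ykind-projector as set above.
object KindProjectorSketch {
  type StringOr       = Either[String, *]          // `*` placeholder, requires -Ykind-projector
  type StringOrLambda = [A] =>> Either[String, A]  // equivalent plain Scala 3 type lambda
}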


@ -8,12 +8,12 @@ import cats.data.Validated.{Invalid, Valid}
import cats.data.{NonEmptyList, Validated, ValidatedNel}
import cats.effect.ExitCode
import cats.effect.std.Console
import cats.syntax.functor._
import cats.syntax.traverse._
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.{Comonad, Functor}
import com.monovore.decline.Opts.help
import com.monovore.decline.enumeratum._
import com.monovore.decline.{Opts, Visibility}
import wvlet.log.{LogLevel => WLogLevel}
import java.nio.file.Path
@ -25,8 +25,22 @@ object AppOps {
val versionOpt: Opts[Unit] =
Opts.flag("version", help = "Show version", "v", Visibility.Partial)
val logLevelOpt: Opts[LogLevel] =
Opts.option[LogLevel]("log-level", help = "Set log level").withDefault(LogLevel.Info)
val logLevelOpt: Opts[WLogLevel] =
Opts.option[String]("log-level", help = "Set log level").withDefault("info").mapValidated {
str =>
Validated.fromEither(toLogLevel(str))
}
def toLogLevel(logLevel: String): Either[NonEmptyList[String], WLogLevel] = {
LogLevel.stringToLogLevel
.get(logLevel.toLowerCase)
.toRight(
NonEmptyList(
"log-level could be only 'all', 'trace', 'debug', 'info', 'warn', 'error', 'off'",
Nil
)
)
}
def checkPath: Path => ValidatedNel[String, Path] = { p =>
Validated
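
With decline-enumeratum gone, the log level is now read as a plain string and validated by hand. A self-contained sketch of that path, assuming the decline and airframe-log versions from the build above; the abridged map is a stand-in for LogLevel.stringToLogLevel.

import cats.data.{NonEmptyList, Validated}
import com.monovore.decline.{Command, Opts}
import wvlet.log.{LogLevel => WLogLevel}

object LogLevelOptSketch {
  // Abridged stand-in for LogLevel.stringToLogLevel (the real map also covers trace/off/warn/all).
  private val stringToLogLevel: Map[String, WLogLevel] =
    Map("debug" -> WLogLevel.DEBUG, "info" -> WLogLevel.INFO, "error" -> WLogLevel.ERROR)

  def toLogLevel(s: String): Either[NonEmptyList[String], WLogLevel] =
    stringToLogLevel.get(s.toLowerCase).toRight(NonEmptyList.one(s"unknown log level: $s"))

  val logLevelOpt: Opts[WLogLevel] =
    Opts.option[String]("log-level", help = "Set log level")
      .withDefault("info")
      .mapValidated(str => Validated.fromEither(toLogLevel(str)))

  // Command(...).parse(Seq("--log-level", "debug")) yields Right(WLogLevel.DEBUG);
  // an unknown value surfaces the validation message as a usage error.
  val command: Command[WLogLevel] = Command("demo", "log-level parsing demo")(logLevelOpt)
}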


@ -59,7 +59,7 @@ object AquaCli extends IOApp with LogSupport {
constantOpts[Id]
).mapN {
case (input, imports, output, toAir, toJs, noRelay, noXor, h, v, logLevel, constants) =>
WLogger.setDefaultLogLevel(LogLevel.toLogLevel(logLevel))
WLogger.setDefaultLogLevel(logLevel)
WLogger.setDefaultFormatter(CustomLogFormatter)
implicit val aio: AquaIO[F] = new AquaFilesIO[F]


@ -1,30 +1,16 @@
package aqua
import enumeratum._
import wvlet.log.{LogLevel => WLogLevel}
sealed trait LogLevel extends EnumEntry with EnumEntry.Lowercase
object LogLevel {
object LogLevel extends Enum[LogLevel] {
case object Debug extends LogLevel
case object Trace extends LogLevel
case object Info extends LogLevel
case object Off extends LogLevel
case object Warn extends LogLevel
case object Error extends LogLevel
case object All extends LogLevel
val values = findValues
def toLogLevel(logLevel: LogLevel): WLogLevel = {
logLevel match {
case LogLevel.Debug => WLogLevel.DEBUG
case LogLevel.Trace => WLogLevel.TRACE
case LogLevel.Info => WLogLevel.INFO
case LogLevel.Off => WLogLevel.OFF
case LogLevel.Warn => WLogLevel.WARN
case LogLevel.Error => WLogLevel.ERROR
case LogLevel.All => WLogLevel.ALL
}
}
val stringToLogLevel: Map[String, WLogLevel] = Map(
("debug" -> WLogLevel.DEBUG),
("trace" -> WLogLevel.TRACE),
("info" -> WLogLevel.INFO),
("off" -> WLogLevel.OFF),
("warn" -> WLogLevel.WARN),
("error" -> WLogLevel.ERROR),
("all" -> WLogLevel.ALL)
)
}
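
The enumeratum-backed LogLevel enum is replaced by the plain string-to-level map above. Purely for illustration (not what this commit does), the same set of levels could also be modelled with a native Scala 3 enum; the names here are hypothetical.

import wvlet.log.{LogLevel => WLogLevel}

// Illustrative alternative only.
enum VerbosityLevel(val wlog: WLogLevel):
  case Debug extends VerbosityLevel(WLogLevel.DEBUG)
  case Trace extends VerbosityLevel(WLogLevel.TRACE)
  case Info  extends VerbosityLevel(WLogLevel.INFO)
  case Off   extends VerbosityLevel(WLogLevel.OFF)
  case Warn  extends VerbosityLevel(WLogLevel.WARN)
  case Error extends VerbosityLevel(WLogLevel.ERROR)
  case All   extends VerbosityLevel(WLogLevel.ALL)

object VerbosityLevel:
  def fromString(s: String): Option[VerbosityLevel] =
    values.find(_.toString.equalsIgnoreCase(s))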


@ -28,9 +28,9 @@ object Test extends IOApp.Simple {
)
.map {
case Validated.Invalid(errs) =>
errs.map(System.err.println)
errs.map(System.err.println): Unit
case Validated.Valid(res) =>
res.map(println)
res.map(println): Unit
}
}


@ -49,7 +49,7 @@ class AquaFileSources[F[_]: AquaIO: Monad](sourcesPath: Path, importFrom: List[P
from: FileModuleId,
imp: String
): F[ValidatedNec[AquaFileError, FileModuleId]] = {
Validated.fromEither(Try(Paths.get(imp)).toEither.leftMap(FileSystemError)) match {
Validated.fromEither(Try(Paths.get(imp)).toEither.leftMap(FileSystemError.apply)) match {
case Validated.Valid(importP) =>
filesIO
.resolve(importP, from.file.getParent +: importFrom)
@ -106,7 +106,7 @@ class AquaFileSources[F[_]: AquaIO: Monad](sourcesPath: Path, importFrom: List[P
ac.sourceId.file,
targetPath,
compiled.suffix
).leftMap(FileSystemError)
).leftMap(FileSystemError.apply)
.map { target =>
filesIO
.writeFile(
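
The recurring FileSystemError → FileSystemError.apply edits in this file and the next come from a Scala 3 change: case-class companion objects no longer extend FunctionN, so the bare companion can no longer be passed where a function is expected. A minimal sketch with a simplified stand-in for the project's FileSystemError.

// Simplified stand-in; the real FileSystemError is an AquaFileError wrapping a Throwable.
final case class FileSystemError(err: Throwable)

object CompanionApplySketch {
  val failed: Either[Throwable, String] = Left(new RuntimeException("boom"))

  // Scala 2 accepted `failed.left.map(FileSystemError)`; Scala 3 rejects it.
  val wrapped: Either[FileSystemError, String]  = failed.left.map(FileSystemError.apply)
  val wrapped2: Either[FileSystemError, String] = failed.left.map(FileSystemError(_)) // equivalent
}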


@ -33,7 +33,9 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
.compile
.last
.map(
_.fold((EmptyFileError(file): AquaFileError).asLeft[String])(_.left.map(FileSystemError))
_.fold((EmptyFileError(file): AquaFileError).asLeft[String])(
_.left.map(FileSystemError.apply)
)
)
)
@ -49,13 +51,13 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
EitherT(
Concurrent[F].attempt(p.toFile.isFile.pure[F])
)
.leftMap[AquaFileError](FileSystemError)
.leftMap[AquaFileError](FileSystemError.apply)
.recover({ case _ => false })
.flatMap {
case true =>
EitherT(
Concurrent[F].attempt(p.toAbsolutePath.normalize().pure[F])
).leftMap[AquaFileError](FileSystemError)
).leftMap[AquaFileError](FileSystemError.apply)
case false =>
findFirstF(in.tail, notFound)
}
@ -86,7 +88,7 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
} else {
Right(f :: Nil)
}
}.toEither.leftMap[AquaFileError](FileSystemError).flatMap(identity)
}.toEither.leftMap[AquaFileError](FileSystemError.apply).flatMap(identity)
)
.leftMap(NonEmptyChain.one)
.pure[F]
@ -98,7 +100,7 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
.fromTry(
Try(Chain.one(f.toPath.toAbsolutePath.normalize()))
)
.leftMap(FileSystemError)
.leftMap(FileSystemError.apply)
.leftMap(NonEmptyChain.one)
.pure[F]
case f if f.isDirectory =>
@ -112,10 +114,10 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
}
private def deleteIfExists(file: Path): EitherT[F, AquaFileError, Boolean] =
Files[F].deleteIfExists(file).attemptT.leftMap(FileSystemError)
Files[F].deleteIfExists(file).attemptT.leftMap(FileSystemError.apply)
private def createDirectories(path: Path): EitherT[F, AquaFileError, Path] =
Files[F].createDirectories(path).attemptT.leftMap(FileSystemError)
Files[F].createDirectories(path).attemptT.leftMap(FileSystemError.apply)
// Writes to a file, creates directories if they do not exist
override def writeFile(file: Path, content: String): EitherT[F, AquaFileError, Unit] =
@ -128,7 +130,7 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
.attempt
.compile
.last
.map(_.getOrElse(Right()))
.map(_.getOrElse(Right(())))
)
.leftMap(FileWriteError(file, _))
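
Right() → Right(()) is another Scala 3 strictness fix: Scala 2 silently adapted an empty argument list to the unit value, while Scala 3 drops that adaptation and requires () to be passed explicitly. A one-line sketch:

object UnitArgumentSketch {
  val done: Either[Throwable, Unit] = Right(())   // fine on both Scala 2 and 3
  // val broken: Either[Throwable, Unit] = Right() // Scala 3: missing argument for apply
}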


@ -63,7 +63,7 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad](
)
case Validated.Invalid(errs) =>
Validated.invalid[NonEmptyChain[Err], Chain[AquaModule[I, Err, Body]]](errs).pure[F]
}.map(_.map(_.foldLeft(Modules[I, Err, Body]())(_.add(_, export = true))))
}.map(_.map(_.foldLeft(Modules[I, Err, Body]())(_.add(_, toExport = true))))
def loadModule(imp: I): F[ValidatedNec[Err, AquaModule[I, Err, Ast[S]]]] =
sources


@ -30,7 +30,7 @@ object Linker extends LogSupport {
else {
val folded = canHandle.foldLeft(proc) { case (acc, m) =>
val importKeys = m.dependsOn.keySet
debug(m.id + " dependsOn " + importKeys)
debug(s"${m.id} dependsOn $importKeys")
val deps: T => T =
importKeys.map(acc).foldLeft[T => T](identity) { case (fAcc, f) =>
debug("COMBINING ONE TIME ")


@ -9,7 +9,7 @@ case class Modules[I, E, T](
exports: Set[I] = Set.empty[I]
) {
def add(aquaModule: AquaModule[I, E, T], export: Boolean = false): Modules[I, E, T] =
def add(aquaModule: AquaModule[I, E, T], toExport: Boolean = false): Modules[I, E, T] =
if (loaded.contains(aquaModule.id)) this
else
copy(
@ -20,7 +20,7 @@ case class Modules[I, E, T](
case (deps, (moduleId, err)) =>
deps.updatedWith(moduleId)(_.fold(NonEmptyChain.one(err))(_.append(err)).some)
},
exports = if (export) exports + aquaModule.id else exports
exports = if (toExport) exports + aquaModule.id else exports
)
def isResolved: Boolean = dependsOn.isEmpty
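
The export → toExport renames here (and the export → exportContext / exportTo renames further down) exist because export is a hard keyword in Scala 3, used for export clauses. A minimal sketch of the options:

object ExportKeywordSketch {
  // def add(export: Boolean): Unit = ()        // no longer parses in Scala 3
  def add(toExport: Boolean): Unit = ()          // rename, as done in Modules.add
  def addEscaped(`export`: Boolean): Unit = ()   // backticks would also have worked
}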


@ -18,7 +18,7 @@ class LinkerSpec extends AnyFlatSpec with Matchers {
Map("mod2" -> "unresolved mod2 in mod1"),
_ ++ " | mod1"
),
export = true
toExport = true
)
withMod1.isResolved should be(false)


@ -5,9 +5,9 @@ import aqua.model.func.{ArgsCall, FuncCallable, FuncModel}
import aqua.types.{ProductType, Type}
import cats.Monoid
import cats.data.NonEmptyMap
import cats.syntax.apply._
import cats.syntax.functor._
import cats.syntax.monoid._
import cats.syntax.apply.*
import cats.syntax.functor.*
import cats.syntax.monoid.*
import wvlet.log.LogSupport
import scala.collection.immutable.SortedMap
@ -21,33 +21,36 @@ case class AquaContext(
services: Map[String, ServiceModel]
) {
private def prefixFirst[T](prefix: String, pair: (String, T)): (String, T) =
(prefix + pair._1, pair._2)
def allTypes(prefix: String = ""): Map[String, Type] =
abilities
.foldLeft(types) { case (ts, (k, v)) =>
ts ++ v.allTypes(k + ".")
}
.map(_.swap.map(prefix + _).swap)
.map(prefixFirst(prefix, _))
def allFuncs(prefix: String = ""): Map[String, FuncCallable] =
abilities
.foldLeft(funcs) { case (ts, (k, v)) =>
ts ++ v.allFuncs(k + ".")
}
.map(_.swap.map(prefix + _).swap)
.map(prefixFirst(prefix, _))
def allValues(prefix: String = ""): Map[String, ValueModel] =
abilities
.foldLeft(values) { case (ts, (k, v)) =>
ts ++ v.allValues(k + ".")
}
.map(_.swap.map(prefix + _).swap)
.map(prefixFirst(prefix, _))
def allServices(prefix: String = ""): Map[String, ServiceModel] =
abilities
.foldLeft(services) { case (ts, (k, v)) =>
ts ++ v.allServices(k + ".")
}
.map(_.swap.map(prefix + _).swap)
.map(prefixFirst(prefix, _))
def `type`(name: String): Option[ProductType] =
NonEmptyMap
@ -118,7 +121,7 @@ object AquaContext extends LogSupport {
): AquaContext =
sm.models
.foldLeft((init, Monoid.empty[AquaContext])) {
case ((ctx, export), c: ConstantModel) =>
case ((ctx, exportContext), c: ConstantModel) =>
val add =
Monoid
.empty[AquaContext]
@ -126,17 +129,17 @@ object AquaContext extends LogSupport {
if (c.allowOverrides && ctx.values.contains(c.name)) ctx.values
else ctx.values.updated(c.name, c.value.resolveWith(ctx.values))
)
(ctx |+| add, export |+| add)
case ((ctx, export), func: FuncModel) =>
(ctx |+| add, exportContext |+| add)
case ((ctx, exportContext), func: FuncModel) =>
val fr = func.capture(ctx.funcs, ctx.values)
val add =
Monoid.empty[AquaContext].copy(funcs = ctx.funcs.updated(func.name, fr))
(ctx |+| add, export |+| add)
case ((ctx, export), t: TypeModel) =>
(ctx |+| add, exportContext |+| add)
case ((ctx, exportContext), t: TypeModel) =>
val add =
Monoid.empty[AquaContext].copy(types = ctx.types.updated(t.name, t.`type`))
(ctx |+| add, export |+| add)
case ((ctx, export), m: ServiceModel) =>
(ctx |+| add, exportContext |+| add)
case ((ctx, exportContext), m: ServiceModel) =>
val add =
Monoid
.empty[AquaContext]
@ -146,7 +149,7 @@ object AquaContext extends LogSupport {
),
services = ctx.services.updated(m.name, m)
)
(ctx |+| add, export |+| add)
(ctx |+| add, exportContext |+| add)
case (ce, _) => ce
}
._2
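
Besides the keyword-driven exportContext rename, AquaContext replaces the _.swap.map(prefix + _).swap trick with an explicit prefixFirst helper. Both forms prepend a prefix to each entry's key; a sketch on a hypothetical map:

object PrefixFirstSketch {
  def prefixFirst[T](prefix: String, pair: (String, T)): (String, T) =
    (prefix + pair._1, pair._2)

  val types: Map[String, Int] = Map("Foo" -> 1, "Bar" -> 2)

  val viaHelper  = types.map(prefixFirst("Ability.", _))            // Map("Ability.Foo" -> 1, "Ability.Bar" -> 2)
  val viaPattern = types.map { case (k, v) => ("Ability." + k, v) } // same result, spelled out
}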


@ -2,7 +2,7 @@ package aqua.model.func
import aqua.model.{ValueModel, VarModel}
import aqua.types.{ArrowType, DataType}
import cats.syntax.functor._
import cats.syntax.functor.*
/**
* Wraps argument definitions of a function, along with values provided when this function is called
@ -33,7 +33,7 @@ object ArgsCall {
argPrefix: String = "arg",
retName: String = "init_call_res"
): (ArgsDef, Call, Option[Call.Export]) = {
val argNamesTypes = arrow.args.zipWithIndex.map(iv => iv.map(i => argPrefix + i).swap)
val argNamesTypes = arrow.args.zipWithIndex.map { case (t, i) => (argPrefix + i, t) }
val argsDef = ArgsDef(argNamesTypes.map {
case (a, t: DataType) => ArgDef.Data(a, t)


@ -6,8 +6,9 @@ import cats.Eval
import cats.data.Chain
import cats.free.Cofree
import cats.kernel.Semigroup
import cats.syntax.apply._
import cats.syntax.functor._
import cats.syntax.apply.*
import cats.syntax.functor.*
import cats.instances.tuple.*
case class FuncOp(tree: Cofree[Chain, RawTag]) extends Model {
def head: RawTag = tree.head
@ -22,19 +23,19 @@ case class FuncOp(tree: Cofree[Chain, RawTag]) extends Model {
Cofree.cata(tree)(folder)
def definesVarNames: Eval[Set[String]] = cata[Set[String]] {
case (CallArrowTag(_, Call(_, Some(export))), acc) =>
Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
case (CallServiceTag(_, _, Call(_, Some(export))), acc) =>
Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
case (NextTag(export), acc) => Eval.later(acc.foldLeft(Set(export))(_ ++ _))
case (CallArrowTag(_, Call(_, Some(exportTo))), acc) =>
Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
case (CallServiceTag(_, _, Call(_, Some(exportTo))), acc) =>
Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
case (NextTag(exportTo), acc) => Eval.later(acc.foldLeft(Set(exportTo))(_ ++ _))
case (_, acc) => Eval.later(acc.foldLeft(Set.empty[String])(_ ++ _))
}
def exportsVarNames: Eval[Set[String]] = cata[Set[String]] {
case (CallArrowTag(_, Call(_, Some(export))), acc) =>
Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
case (CallServiceTag(_, _, Call(_, Some(export))), acc) =>
Eval.later(acc.foldLeft(Set(export.name))(_ ++ _))
case (CallArrowTag(_, Call(_, Some(exportTo))), acc) =>
Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
case (CallServiceTag(_, _, Call(_, Some(exportTo))), acc) =>
Eval.later(acc.foldLeft(Set(exportTo.name))(_ ++ _))
case (_, acc) => Eval.later(acc.foldLeft(Set.empty[String])(_ ++ _))
}
@ -106,9 +107,9 @@ object FuncOp {
cf.tail
.map(_.foldLeft[(A, Chain[Tree])]((headA, head.tailForced)) {
case ((aggrA, aggrTail), child) =>
traverseA(child, aggrA)(f).value.map(aggrTail.append)
traverseA(child, aggrA)(f).value match { case (a, tree) => (a, aggrTail.append(tree)) }
})
.map(_.map(ch => head.copy(tail = Eval.now(ch))))
.map { case (a, ch) => (a, head.copy(tail = Eval.now(ch))) }
}
// Semigroup for foldRight processing


@ -10,7 +10,7 @@ import wvlet.log.LogSupport
// Can be heavily optimized by caching parent cursors, not just list of zippers
case class RawCursor(tree: NonEmptyList[ChainZipper[FuncOp.Tree]])
extends ChainCursor[RawCursor, FuncOp.Tree](RawCursor) with LogSupport {
extends ChainCursor[RawCursor, FuncOp.Tree](RawCursor.apply) with LogSupport {
def tag: RawTag = current.head
def parentTag: Option[RawTag] = parent.map(_.head)


@ -90,7 +90,7 @@ class TopologySpec extends AnyFlatSpec with Matchers {
}
"topology resolver" should "build return path in par if there are exported variables" in {
val export = Some(Call.Export("result", ScalarType.string))
val exportTo = Some(Call.Export("result", ScalarType.string))
val result = VarModel("result", ScalarType.string)
val init = on(
@ -101,7 +101,7 @@ class TopologySpec extends AnyFlatSpec with Matchers {
on(
otherPeer,
otherRelay :: Nil,
callTag(1, export)
callTag(1, exportTo)
),
callTag(2)
),
@ -117,7 +117,7 @@ class TopologySpec extends AnyFlatSpec with Matchers {
MakeRes.seq(
through(relay),
through(otherRelay),
callRes(1, otherPeer, export),
callRes(1, otherPeer, exportTo),
through(otherRelay),
through(relay),
// we should return to a caller to continue execution


@ -6,7 +6,7 @@ import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser._
import cats.data.{Chain, Validated, ValidatedNec}
import cats.free.Cofree
import cats.parse.{Parser0 => P0}
import cats.parse.Parser0 as P0
import cats.{Comonad, Eval}
case class Ast[F[_]](head: Ast.Head[F], tree: Ast.Tree[F]) {
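
Most files in this commit also switch to the Scala 3 import spelling: * instead of _ for wildcards and `as` instead of => for renames. Both forms are still accepted by Scala 3.0.x, so this is a purely syntactic cleanup; a minimal illustration:

import cats.parse.Parser as P     // Scala 2: import cats.parse.{Parser => P}
import cats.parse.Parser0 as P0   // Scala 2: import cats.parse.{Parser0 => P0}
import cats.syntax.comonad.*      // Scala 2: import cats.syntax.comonad._

object ImportSyntaxSketch {
  val dot: P[Unit]    = P.char('.')
  val spaces: P0[Unit] = P.char(' ').rep0.void
}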


@ -1,16 +1,16 @@
package aqua.parser
import aqua.parser.Ast.Tree
import aqua.parser.lexer.Token._
import aqua.parser.lexer.Token
import aqua.parser.lexer.Token.*
import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser._
import aqua.parser.lift.LiftParser.*
import cats.data.Chain.:==
import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
import cats.free.Cofree
import cats.parse.{Parser => P}
import cats.syntax.comonad._
import cats.parse.Parser as P
import cats.syntax.comonad.*
import cats.{Comonad, Eval}
import Chain.:==
import aqua.parser.lexer.Token
abstract class Expr[F[_]](val companion: Expr.Companion, val token: Token[F]) {
@ -102,9 +102,9 @@ object Expr {
case class Acc[F[_]](
block: Option[(F[String], Tree[F])] = None,
window: Chain[(F[String], Tree[F])] = Chain.empty,
currentChildren: Chain[Ast.Tree[F]] = Chain.empty,
error: Chain[ParserError[F]] = Chain.empty
window: Chain[(F[String], Tree[F])] = Chain.empty[(F[String], Tree[F])],
currentChildren: Chain[Ast.Tree[F]] = Chain.empty[Ast.Tree[F]],
error: Chain[ParserError[F]] = Chain.empty[ParserError[F]]
)
// converts list of expressions to a tree
@ -119,7 +119,7 @@ object Expr {
val initialIndent = lHead._1.extract.length
// recursively creating a tree
// moving a window on a list depending on the nesting of the code
val acc = exprs.foldLeft(
val acc = exprs.foldLeft[Acc[F]](
Acc[F]()
) {
case (acc, (indent, currentExpr)) if acc.error.isEmpty =>


@ -1,13 +1,14 @@
package aqua.parser.lexer
import aqua.parser.lexer.Token._
import aqua.parser.lexer.Token.*
import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser._
import aqua.parser.lift.LiftParser.*
import cats.data.NonEmptyList
import cats.parse.{Numbers, Parser => P, Parser0 => P0}
import cats.syntax.comonad._
import cats.syntax.functor._
import cats.parse.{Numbers, Parser as P, Parser0 as P0}
import cats.syntax.comonad.*
import cats.syntax.functor.*
import cats.{Comonad, Functor}
import scala.language.postfixOps
sealed trait LambdaOp[F[_]] extends Token[F]
@ -34,10 +35,11 @@ object LambdaOp {
private def parseArr[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] = `*`.lift.map(IntoArray(_))
private val intP0: P0[Int] = Numbers.nonNegativeIntString.map(_.toInt).?.map(_.getOrElse(0))
private val nonNegativeIntP0: P0[Int] =
Numbers.nonNegativeIntString.map(_.toInt).?.map(_.getOrElse(0))
private def parseIdx[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] =
((`!`: P[Unit]) *> intP0).lift.map(IntoIndex(_))
(exclamation *> nonNegativeIntP0).lift.map(IntoIndex(_))
private def parseOp[F[_]: LiftParser: Comonad]: P[LambdaOp[F]] =
P.oneOf(parseField.backtrack :: parseArr :: parseIdx :: Nil)


@ -64,7 +64,7 @@ object Token {
val `.` : P[Unit] = P.char('.')
val `"` : P[Unit] = P.char('"')
val `*` : P[Unit] = P.char('*')
val `!` : P[Unit] = P.char('!')
val exclamation : P[Unit] = P.char('!')
val `[]` : P[Unit] = P.string("[]")
val `` : P[Unit] = P.char('')
val `⊥` : P[Unit] = P.char('⊥')


@ -2,14 +2,14 @@ package aqua.parser
import aqua.AquaSpec
import aqua.parser.Ast.parser
import aqua.parser.expr._
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, EqOp, Token}
import aqua.parser.expr.*
import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, EqOp, Literal, Token, VarLambda}
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
import aqua.types.ScalarType._
import aqua.types.ScalarType.*
import cats.Id
import cats.data.Chain
import cats.free.Cofree
import cats.syntax.foldable._
import cats.syntax.foldable.*
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
@ -161,12 +161,17 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
// Local service
qTree.d() shouldBe ServiceExpr(toAb("Local"), Some(toStr("local")))
qTree.d() shouldBe ArrowTypeExpr("gt", toArrowType(Nil, Some(scToBt(bool))))
qTree.d() shouldBe FuncExpr("tryGen", Nil, Some(scToBt(bool)), Some("v"))
qTree.d() shouldBe FuncExpr("tryGen", Nil, Some(scToBt(bool)), Some("v": VarLambda[Id]))
qTree.d() shouldBe OnExpr(toStr("deeper"), List(toStr("deep")))
qTree.d() shouldBe CallArrowExpr(Some("v"), Some(toAb("Local")), "gt", Nil)
qTree.d() shouldBe ReturnExpr(toVar("v"))
// genC function
qTree.d() shouldBe FuncExpr("genC", List(toArgSc("val", string)), Some(boolSc), Some("two"))
qTree.d() shouldBe FuncExpr(
"genC",
List(toArgSc("val", string)),
Some(boolSc),
Some("two": VarLambda[Id])
)
qTree.d() shouldBe CallArrowExpr(Some("one"), Some(toAb("Local")), "gt", List())
qTree.d() shouldBe OnExpr(toStr("smth"), List(toStr("else")))
qTree.d() shouldBe CallArrowExpr(Some("two"), None, "tryGen", List())


@ -10,7 +10,7 @@ import org.scalatest.matchers.should.Matchers
class LambdaOpSpec extends AnyFlatSpec with Matchers with EitherValues {
"lambda ops" should "parse" in {
val opsP = (s: String) => LambdaOp.ops[Id].parseAll(s).right.value
val opsP = (s: String) => LambdaOp.ops[Id].parseAll(s).value
opsP(".field") should be(NonEmptyList.of(IntoField[Id]("field")))
opsP(".field.sub") should be(NonEmptyList.of(IntoField[Id]("field"), IntoField[Id]("sub")))


@ -8,31 +8,31 @@ import org.scalatest.matchers.should.Matchers
class TokenSpec extends AnyFlatSpec with Matchers with EitherValues {
"\\n token" should "be parsed" in {
` \n`.parseAll("\n") should be('right)
` \n`.parseAll(" \n") should be('right)
` \n`.parseAll(" \n") should be('right)
` \n`.parseAll(" \n") should be('right)
` \n`.parseAll("--comment\n") should be('right)
` \n`.parseAll(" --comment\n") should be('right)
` \n`.parseAll(" --comment\n") should be('right)
` \n`.parseAll(" --comment with many words\n") should be('right)
` \n`.parseAll(" --comment with many words \n") should be('right)
` \n`.parse(" --comment with many words \n").right.value should be(("", ()))
` \n`.parse(" --comment with many words \n ").right.value should be((" ", ()))
` \n`.parseAll("\n").isRight should be(true)
` \n`.parseAll(" \n").isRight should be(true)
` \n`.parseAll(" \n").isRight should be(true)
` \n`.parseAll(" \n").isRight should be(true)
` \n`.parseAll("--comment\n").isRight should be(true)
` \n`.parseAll(" --comment\n").isRight should be(true)
` \n`.parseAll(" --comment\n").isRight should be(true)
` \n`.parseAll(" --comment with many words\n").isRight should be(true)
` \n`.parseAll(" --comment with many words \n").isRight should be(true)
` \n`.parse(" --comment with many words \n").value should be(("", ()))
` \n`.parse(" --comment with many words \n ").value should be((" ", ()))
}
"\\n* token" should "match the same strings" in {
` \n+`.parseAll("\n") should be('right)
` \n+`.parseAll(" \n") should be('right)
` \n+`.parseAll(" \n") should be('right)
` \n+`.parseAll(" \n") should be('right)
` \n+`.parseAll("--comment\n") should be('right)
` \n+`.parseAll(" --comment\n") should be('right)
` \n+`.parseAll(" --comment\n") should be('right)
` \n+`.parseAll(" --comment with many words\n") should be('right)
` \n+`.parseAll(" --comment with many words \n") should be('right)
` \n+`.parse(" --comment with many words \n").right.value should be(("", ()))
` \n+`.parse(" --comment with many words \n ").right.value should be((" ", ()))
` \n+`.parseAll("\n").isRight should be(true)
` \n+`.parseAll(" \n").isRight should be(true)
` \n+`.parseAll(" \n").isRight should be(true)
` \n+`.parseAll(" \n").isRight should be(true)
` \n+`.parseAll("--comment\n").isRight should be(true)
` \n+`.parseAll(" --comment\n").isRight should be(true)
` \n+`.parseAll(" --comment\n").isRight should be(true)
` \n+`.parseAll(" --comment with many words\n").isRight should be(true)
` \n+`.parseAll(" --comment with many words \n").isRight should be(true)
` \n+`.parse(" --comment with many words \n").value should be(("", ()))
` \n+`.parse(" --comment with many words \n ").value should be((" ", ()))
}
"\\n* token" should "match multi-line comments" in {
@ -41,7 +41,7 @@ class TokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
| -- line 3
| -- line 4
|""".stripMargin).right.value should be(())
|""".stripMargin).value should be(())
}
}
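
The test changes in this and the surrounding specs follow from two things: Scala 3 removes symbol literals, so matchers like should be('right) are gone, and ScalaTest's EitherValues now exposes .value on Either directly instead of the deprecated .right.value. A minimal self-contained spec in the new style, assuming ScalaTest 3.2.x as in the build:

import org.scalatest.EitherValues
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class EitherStyleSpec extends AnyFlatSpec with Matchers with EitherValues {
  val parsed: Either[String, Int] = Right(42)

  "a parse result" should "be checked without symbol literals" in {
    parsed.isRight should be(true) // was: parsed should be('right)
    parsed.value should be(42)     // was: parsed.right.value should be(42)
  }
}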


@ -15,37 +15,37 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
implicit def strToBt(st: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](st)
"Basic type" should "parse" in {
BasicTypeToken.`basictypedef`.parseAll("u32").right.value should be(u32: BasicTypeToken[Id])
BasicTypeToken.`basictypedef`.parseAll("()") should be('left)
BasicTypeToken.`basictypedef`.parseAll("u32").value should be(u32: BasicTypeToken[Id])
BasicTypeToken.`basictypedef`.parseAll("()").isLeft should be(true)
}
"Arrow type" should "parse" in {
ArrowTypeToken.`arrowdef`.parseAll("-> B").right.value should be(
ArrowTypeToken.`arrowdef`.parseAll("-> B").value should be(
ArrowTypeToken[Id]((), Nil, Some(CustomTypeToken[Id]("B")))
)
ArrowTypeToken.`arrowdef`.parseAll("A -> B").right.value should be(
ArrowTypeToken.`arrowdef`.parseAll("A -> B").value should be(
ArrowTypeToken[Id]((), CustomTypeToken[Id]("A") :: Nil, Some(CustomTypeToken[Id]("B")))
)
ArrowTypeToken.`arrowWithNames`.parseAll("(a: A) -> B").right.value should be(
ArrowTypeToken.`arrowWithNames`.parseAll("(a: A) -> B").value should be(
ArrowTypeToken[Id]((), CustomTypeToken[Id]("A") :: Nil, Some(CustomTypeToken[Id]("B")))
)
ArrowTypeToken.`arrowdef`.parseAll("u32 -> Boo").right.value should be(
ArrowTypeToken.`arrowdef`.parseAll("u32 -> Boo").value should be(
ArrowTypeToken[Id]((), (u32: BasicTypeToken[Id]) :: Nil, Some(CustomTypeToken[Id]("Boo")))
)
TypeToken.`typedef`.parseAll("u32 -> ()").right.value should be(
TypeToken.`typedef`.parseAll("u32 -> ()").value should be(
ArrowTypeToken[Id]((), (u32: BasicTypeToken[Id]) :: Nil, None)
)
ArrowTypeToken.`arrowdef`.parseAll("A, u32 -> B").right.value should be(
ArrowTypeToken.`arrowdef`.parseAll("A, u32 -> B").value should be(
ArrowTypeToken[Id](
(),
CustomTypeToken[Id]("A") :: (u32: BasicTypeToken[Id]) :: Nil,
Some(CustomTypeToken[Id]("B"))
)
)
ArrowTypeToken.`arrowdef`.parseAll("[]Absolutely, u32 -> B").right.value should be(
ArrowTypeToken.`arrowdef`.parseAll("[]Absolutely, u32 -> B").value should be(
ArrowTypeToken[Id](
(),
ArrayTypeToken[Id]((), CustomTypeToken[Id]("Absolutely")) :: (u32: BasicTypeToken[


@ -10,36 +10,36 @@ import cats.Id
class ValueSpec extends AnyFlatSpec with Matchers with EitherValues {
"var getter" should "parse" in {
Value.`value`.parseAll("varname").right.value should be(VarLambda(Name[Id]("varname"), Nil))
Value.`value`.parseAll("varname.field").right.value should be(
Value.`value`.parseAll("varname").value should be(VarLambda(Name[Id]("varname"), Nil))
Value.`value`.parseAll("varname.field").value should be(
VarLambda(Name[Id]("varname"), IntoField[Id]("field") :: Nil)
)
Value.`value`.parseAll("varname.field.sub").right.value should be(
Value.`value`.parseAll("varname.field.sub").value should be(
VarLambda(Name[Id]("varname"), IntoField[Id]("field") :: IntoField[Id]("sub") :: Nil)
)
}
"literals" should "parse" in {
Value.`value`.parseAll("true").right.value should be(Literal[Id]("true", LiteralType.bool))
Value.`value`.parseAll("false").right.value should be(Literal[Id]("false", LiteralType.bool))
Value.`value`.parseAll("true").value should be(Literal[Id]("true", LiteralType.bool))
Value.`value`.parseAll("false").value should be(Literal[Id]("false", LiteralType.bool))
Value.`value`.parseAll("1").right.value should be(Literal[Id]("1", LiteralType.number))
Value.`value`.parseAll("1111").right.value should be(Literal[Id]("1111", LiteralType.number))
Value.`value`.parseAll("1").value should be(Literal[Id]("1", LiteralType.number))
Value.`value`.parseAll("1111").value should be(Literal[Id]("1111", LiteralType.number))
Value.`value`.parseAll("-1543").right.value should be(Literal[Id]("-1543", LiteralType.signed))
Value.`value`.parseAll("-1543").value should be(Literal[Id]("-1543", LiteralType.signed))
Value.`value`.parseAll("1.0").right.value should be(Literal[Id]("1.0", LiteralType.float))
Value.`value`.parseAll("1.23").right.value should be(Literal[Id]("1.23", LiteralType.float))
Value.`value`.parseAll("-1.23").right.value should be(Literal[Id]("-1.23", LiteralType.float))
Value.`value`.parseAll("1.0").value should be(Literal[Id]("1.0", LiteralType.float))
Value.`value`.parseAll("1.23").value should be(Literal[Id]("1.23", LiteralType.float))
Value.`value`.parseAll("-1.23").value should be(Literal[Id]("-1.23", LiteralType.float))
Value.`value`.parseAll("\"some crazy string\"").right.value should be(
Value.`value`.parseAll("\"some crazy string\"").value should be(
Literal[Id]("\"some crazy string\"", LiteralType.string)
)
// This does not work :(
// Value.`value`.parseAll("\"some crazy string with escaped \\\" quote\"").right.value should be(
// Value.`value`.parseAll("\"some crazy string with escaped \\\" quote\"").value should be(
// Literal("\"some crazy string with escaped \\\" quote\"", BasicType.string)
// )
Value.`value`.parse("\"just string\" ").right.value should be(
Value.`value`.parse("\"just string\" ").value should be(
(" ", Literal[Id]("\"just string\"", LiteralType.string))
)
}


@ -1 +1 @@
sbt.version=1.5.2
sbt.version=1.5.5


@ -57,7 +57,7 @@ object Semantics extends LogSupport {
ast.cata(folder[F, Alg[F, *]]).value
def interpret[F[_]](free: Free[Alg[F, *], Model]): State[CompilerState[F], Model] = {
import monocle.macros.syntax.all._
import monocle.syntax.all._
implicit val re: ReportError[F, CompilerState[F]] =
(st: CompilerState[F], token: Token[F], hint: String) =>


@ -51,7 +51,7 @@ object NamesState {
def init[F[_]](context: AquaContext): NamesState[F] =
NamesState(
rootArrows = context.allFuncs().map(_.map(_.arrowType)),
constants = context.allValues().map(_.map(_.lastType))
rootArrows = context.allFuncs().map { case (s, fc) => (s, fc.arrowType) },
constants = context.allValues().map { case (s, vm) => (s, vm.lastType) }
)
}


@ -20,10 +20,10 @@ case class DefineAlias[F[_]](name: CustomTypeToken[F], target: Type) extends Typ
case class ResolveLambda[F[_]](root: Type, ops: List[LambdaOp[F]])
extends TypeOp[F, List[LambdaModel]]
case class EnsureTypeMatches[F[_]](token: Token[F], expected: Type, given: Type)
case class EnsureTypeMatches[F[_]](token: Token[F], expected: Type, givenType: Type)
extends TypeOp[F, Boolean]
case class ExpectNoExport[F[_]](token: Token[F]) extends TypeOp[F, Unit]
case class CheckArgumentsNum[F[_]](token: Token[F], expected: Int, given: Int)
case class CheckArgumentsNum[F[_]](token: Token[F], expected: Int, givenNum: Int)
extends TypeOp[F, Boolean]
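
Like export, given is a keyword in Scala 3 (given instances), so the given fields and parameters in the types algebra are renamed to givenType / givenNum. A minimal sketch:

object GivenKeywordSketch {
  // case class EnsureTypeMatches(expected: String, given: String)  // no longer parses in Scala 3
  case class EnsureTypeMatches(expected: String, givenType: String) // rename, as in the diff
  case class Escaped(expected: String, `given`: String)             // backticks also work
}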


@ -33,14 +33,14 @@ class TypesAlgebra[F[_], Alg[_]](implicit T: InjectK[TypeOp[F, *], Alg]) {
def resolveLambda(root: Type, ops: List[LambdaOp[F]]): Free[Alg, List[LambdaModel]] =
Free.liftInject[Alg](ResolveLambda(root, ops))
def ensureTypeMatches(token: Token[F], expected: Type, given: Type): Free[Alg, Boolean] =
Free.liftInject[Alg](EnsureTypeMatches[F](token, expected, given))
def ensureTypeMatches(token: Token[F], expected: Type, givenType: Type): Free[Alg, Boolean] =
Free.liftInject[Alg](EnsureTypeMatches[F](token, expected, givenType))
def expectNoExport(token: Token[F]): Free[Alg, Unit] =
Free.liftInject[Alg](ExpectNoExport[F](token))
def checkArgumentsNumber(token: Token[F], expected: Int, given: Int): Free[Alg, Boolean] =
Free.liftInject[Alg](CheckArgumentsNum(token, expected, given))
def checkArgumentsNumber(token: Token[F], expected: Int, givenNum: Int): Free[Alg, Boolean] =
Free.liftInject[Alg](CheckArgumentsNum(token, expected, givenNum))
}
object TypesAlgebra {


@ -99,9 +99,9 @@ class TypesInterpreter[F[_], X](implicit lens: Lens[X, TypesState[F]], error: Re
case etm: EnsureTypeMatches[F] =>
// TODO in case of two literals, check for types intersection?
if (etm.expected.acceptsValueOf(etm.`given`)) State.pure(true)
if (etm.expected.acceptsValueOf(etm.givenType)) State.pure(true)
else
report(etm.token, s"Types mismatch, expected: ${etm.expected}, given: ${etm.`given`}")
report(etm.token, s"Types mismatch, expected: ${etm.expected}, given: ${etm.givenType}")
.as(false)
case ene: ExpectNoExport[F] =>
@ -111,11 +111,11 @@ class TypesInterpreter[F[_], X](implicit lens: Lens[X, TypesState[F]], error: Re
).as(())
case ca: CheckArgumentsNum[F] =>
if (ca.expected == ca.given) State.pure(true)
if (ca.expected == ca.givenNum) State.pure(true)
else
report(
ca.token,
s"Number of arguments doesn't match the function type, expected: ${ca.expected}, given: ${ca.`given`}"
s"Number of arguments doesn't match the function type, expected: ${ca.expected}, given: ${ca.givenNum}"
).as(false)
}
}