mirror of
https://github.com/fluencelabs/aqua.git
synced 2024-12-04 22:50:18 +00:00
import basics (#49)

* Functions must be marked with `pub` to be exported
* AquaFiles WIP
* Factoring out linker
* Core linker logic
* FileModuleId WIP
* AquaFile WIP
* AquaFiles WIP
* Basic imports work
* Show errors
* Dirty hack to make imports work (almost)
* Better error display
* Simplified AquaCompiler interface
* Write compiled files
* New AquaCli
* CLI fixed, works
* Review fixes
This commit is contained in:
parent 569df52d27
commit 970da2e1b7
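The commit wires imports end-to-end. A rough sketch of the resulting pipeline, using names from the diff below (the wrapper object is hypothetical; `compileFilesTo` is the real entry point, the numbered comments are a summary, not exact signatures):

package aqua.example // hypothetical wrapper, not part of the commit

import aqua.AquaCompiler
import cats.effect.IO
import java.nio.file.Paths

object PipelineSketch {
  // 1. AquaFiles.readAndResolve reads .aqua sources and follows `import`s
  // 2. Linker.apply combines module bodies along the dependency graph
  // 3. the chosen backend writes .ts or .air next to each source
  val compile =
    AquaCompiler.compileFilesTo[IO](
      Paths.get("./aqua-src"),       // source directory
      LazyList(Paths.get("./aqua")), // import search paths
      Paths.get("./target"),         // output directory
      AquaCompiler.TypescriptTarget
    )
}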
@@ -27,6 +27,7 @@ Input directory should contain files with `aqua` scripts.

- **[parser](./parser)** - parser, takes source text and produces a source AST
- **[model](./model)** - middle-end, internal representation of the code, optimizations and transformations
- **[semantics](./semantics)** - rules to convert source AST into the model
- **[linker](./linker)** - checks dependencies between modules, builds and combines an abstract dependencies tree
- **[backend/air](./backend/air)** - generates AIR code from the middle-end model
- **[backend/ts](./backend/ts)** - generates AIR code and TypeScript wrappers for use with the Fluence JS SDK
- **[cli](./cli)** - CLI interface
aqua-src/test-import.aqua (new file, 6 lines)
@@ -0,0 +1,6 @@
import "builtin.aqua"

func foo() -> u64:
    t <- Peer.timestamp_sec()
    id()
    <- t
build.sbt (35 lines)
@@ -2,15 +2,17 @@ val dottyVersion = "2.13.5"

scalaVersion := dottyVersion

//val dottyVersion = "3.0.0-RC1"
//val dottyVersion = "3.0.0-RC2"

val aquaV = "0.1.1"

val catsV = "2.4.2"
val catsParseV = "0.3.1"
val monocleV = "3.0.0-M3"
val scalaTestV = "3.2.5"
val fs2V = "3.0.0-M7"
val catsV = "2.5.0"
val catsParseV = "0.3.2"
val monocleV = "3.0.0-M4"
val scalaTestV = "3.2.7"
val fs2V = "3.0.0"
val catsEffectV = "3.0.2"
val declineV = "2.0.0-RC1"

name := "aqua-hll"

@@ -26,17 +28,18 @@ commons

lazy val cli = project
  .settings(commons: _*)
  .settings(
    mainClass in (Compile, run) := Some("aqua.Main"),
    mainClass in assembly := Some("aqua.Main"),
    mainClass in (Compile, run) := Some("aqua.AquaCli"),
    mainClass in assembly := Some("aqua.AquaCli"),
    assemblyJarName in assembly := "aqua-hll.jar",
    libraryDependencies ++= Seq(
      "com.github.scopt" %% "scopt" % "4.0.1",
      "org.typelevel" %% "cats-effect" % "3.0.0-RC2",
      "co.fs2" %% "fs2-core" % fs2V,
      "co.fs2" %% "fs2-io" % fs2V
      "com.monovore" %% "decline" % declineV,
      "com.monovore" %% "decline-effect" % declineV,
      "org.typelevel" %% "cats-effect" % catsEffectV,
      "co.fs2" %% "fs2-core" % fs2V,
      "co.fs2" %% "fs2-io" % fs2V
    )
  )
  .dependsOn(semantics, `backend-air`, `backend-ts`)
  .dependsOn(semantics, `backend-air`, `backend-ts`, linker)

lazy val types = project
  .settings(commons)

@@ -56,6 +59,12 @@ lazy val parser = project
  )
  .dependsOn(types)

lazy val linker = project
  .settings(commons: _*)
  .settings(
  )
  .dependsOn(parser)

lazy val model = project
  .settings(commons: _*)
  .settings(
@ -1,111 +0,0 @@
|
||||
-- predef
|
||||
|
||||
service Peer:
|
||||
timestamp: -> i64
|
||||
connect: []byte, []multiaddr -> bool
|
||||
|
||||
-- kad
|
||||
|
||||
<<<<<<<<<<<<< ALIAS >>>>>>>>>>>>>
|
||||
|
||||
alias Key = []byte
|
||||
|
||||
type Contact:
|
||||
peer_id: []byte
|
||||
multiaddrs: []multiaddr
|
||||
|
||||
type Node:
|
||||
key: Key
|
||||
contact: Contact
|
||||
|
||||
type KadConfig:
|
||||
bucketSize: i32
|
||||
siblingsSize: i32
|
||||
pingExpiresIn: i32
|
||||
|
||||
service Bucket:
|
||||
index: -> u32
|
||||
size: -> u32
|
||||
isFull: -> bool
|
||||
nodes: Key-> []Node
|
||||
|
||||
<<<<<<<<<<<<< RETURN OPTION >>>>>>>>>>>>>
|
||||
find: Key -> ?Node
|
||||
|
||||
update: Node, pingExpiresIn, currentTime -> ?Node to ping
|
||||
|
||||
service Siblings:
|
||||
size: -> i32
|
||||
isFull: -> bool
|
||||
nodes: Key -> []Node
|
||||
find: Key -> ?Node
|
||||
add: Node -> bool
|
||||
|
||||
<<<<<<<<<<<<< LOCAL FUNCTION PATTERN WITH RETURN >>>>>>>>>>>>>
|
||||
func ping(node: Node): {Peer} bool
|
||||
Peer.is_connected(node) || Peer.connect(node)
|
||||
|
||||
<<<<<<<<<<<<< BYTE OPS, XOR, ZEROS >>>>>>>>>>>>>
|
||||
func bucket(key: Key): {}Bucket
|
||||
index = zero_prefix $ this_peer_id & key
|
||||
idx_bucket(index)
|
||||
|
||||
<<<<<<<<<<<<< MAKE SRV ID FROM PARTS >>>>>>>>>>>>>
|
||||
func idx_bucket(index: i32): {}Bucket
|
||||
bucket_id = "bucket-"+index
|
||||
Bucket(bucket_id)
|
||||
|
||||
func update_bucket(node: Node, pingExpiresIn: i32): {Peer} bool
|
||||
time <- Peer.timestamp
|
||||
bucket = bucket(node.key)
|
||||
|
||||
last_node <- bucket.update(node, pingExpiresIn, time)
|
||||
if last_node:
|
||||
p <- ping(last_node, pingExpiresIn, time)
|
||||
if p:
|
||||
update(ping)
|
||||
else:
|
||||
update(node)
|
||||
|
||||
else true
|
||||
|
||||
func update_siblings(node: Node): {Siblings} bool
|
||||
Siblings.add(node)
|
||||
|
||||
ability RoutingTable(key_length):
|
||||
func find(key: Key): ?Node
|
||||
Siblings.find(key) || bucket(key).find(key)
|
||||
|
||||
func lookup(key: Key, num: i32): []Node
|
||||
o = order(\n -> zero_prefix $ n.key & key)
|
||||
<<<<<<<<<<<<< DIVERGING INDEXES ARE IMPOSSIBLE??? >>>>>>>>>>>>>
|
||||
idxs = diverging_indexes(key)
|
||||
$nodes: Node[]
|
||||
<<<<<<<<<<<<< PUSH ITEMS INTO A STREAM >>>>>>>>>>>>>
|
||||
for i <- idxs:
|
||||
$nodes ++ bucket(i).nodes(key)
|
||||
if $nodes.size > num: break
|
||||
|
||||
<<<<<<<<<<<<< SUBTRACTION WITH MAPPING, THEN ADD WITH ORDER >>>>>>>>>>>>>
|
||||
for n <- Siblings.nodes(key): // Take (siblings \ nodes)?
|
||||
if not $nodes(_.key).contains(n.key):
|
||||
$nodes + n // maybe push ordered?
|
||||
<<<<<<<<<<<<< ORDER A STREAM, TAKE SOME SUBSTREAM >>>>>>>>>>>>>
|
||||
$nodes.order(key).take(num)
|
||||
|
||||
<<<<<<<<<<<<< HOW TO MAKE A LAZY LOOKUP STREAM???? >>>>>>>>>>>>>
|
||||
func lookupAway(key: Key, num: i32): ????
|
||||
|
||||
func update(node: Node): {Peer}bool
|
||||
par:
|
||||
update_bucket || update_siblings
|
||||
|
||||
<<<<<<<<<<<<< MANY OPERATIONS WITH COLLECTIONS >>>>>>>>>>>>>
|
||||
func updateList(nodes: []Node): {}[]Node
|
||||
-- rearrange: take slices of nodes received by ???
|
||||
-- update portion: take a portion, update them, then update next portion
|
||||
-- update par portions: take all portions, update par
|
||||
-- divide nodes into groups by buckets (indices), make into slices (rearrange), then update par portions
|
||||
|
||||
|
||||
func lookupIterative(key: Key, num: i32, alpha: i32, pingExpiresIn): []Node
|
@@ -5,17 +5,25 @@ import aqua.backend.ts.TypescriptFile
import aqua.model.{Model, ScriptModel}
import aqua.parser.Ast
import cats.data.ValidatedNec
import aqua.parser.lift.Span
import aqua.parser.lift.{FileSpan, LiftParser, Span}
import aqua.semantics.Semantics
import cats.syntax.show._

object Aqua {

  def parse(input: String): ValidatedNec[AquaError, Ast[Span.F]] =
  def parseString(input: String): ValidatedNec[AquaError, Ast[Span.F]] =
    Ast.fromString[Span.F](input).leftMap(_.map(pe => SyntaxError(pe.failedAtOffset, pe.expected)))

  def parseFileString(name: String, input: String): ValidatedNec[AquaError, Ast[FileSpan.F]] = {
    implicit val fileLift: LiftParser[FileSpan.F] = FileSpan.fileSpanLiftParser(name, input)
    Ast
      .fromString[FileSpan.F](input)
      .leftMap(_.map(pe => SyntaxError(pe.failedAtOffset, pe.expected)))
  }

  // Will fail if imports are used
  def generateModel(input: String): ValidatedNec[AquaError, Model] =
    parse(input).andThen(ast =>
    parseString(input).andThen(ast =>
      Semantics.generateModel(ast).leftMap(_.map(ts => CompilerError(ts._1.unit._1, ts._2)))
    )

@@ -38,9 +46,4 @@ object Aqua {
    case _ => "//No input given"
  }

  def generateTS(input: String): ValidatedNec[AquaError, String] =
    generate(input, air = false)

  def generateAir(input: String): ValidatedNec[AquaError, String] =
    generate(input, air = true)
}
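A minimal usage sketch for the new file-aware parse entry point (the script literal and the object wrapper are hypothetical):

import aqua.Aqua
import cats.data.Validated

object ParseExample extends App {
  val source = "import \"builtin.aqua\"" // hypothetical one-line script
  Aqua.parseFileString("example.aqua", source) match {
    case Validated.Valid(ast)    => println(s"parsed: $ast")
    case Validated.Invalid(errs) => errs.toChain.toList.foreach(println)
  }
}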
cli/src/main/scala/aqua/AquaCli.scala (new file, 58 lines)
@@ -0,0 +1,58 @@
package aqua

import cats.data.Validated
import cats.effect.{ExitCode, IO, IOApp}
import com.monovore.decline.Opts
import com.monovore.decline.effect.CommandIOApp
import cats.syntax.apply._

import java.nio.file.Path

object AquaCli extends IOApp {

  val inputOpts: Opts[Path] =
    Opts.option[Path]("input", "Path to the input directory that contains your .aqua files", "i")

  val outputOpts: Opts[Path] =
    Opts.option[Path]("output", "Path to the output directory", "o")

  val importOpts: Opts[LazyList[Path]] =
    Opts
      .options[Path]("import", "Path to the directory to import from", "m")
      .map(_.toList.to(LazyList))
      .withDefault(LazyList.empty)

  val compileToAir: Opts[Boolean] =
    Opts
      .flag("air", "Generate .air file instead of typescript", "a")
      .map(_ => true)
      .withDefault(false)

  def mainOpts: Opts[IO[ExitCode]] =
    (inputOpts, importOpts, outputOpts, compileToAir).mapN { case (input, imports, output, toAir) =>
      AquaCompiler
        .compileFilesTo[IO](
          input,
          imports,
          output,
          if (toAir) AquaCompiler.AirTarget else AquaCompiler.TypescriptTarget
        )
        .map {
          case Validated.Invalid(errs) =>
            errs.map(println)
            ExitCode.Error
          case Validated.Valid(res) =>
            res.map(println)
            ExitCode.Success
        }
    }

  override def run(args: List[String]): IO[ExitCode] =
    CommandIOApp.run[IO](
      "aqua-c",
      "Aquamarine compiler",
      helpFlag = true,
      // TODO get version from SBT!
      Option("0.1.1").filter(_.nonEmpty)
    )(mainOpts, args)
}
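Assuming the jar name set in build.sbt above, the CLI would be driven as `java -jar aqua-hll.jar --input ./aqua-src --import ./aqua --output ./target` (the paths are placeholders). The same run can be expressed programmatically, since AquaCli is an IOApp; a sketch:

import cats.effect.unsafe.implicits.global

object RunCliSketch extends App {
  // hypothetical driver; the flags map to inputOpts, importOpts, outputOpts above
  aqua.AquaCli
    .run(List("--input", "./aqua-src", "--import", "./aqua", "--output", "./target"))
    .unsafeRunSync()
}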
cli/src/main/scala/aqua/AquaCompiler.scala (new file, 165 lines)
@@ -0,0 +1,165 @@
package aqua

import aqua.backend.air.FuncAirGen
import aqua.backend.ts.TypescriptFile
import aqua.io.{AquaFileError, AquaFiles, FileModuleId, Unresolvable}
import aqua.linker.Linker
import aqua.model.ScriptModel
import aqua.parser.lexer.Token
import aqua.parser.lift.FileSpan
import aqua.semantics.{CompilerState, Semantics}
import cats.Applicative
import cats.data.{Chain, EitherT, NonEmptyChain, Validated, ValidatedNec}
import cats.effect.kernel.Concurrent
import fs2.io.file.Files
import cats.syntax.monoid._
import cats.syntax.functor._
import cats.syntax.flatMap._
import cats.syntax.show._
import fs2.text

import java.nio.file.Path

object AquaCompiler {
  sealed trait CompileTarget
  case object TypescriptTarget extends CompileTarget
  case object AirTarget extends CompileTarget

  case class Prepared(target: String => Path, model: ScriptModel)

  def prepareFiles[F[_]: Files: Concurrent](
    srcPath: Path,
    imports: LazyList[Path],
    targetPath: Path
  ): F[ValidatedNec[String, Chain[Prepared]]] =
    AquaFiles
      .readAndResolve[F, CompilerState.S[FileSpan.F]](
        srcPath,
        imports,
        ast =>
          _.flatMap(m => {
            //println(Console.YELLOW + "running for ast " + Console.RESET);
            for {
              y <- Semantics.astToState(ast)
            } yield m |+| y
          })
      )
      .value
      .map {
        case Left(fileErrors) =>
          Validated.invalid(fileErrors.map(_.showForConsole))

        case Right(modules) =>
          Linker[FileModuleId, AquaFileError, CompilerState.S[FileSpan.F]](
            modules,
            ids => Unresolvable(ids.map(_.id.file.toString).mkString(" -> "))
          ) match {
            case Validated.Valid(files) ⇒
              val (errs, preps) =
                files.toSeq.foldLeft[(Chain[String], Chain[Prepared])]((Chain.empty, Chain.empty)) {
                  case ((errs, preps), (modId, proc)) =>
                    proc.run(CompilerState()).value match {
                      case (proc, _) if proc.errors.nonEmpty =>
                        (errs ++ showProcErrors(proc.errors), preps)

                      case (_, model: ScriptModel) =>
                        (errs, preps :+ Prepared(modId.targetPath(srcPath, targetPath, _), model))

                      case (_, model) =>
                        (
                          errs.append(Console.RED + "Unknown model: " + model + Console.RESET),
                          preps
                        )
                    }
                }
              NonEmptyChain
                .fromChain(errs)
                .fold(Validated.validNec[String, Chain[Prepared]](preps))(Validated.invalid)

            case Validated.Invalid(errs) ⇒
              Validated.invalid(
                errs
                  .map(_.showForConsole)
              )
          }
      }

  def showProcErrors(
    errors: Chain[(Token[FileSpan.F], String)]
  ): Chain[String] =
    errors.map(err =>
      err._1.unit._1
        .focus(1)
        .map(_.toConsoleStr(err._2, Console.CYAN))
        .getOrElse("(Dup error, but offset is beyond the script)") + "\n"
    )

  def compileFilesTo[F[_]: Files: Concurrent](
    srcPath: Path,
    imports: LazyList[Path],
    targetPath: Path,
    compileTo: CompileTarget
  ): F[ValidatedNec[String, Chain[String]]] =
    prepareFiles(srcPath, imports, targetPath).flatMap[ValidatedNec[String, Chain[String]]] {
      case Validated.Invalid(e) =>
        Applicative[F].pure(Validated.invalid(e))
      case Validated.Valid(preps) =>
        (compileTo match {
          case TypescriptTarget =>
            preps
              .map(p => writeFile(p.target("ts"), TypescriptFile(p.model).generateTS()))

          // TODO add function name to AirTarget class
          case AirTarget =>
            preps
              .map(p =>
                writeFile(
                  p.target("air"),
                  p.model.resolveFunctions
                    .map(FuncAirGen)
                    .map(g =>
                      // add function name before body
                      s";; function name: ${g.func.name}\n\n" + g.generateAir.show
                    )
                    .toList
                    .mkString("\n\n\n")
                )
              )

        }).foldLeft(
          EitherT.rightT[F, NonEmptyChain[String]](Chain.empty[String])
        ) { case (accET, writeET) =>
          EitherT(for {
            a <- accET.value
            w <- writeET.value
          } yield (a, w) match {
            case (Left(errs), Left(err)) => Left(errs :+ err)
            case (Right(res), Right(r)) => Right(res :+ r)
            case (Left(errs), _) => Left(errs)
            case (_, Left(err)) => Left(NonEmptyChain.of(err))
          })
        }.value
          .map(Validated.fromEither)

    }

  def writeFile[F[_]: Files: Concurrent](file: Path, content: String): EitherT[F, String, String] =
    EitherT.right[String](Files[F].deleteIfExists(file)) >>
      EitherT[F, String, String](
        fs2.Stream
          .emit(
            content
          )
          .through(text.utf8Encode)
          .through(Files[F].writeAll(file))
          .attempt
          .map { e =>
            e.left
              .map(t => s"Error on writing file $file" + t)
          }
          .compile
          .drain
          .map(_ => Right(s"Compiled $file"))
      )

}
@@ -1,7 +1,9 @@
package aqua

import aqua.parser.lift.Span
import cats.Eval
import cats.data.NonEmptyList
import cats.parse.LocationMap
import cats.parse.Parser.Expectation

sealed trait AquaError {

@@ -12,10 +14,16 @@ case class SyntaxError(offset: Int, expectations: NonEmptyList[Expectation]) ext

  override def showForConsole(script: String): String =
    Span(offset, offset + 1)
      .focus(script, 3)
      .map(_.toConsoleStr(s"Syntax error, expected: ${expectations.toList.mkString(", ")}", Console.RED))
      .focus(Eval.later(LocationMap(script)), 2)
      .map(
        _.toConsoleStr(
          s"Syntax error, expected: ${expectations.toList.mkString(", ")}",
          Console.RED
        )
      )
      .getOrElse(
        "(offset is beyond the script, syntax errors) " + Console.RED + expectations.toList.mkString(", ")
        "(offset is beyond the script, syntax errors) " + Console.RED + expectations.toList
          .mkString(", ")
      ) + Console.RESET + "\n"
}

@@ -23,7 +31,7 @@ case class CompilerError(span: Span, hint: String) extends AquaError {

  override def showForConsole(script: String): String =
    span
      .focus(script, 3)
      .focus(Eval.later(LocationMap(script)), 1)
      .map(_.toConsoleStr(hint, Console.CYAN))
      .getOrElse("(Dup error, but offset is beyond the script)") + "\n"
}
@@ -1,88 +0,0 @@
package aqua

import aqua.cli.AquaGen.{convertAqua, convertAquaFilesToDir}
import aqua.cli.{AquaScriptErrors, ArgsConfig, CliArgsError, CliError, IOError}
import cats.effect.{ExitCode, IO, IOApp, Resource}

final case class ParseArgsException(
  private val message: String,
  private val cause: Throwable = None.orNull
) extends Exception(message, cause)

object Main extends IOApp {

  private def showResults(results: List[Either[CliError, String]]): IO[Unit] = {
    IO {
      results.map {
        case Left(err) =>
          err match {
            case AquaScriptErrors(name, script, errors) =>
              println(Console.RED + s"File '$name' processed with errors:" + Console.RESET)
              errors.map(_.showForConsole(script)).map(println)
            case CliArgsError(name, error) =>
              println(Console.RED + s"File '$name' processed with error: $error" + Console.RESET)
            case IOError(msg, t) =>
              println(Console.RED + s"$msg: $t" + Console.RESET)
          }
        case Right(name) =>
          println(Console.GREEN + s"File '$name' processed successfully" + Console.RESET)
      }
    }
  }

  private def readAllInput(): IO[Option[String]] = {
    import java.io.BufferedReader
    import java.io.InputStreamReader
    Resource
      .make(IO(new BufferedReader(new InputStreamReader(System.in))))(b => IO(b.close()))
      .use { reader =>
        IO {
          if (reader.ready()) {
            val lineSep = sys.props("line.separator")
            var line: String = reader.readLine
            val buf = new StringBuilder()
            while (line != null) {
              buf.append(line + lineSep)
              line = reader.readLine
            }
            Some(buf.toString)
          } else Option("")
        }
      }
  }

  override def run(args: List[String]): IO[ExitCode] = {
    ArgsConfig.parseArgs(args) match {
      case Some(config) =>
        val io = for {
          results <-
            (config.input, config.output) match {
              case (Some(i), Some(o)) =>
                convertAquaFilesToDir[IO](i, o, config.air)
              case _ =>
                readAllInput().map {
                  case Some(i) =>
                    List(convertAqua("stdin", i, config.air))
                  case None =>
                    println("input is empty")
                    List()
                }
            }
          _ <- showResults(results)
        } yield {
          if (results.exists(_.isLeft))
            ExitCode.Error
          else
            ExitCode.Success
        }
        io.handleErrorWith { err =>
          // these are unhandled errors
          println(err)
          IO(ExitCode.Error)
        }
      case _ =>
        // errors should have been reported before
        IO(ExitCode.Error)
    }
  }
}
@@ -3,25 +3,23 @@ package aqua

import cats.effect.{IO, IOApp}
import cats.data.Validated

import scala.io.Source
import java.nio.file.Paths

object Test extends IOApp.Simple {

  override def run: IO[Unit] =
    IO {
      def process(str: String) =
        Aqua.generateTS(str) match {
          case Validated.Valid(v) ⇒
            println(v)
            println(Console.GREEN + "Aqua script processed successfully" + Console.RESET)
          case Validated.Invalid(errs) ⇒
            errs.map(_.showForConsole(str)).map(println)
            println(Console.RED + s"Aqua script errored, total ${errs.length} problems found" + Console.RESET)
        }

      val script = Source.fromResource("for-par.aqua").mkString
      process(script)

    }
    AquaCompiler
      .compileFilesTo[IO](
        Paths.get("./aqua-src"),
        LazyList(Paths.get("./aqua")),
        Paths.get("./target"),
        AquaCompiler.TypescriptTarget
      )
      .map {
        case Validated.Invalid(errs) =>
          errs.map(println)
        case Validated.Valid(res) =>
          res.map(println)
      }

}
@@ -1,97 +0,0 @@
package aqua.cli

import aqua.Aqua
import cats.data.{EitherT, Validated}
import cats.effect.Concurrent
import cats.syntax.functor._
import cats.Applicative
import fs2.io.file.Files
import fs2.text

import java.io.File
import java.nio.file.Path

object AquaGen {

  def checkAndChangeExtension[F[_]: Applicative](
    fileName: String,
    air: Boolean
  ): EitherT[F, CliError, String] = {
    val arr = fileName.split("\\.").toList
    for {
      _ <- EitherT.cond[F](
        arr.nonEmpty && arr.last == "aqua",
        (),
        CliError.parseError(fileName, s"File '$fileName' should have '.aqua' extension")
      )
    } yield {
      arr.dropRight(1).mkString(".") + (if (air) ".air" else ".ts")
    }
  }

  def convertAqua[F[_]](name: String, text: String, air: Boolean): Either[CliError, String] =
    Aqua.generate(text, air) match {
      case Validated.Valid(v) ⇒
        Right(v)
      case Validated.Invalid(errs) ⇒
        Left(CliError.errorInfo(name, text, errs))
    }

  def convertAquaFromFile[F[_]: Files: Concurrent](
    file: File,
    outputDir: Path,
    air: Boolean
  ): EitherT[F, CliError, String] = {
    val name = file.getName
    for {
      newName <- checkAndChangeExtension(name, air)
      newPath = outputDir.resolve(newName)
      converted <- EitherT(
        Files[F]
          .readAll(file.toPath, 4096)
          .fold(Vector.empty[Byte])((acc, b) => acc :+ b)
          .flatMap(fs2.Stream.emits)
          .through(text.utf8Decode)
          .attempt
          .map {
            _.left
              .map(t => CliError.ioError("Error on reading file", t))
              .flatMap { text =>
                convertAqua(name, text, air)
              }
          }
          .compile
          .toList
          .map(_.head)
      )
      // delete old file
      _ <- EitherT.right(Files[F].deleteIfExists(newPath))
      result <-
        EitherT[F, CliError, String](
          fs2.Stream
            .emit(converted)
            .through(text.utf8Encode)
            .through(Files[F].writeAll(newPath))
            .attempt
            .map { e =>
              e.left
                .map(t => CliError.ioError("Error on writing file", t))
            }
            .compile
            .drain
            .map(_ => Right(newName))
        )
    } yield result
  }

  def convertAquaFilesToDir[F[_]: Files: Concurrent](
    files: List[File],
    outputDir: Path,
    air: Boolean
  ): F[List[Either[CliError, String]]] =
    fs2.Stream
      .emits(files)
      .evalMap(f => convertAquaFromFile(f, outputDir, air).value)
      .compile
      .toList
}
@@ -1,65 +0,0 @@
package aqua.cli

import scopt.{OParser, Read}
import scopt.Read.stringRead

import java.io.File
import java.nio.file.Path

case class Config(
  input: Option[List[File]] = None,
  output: Option[Path] = None,
  air: Boolean = false,
  debug: Boolean = false
)

object ArgsConfig {

  implicit val filesListRead: Read[List[File]] =
    stringRead.map { str =>
      val inputDir = new File(str)
      if (!inputDir.isDirectory && !inputDir.exists())
        throw new IllegalArgumentException(s"The input path '$str' must be a directory and exist")
      else
        inputDir.listFiles().toList
    }

  implicit val pathRead: Read[Path] =
    stringRead.map { str =>
      val outputDir = new File(str)
      if (!outputDir.isDirectory && !outputDir.exists())
        throw new IllegalArgumentException(s"The input path '$str' must be a directory and exist")
      else
        outputDir.toPath
    }

  private val builder = OParser.builder[Config]

  private val argParser = {
    import builder._
    OParser.sequence(
      programName("aqua-c"),
      head("aqua-c", "0.1", "Compiles Aquamarine language to TypeScript and AIR"),
      opt[Boolean]('d', "debug")
        .action((x, c) => c.copy(debug = x))
        .text("debug mode (not implemented)"),
      opt[List[File]]('i', "input")
        .action((x, c) => c.copy(input = Some(x)))
        .text("path to directory with aquamarine files"),
      opt[Path]('o', "output")
        .action((x, c) => c.copy(output = Some(x)))
        .text("path to output directory"),
      opt[Unit]('a', "air")
        .action((_, c) => c.copy(air = true))
        .text("generates only a code on air instead of TypeScript file"),
      help('h', "help").text("prints this usage text"),
      checkConfig(c => {
        if (c.input.isEmpty != c.output.isEmpty) {
          failure("'input' and 'output' must be both specified or not specified")
        } else success
      })
    )
  }

  def parseArgs(args: List[String]): Option[Config] = OParser.parse(argParser, args, Config())
}
@@ -1,25 +0,0 @@
package aqua.cli

import aqua.AquaError
import cats.data.NonEmptyChain

sealed trait CliError

object CliError {

  def parseError(name: String, error: String): CliError = {
    CliArgsError(name, error)
  }

  def errorInfo(name: String, script: String, errors: NonEmptyChain[AquaError]): CliError = {
    AquaScriptErrors(name, script, errors)
  }

  def ioError(msg: String, t: Throwable): CliError = {
    IOError(msg, t)
  }
}

case class IOError(msg: String, t: Throwable) extends Exception(msg, t) with CliError
case class CliArgsError(name: String, error: String) extends CliError
case class AquaScriptErrors(name: String, script: String, errors: NonEmptyChain[AquaError]) extends CliError
cli/src/main/scala/aqua/io/AquaFile.scala (new file, 107 lines)
@@ -0,0 +1,107 @@
package aqua.io

import aqua.Aqua
import aqua.linker.AquaModule
import aqua.parser.Ast
import aqua.parser.head.ImportExpr
import aqua.parser.lift.FileSpan
import cats.data.{EitherT, NonEmptyChain}
import cats.effect.Concurrent
import fs2.io.file.Files
import fs2.text
import cats.syntax.functor._
import cats.syntax.apply._

import java.nio.file.{Path, Paths}

case class AquaFile(
  id: FileModuleId,
  imports: Map[String, FileSpan.Focus],
  source: String,
  ast: Ast[FileSpan.F]
) {

  def module[F[_]: Concurrent, T](
    transpile: Ast[FileSpan.F] => T => T,
    importFrom: LazyList[Path]
  ): AquaFiles.ETC[F, AquaModule[FileModuleId, AquaFileError, T]] =
    imports.map { case (k, v) =>
      FileModuleId.resolve(v, Paths.get(k), id.file.getParent +: importFrom).map(_ -> v)
    }.foldLeft[AquaFiles.ETC[F, AquaModule[FileModuleId, AquaFileError, T]]](
      EitherT.rightT(
        AquaModule(
          id,
          Map(),
          transpile(ast)
        )
      )
    ) { case (modF, nextF) =>
      EitherT((modF.value, nextF.value).mapN {
        case (moduleV, Right(dependency)) =>
          moduleV.map(m =>
            m.copy(dependsOn =
              m.dependsOn + dependency.map(FileNotFound(_, dependency._1.file, importFrom))
            )
          )
        case (Right(_), Left(err)) =>
          Left(NonEmptyChain(err))
        case (Left(errs), Left(err)) =>
          Left(errs.append(err))
      })
    }

}

object AquaFile {

  def readSourceText[F[_]: Files: Concurrent](
    file: Path
  ): fs2.Stream[F, Either[AquaFileError, String]] =
    Files[F]
      .readAll(file, 4096)
      .fold(Vector.empty[Byte])((acc, b) => acc :+ b)
      .flatMap(fs2.Stream.emits)
      .through(text.utf8Decode)
      .attempt
      .map {
        _.left
          .map(t => FileSystemError(t))
      }

  def readAst[F[_]: Files: Concurrent](
    file: Path
  ): fs2.Stream[F, Either[AquaFileError, (String, Ast[FileSpan.F])]] =
    readSourceText[F](file).map(
      _.flatMap(source =>
        Aqua
          .parseFileString(file.toString, source)
          .map(source -> _)
          .toEither
          .left
          .map(AquaScriptErrors(file.toString, source, _))
      )
    )

  def read[F[_]: Files: Concurrent](file: Path): EitherT[F, AquaFileError, AquaFile] =
    EitherT(readAst[F](file).compile.last.map(_.getOrElse(Left(EmptyFileError(file))))).map {
      case (source, ast) =>
        AquaFile(
          FileModuleId(file.toAbsolutePath.normalize()),
          ast.head.tailForced
            .map(_.head)
            .collect { case ImportExpr(filename) =>
              val fn = filename.value.drop(1).dropRight(1)
              val focus = filename.unit._1.focus(1)
              fn -> focus
            }
            .collect { case (a, Some(b)) =>
              a -> b
            }
            .toList
            .toMap,
          source,
          ast
        )
    }

}
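A small sketch of loading a single file through the new reader, pointed at the test file added above (the wrapper object is hypothetical):

import aqua.io.{AquaFile, AquaFileError}
import cats.effect.IO
import java.nio.file.Paths

object ReadFileSketch {
  // Right(aquaFile) carries the module id, the import map
  // (import string -> focus, for error reporting), source text, and AST.
  val read: IO[Either[AquaFileError, AquaFile]] =
    AquaFile.read[IO](Paths.get("aqua-src/test-import.aqua")).value
}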
cli/src/main/scala/aqua/io/AquaFileError.scala (new file, 42 lines)
@@ -0,0 +1,42 @@
package aqua.io

import aqua.AquaError
import aqua.parser.lift.FileSpan
import cats.data.NonEmptyChain

import java.nio.file.Path

sealed trait AquaFileError {
  def showForConsole: String

  override def toString: String = showForConsole
}

case class FileNotFound(focus: FileSpan.Focus, name: Path, imports: Seq[Path])
    extends AquaFileError {

  override def showForConsole: String = focus.toConsoleStr(
    s"File not found at $name, looking in ${imports.mkString(", ")}",
    Console.YELLOW
  )
}

case class EmptyFileError(path: Path) extends AquaFileError {
  override def showForConsole: String = s"Path is empty: $path"
}

case class FileSystemError(err: Throwable) extends Exception(err) with AquaFileError {
  override def showForConsole: String = s"File system error"
}

case class Unresolvable(msg: String) extends AquaFileError {
  override def showForConsole: String = s"Unresolvable: $msg"
}

// TODO there should be no AquaErrors, as they do not fit
case class AquaScriptErrors(name: String, script: String, errors: NonEmptyChain[AquaError])
    extends AquaFileError {

  override def showForConsole: String =
    errors.map(_.showForConsole(script)).toChain.toList.mkString("\n")
}
cli/src/main/scala/aqua/io/AquaFiles.scala (new file, 108 lines)
@@ -0,0 +1,108 @@
package aqua.io

import aqua.linker.Modules
import aqua.parser.Ast
import aqua.parser.lift.FileSpan
import cats.data.{Chain, EitherT, NonEmptyChain}
import cats.effect.kernel.Concurrent
import cats.syntax.apply._
import fs2.io.file.Files

import java.nio.file.Path
import scala.util.Try

object AquaFiles {
  type Mods[T] = Modules[FileModuleId, AquaFileError, T]
  type ETC[F[_], T] = EitherT[F, NonEmptyChain[AquaFileError], T]

  def readSources[F[_]: Files: Concurrent](
    sourcePath: Path
  ): ETC[F, Chain[AquaFile]] =
    // TODO use effect instead of Try
    EitherT
      .fromEither[F](
        Try(sourcePath.toFile)
          .filter(_.isDirectory)
          .flatMap(d => Try(d.listFiles().toList))
          .toEither
      )
      .leftMap[AquaFileError](FileSystemError)
      .leftMap(NonEmptyChain.one)
      .flatMap(
        _.collect {
          case f if f.isFile && f.getName.endsWith(".aqua") =>
            AquaFile
              .read(f.toPath.toAbsolutePath)
              .map(Chain(_))
              .leftMap(NonEmptyChain.one)
          case f if f.isDirectory =>
            readSources(f.toPath)
        }
          .foldLeft[ETC[F, Chain[AquaFile]]](
            EitherT.rightT(Chain.empty)
          ) { case (accF, nextF) =>
            EitherT((accF.value, nextF.value).mapN {
              case (Right(acc), Right(v)) =>
                Right(acc ++ v)
              case (Left(acc), Left(v)) =>
                Left(acc ++ v)
              case (Left(acc), _) =>
                Left(acc)
              case (_, Left(v)) =>
                Left(v)
            })
          }
      )

  def sourceModules[F[_]: Concurrent, T](
    sources: Chain[AquaFile],
    importFromPaths: LazyList[Path],
    transpile: Ast[FileSpan.F] => T => T
  ): ETC[F, Mods[T]] =
    sources
      .map(_.module(transpile, importFromPaths))
      .foldLeft[ETC[F, Mods[T]]](
        EitherT.rightT(Modules())
      ) { case (modulesF, modF) =>
        for {
          ms <- modulesF
          m <- modF
        } yield ms.add(m, export = true)
      }

  def resolveModules[F[_]: Files: Concurrent, T](
    modules: Modules[FileModuleId, AquaFileError, T],
    importFromPaths: LazyList[Path],
    transpile: Ast[FileSpan.F] => T => T
  ): ETC[F, Mods[T]] =
    modules.dependsOn.map { case (moduleId, unresolvedErrors) =>
      AquaFile
        .read[F](moduleId.file)
        .leftMap(unresolvedErrors.prepend)
        .flatMap(_.module(transpile, importFromPaths))

    }.foldLeft[ETC[F, Mods[T]]](
      EitherT.rightT(modules)
    ) { case (modulesF, modF) =>
      for {
        ms <- modulesF
        m <- modF
      } yield ms.add(m)
    }.flatMap {
      case ms if ms.isResolved =>
        EitherT.rightT(ms)
      case ms => resolveModules(ms, importFromPaths, transpile)
    }

  def readAndResolve[F[_]: Files: Concurrent, T](
    sourcePath: Path,
    importFromPaths: LazyList[Path],
    transpile: Ast[FileSpan.F] => T => T
  ): ETC[F, Mods[T]] =
    for {
      srcs <- readSources(sourcePath)
      srcMods <- sourceModules(srcs, importFromPaths, transpile)
      resMods <- resolveModules(srcMods, importFromPaths, transpile)
    } yield resMods

}
cli/src/main/scala/aqua/io/FileModuleId.scala (new file, 53 lines)
@@ -0,0 +1,53 @@
package aqua.io

import aqua.parser.lift.FileSpan
import cats.data.EitherT
import cats.effect.kernel.Concurrent
import cats.syntax.applicative._

import java.nio.file.Path

case class FileModuleId(file: Path) {

  def targetPath(src: Path, target: Path, ext: String): Path = {
    val aqua =
      target.toAbsolutePath
        .normalize()
        .resolve(src.toAbsolutePath.normalize().relativize(file.toAbsolutePath.normalize()))
    aqua.getParent.resolve(aqua.getFileName.toString.stripSuffix(".aqua") + s".$ext")
  }
}

object FileModuleId {

  private def findFirstF[F[_]: Concurrent](
    in: LazyList[Path],
    notFound: EitherT[F, AquaFileError, FileModuleId]
  ): EitherT[F, AquaFileError, FileModuleId] =
    in.headOption.fold(notFound)(p =>
      EitherT(
        Concurrent[F].attempt(p.toFile.isFile.pure[F])
      )
        .leftMap[AquaFileError](FileSystemError)
        .recover({ case _ => false })
        .flatMap {
          case true =>
            EitherT(
              Concurrent[F].attempt(FileModuleId(p.toAbsolutePath.normalize()).pure[F])
            ).leftMap[AquaFileError](FileSystemError)
          case false =>
            findFirstF(in.tail, notFound)
        }
    )

  def resolve[F[_]: Concurrent](
    focus: FileSpan.Focus,
    src: Path,
    imports: LazyList[Path]
  ): EitherT[F, AquaFileError, FileModuleId] =
    findFirstF(
      imports
        .map(_.resolve(src)),
      EitherT.leftT(FileNotFound(focus, src, imports))
    )
}
linker/src/main/scala/aqua/linker/AquaModule.scala (new file, 3 lines)
@@ -0,0 +1,3 @@
package aqua.linker

case class AquaModule[I, E, T](id: I, dependsOn: Map[I, E], body: T => T)
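The T => T body encoding is what lets the linker compose modules: a module is a function from "everything my dependencies produced" to "that plus my own definitions". With T = String, as in LinkerSpec further below (the wrapper object is hypothetical):

import aqua.linker.AquaModule

object ModuleSketch {
  val mod1: AquaModule[String, String, String] =
    AquaModule(
      "mod1",
      Map("mod2" -> "unresolved mod2 in mod1"), // unresolved dependency and its error
      _ ++ " | mod1"                            // body appends to the dependencies' output
    )
}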
linker/src/main/scala/aqua/linker/Linker.scala (new file, 59 lines)
@@ -0,0 +1,59 @@
package aqua.linker

import cats.data.{NonEmptyChain, Validated, ValidatedNec}
import cats.kernel.{Monoid, Semigroup}
import cats.syntax.monoid._

import scala.annotation.tailrec

object Linker {

  @tailrec
  def iter[I, E, T: Semigroup](
    mods: List[AquaModule[I, E, T]],
    proc: Map[I, T => T],
    cycleError: List[AquaModule[I, E, T]] => E
  ): Either[E, Map[I, T => T]] =
    mods match {
      case Nil => Right(proc)
      case _ =>
        val (canHandle, postpone) = mods.partition(_.dependsOn.keySet.forall(proc.contains))
        println("ITERATE, can handle: " + canHandle.map(_.id))
        println(s"proc = ${proc.keySet}")

        if (canHandle.isEmpty && postpone.nonEmpty)
          Left(cycleError(postpone))
        else
          iter(
            postpone,
            // TODO can be done in parallel
            canHandle.foldLeft(proc) { case (acc, m) =>
              println(m.id + " dependsOn " + m.dependsOn.keySet)
              val deps: T => T =
                m.dependsOn.keySet.map(acc).foldLeft[T => T](identity) { case (fAcc, f) =>
                  println("COMBINING ONE TIME ")
                  t => {
                    println(s"call combine ${t}")
                    fAcc(t) |+| f(t)
                  }
                }
              acc + (m.id -> m.body.compose(deps))
            },
            cycleError
          )
    }

  def apply[I, E, T: Monoid](
    modules: Modules[I, E, T],
    cycleError: List[AquaModule[I, E, T]] => E
  ): ValidatedNec[E, Map[I, T]] =
    if (modules.dependsOn.nonEmpty) Validated.invalid(modules.dependsOn.values.reduce(_ ++ _))
    else
      Validated.fromEither(
        iter(modules.loaded.values.toList, Map.empty[I, T => T], cycleError)
          .map(_.view.filterKeys(modules.exports).mapValues(_.apply(Monoid[T].empty)).toMap)
          .left
          .map(NonEmptyChain.one)
      )

}
linker/src/main/scala/aqua/linker/Modules.scala (new file, 26 lines)
@@ -0,0 +1,26 @@
package aqua.linker

import cats.data.NonEmptyChain
import cats.syntax.option._

case class Modules[I, E, T](
  loaded: Map[I, AquaModule[I, E, T]] = Map.empty[I, AquaModule[I, E, T]],
  dependsOn: Map[I, NonEmptyChain[E]] = Map.empty[I, NonEmptyChain[E]],
  exports: Set[I] = Set.empty[I]
) {

  def add(m: AquaModule[I, E, T], export: Boolean = false): Modules[I, E, T] =
    if (loaded.contains(m.id)) this
    else
      copy(
        loaded = loaded + (m.id -> m),
        dependsOn = m.dependsOn.foldLeft(dependsOn - m.id) {
          case (deps, (mId, _)) if loaded.contains(mId) || mId == m.id => deps
          case (deps, (mId, err)) =>
            deps.updatedWith(mId)(_.fold(NonEmptyChain.one(err))(_.append(err)).some)
        },
        exports = if (export) exports + m.id else exports
      )

  def isResolved: Boolean = dependsOn.isEmpty
}
linker/src/test/scala/aqua/linker/LinkerSpec.scala (new file, 37 lines)
@@ -0,0 +1,37 @@
package aqua.linker

import cats.data.Validated
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class LinkerSpec extends AnyFlatSpec with Matchers {

  "linker" should "resolve dependencies" in {

    val empty = Modules[String, String, String]()

    val withMod1 =
      empty
        .add(
          AquaModule("mod1", Map("mod2" -> "unresolved mod2 in mod1"), _ ++ " | mod1"),
          export = true
        )
    withMod1.isResolved should be(false)

    Linker[String, String, String](
      withMod1,
      cycle => cycle.map(_.id).mkString(" -> ")
    ) should be(Validated.invalidNec("unresolved mod2 in mod1"))

    val withMod2 =
      withMod1.add(AquaModule("mod2", Map.empty, _ ++ " | mod2"))

    withMod2.isResolved should be(true)

    Linker[String, String, String](
      withMod2,
      cycle => cycle.map(_.id + "?").mkString(" -> ")
    ) should be(Validated.validNec(Map("mod1" -> " | mod2 | mod1")))
  }

}
@@ -1,6 +1,7 @@
package aqua.parser

import aqua.parser.expr._
import aqua.parser.head.{HeadExpr, HeaderExpr, ImportExpr}
import aqua.parser.lexer.Token._
import aqua.parser.lift.LiftParser
import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
@@ -8,7 +9,7 @@ import cats.free.Cofree
import cats.parse.{Parser => P, Parser0 => P0}
import cats.{Comonad, Eval}

case class Ast[F[_]](tree: Ast.Tree[F]) {
case class Ast[F[_]](head: Ast.Head[F], tree: Ast.Tree[F]) {

  def cata[T](folder: (Expr[F], Chain[T]) => Eval[T]): Eval[T] =
    Cofree.cata[Chain, Expr[F], T](tree)(folder)
@@ -16,17 +17,26 @@ case class Ast[F[_]](tree: Ast.Tree[F]) {

object Ast {
  type Tree[F[_]] = Cofree[Chain, Expr[F]]
  type Head[F[_]] = Cofree[Chain, HeaderExpr[F]]

  def rootExprs: List[Expr.Companion] =
  def treeExprs: List[Expr.Companion] =
    ServiceExpr :: AliasExpr :: DataStructExpr :: FuncExpr :: Nil

  def headExprs: List[HeaderExpr.Companion] =
    ImportExpr :: Nil

  def parser[F[_]: LiftParser: Comonad](ps: Indent): P0[Ast[F]] =
    P.repSep0(
      P.oneOf(rootExprs.map(_.ast[F](ps))),
    ((P.repSep0(P.oneOf(headExprs.map(_.ast[F])), ` \n+`) <* ` \n+`).? ~ P.repSep0(
      P.oneOf(treeExprs.map(_.ast[F](ps))),
      ` \n+`
    ).map(Chain.fromSeq)
      .surroundedBy(` \n+`.?)
      .map(ls => Ast(Cofree(RootExpr(), Eval.now(ls))))
    )).surroundedBy(` \n+`.?)
      .map {
        case (Some(head), tree) => Chain.fromSeq(head) -> Chain.fromSeq(tree)
        case (_, tree) => Chain.empty[Head[F]] -> Chain.fromSeq(tree)
      }
      .map { case (hs, ls) =>
        Ast(Cofree(HeadExpr(), Eval.now(hs)), Cofree(RootExpr(), Eval.now(ls)))
      }

  def fromString[F[_]: LiftParser: Comonad](script: String): ValidatedNec[P.Error, Ast[F]] =
    Validated
@@ -12,19 +12,20 @@ trait Expr[F[_]]

object Expr {

  def defer(companion: => Companion): Companion = new Companion {
    override def p[F[_]: LiftParser: Comonad]: P[Expr[F]] = companion.p[F]

    override def ast[F[_]: LiftParser: Comonad](ps: Indent): P[Tree[F]] = companion.ast[F](ps)
  }

  trait Companion {
    def p[F[_]: LiftParser: Comonad]: P[Expr[F]]

    def ast[F[_]: LiftParser: Comonad](ps: Indent): P[Ast.Tree[F]]
  }

  abstract class And(thenInline: List[Expr.Companion], orIndented: List[Expr.Companion]) extends Companion {
  def defer(companion: => Companion): Companion = new Companion {
    override def p[F[_]: LiftParser: Comonad]: P[Expr[F]] = companion.p[F]

    override def ast[F[_]: LiftParser: Comonad](ps: Indent): P[Tree[F]] = companion.ast[F](ps)
  }

  abstract class And(thenInline: List[Expr.Companion], orIndented: List[Expr.Companion])
      extends Companion {

    override def ast[F[_]: LiftParser: Comonad](ps: Indent): P[Ast.Tree[F]] =
      (p[F] ~ ((` `.backtrack *> P
@@ -9,8 +9,12 @@ import cats.free.Cofree
import cats.parse.Parser
import cats.Comonad

case class FuncExpr[F[_]](name: Name[F], args: List[Arg[F]], ret: Option[DataTypeToken[F]], retValue: Option[Value[F]])
    extends Expr[F]
case class FuncExpr[F[_]](
  name: Name[F],
  args: List[Arg[F]],
  ret: Option[DataTypeToken[F]],
  retValue: Option[Value[F]]
) extends Expr[F]

object FuncExpr
    extends Expr.AndIndented(
@@ -26,8 +30,9 @@ object FuncExpr

  override def p[F[_]: LiftParser: Comonad]: Parser[FuncExpr[F]] =
    ((`func` *> ` ` *> Name.p[F]) ~ comma0(Arg.p)
      .between(`(`, `)`) ~ (` -> ` *> DataTypeToken.`datatypedef`).?).map { case ((name, args), ret) =>
      FuncExpr(name, args, ret, None)
      .between(`(`, `)`) ~ (` -> ` *> DataTypeToken.`datatypedef`).?).map {
      case ((name, args), ret) =>
        FuncExpr(name, args, ret, None)
    }

  override def ast[F[_]: LiftParser: Comonad](ps: Indent): Parser[Tree[F]] =
@@ -3,5 +3,3 @@ package aqua.parser.expr

import aqua.parser.Expr

case class RootExpr[F[_]]() extends Expr[F]

object RootExpr
parser/src/main/scala/aqua/parser/head/HeadExpr.scala (new file, 3 lines)
@@ -0,0 +1,3 @@
package aqua.parser.head

case class HeadExpr[F[_]]() extends HeaderExpr[F]
parser/src/main/scala/aqua/parser/head/HeaderExpr.scala (new file, 25 lines)
@@ -0,0 +1,25 @@
package aqua.parser.head

import aqua.parser.Ast
import aqua.parser.lift.LiftParser
import cats.{Comonad, Eval}
import cats.data.Chain
import cats.free.Cofree
import cats.parse.{Parser => P}

trait HeaderExpr[F[_]]

object HeaderExpr {

  trait Companion {
    def p[F[_]: LiftParser: Comonad]: P[HeaderExpr[F]]

    def ast[F[_]: LiftParser: Comonad]: P[Ast.Head[F]]
  }

  abstract class Leaf extends Companion {

    override def ast[F[_]: LiftParser: Comonad]: P[Ast.Head[F]] =
      p[F].map(Cofree[Chain, HeaderExpr[F]](_, Eval.now(Chain.empty)))
  }
}
parser/src/main/scala/aqua/parser/head/ImportExpr.scala (new file, 15 lines)
@@ -0,0 +1,15 @@
package aqua.parser.head

import aqua.parser.lexer.Token._
import aqua.parser.lexer.{Literal, Value}
import aqua.parser.lift.LiftParser
import cats.Comonad
import cats.parse.Parser

case class ImportExpr[F[_]](filename: Literal[F]) extends HeaderExpr[F]

object ImportExpr extends HeaderExpr.Leaf {

  override def p[F[_]: LiftParser: Comonad]: Parser[HeaderExpr[F]] =
    `import` *> ` ` *> Value.string[F].map(ImportExpr(_))
}
@@ -21,6 +21,9 @@ object Token {

  val ` ` : P[String] = P.charsWhile(fSpaces)
  val `data`: P[Unit] = P.string("data")
  val `import`: P[Unit] = P.string("import")
  val `use`: P[Unit] = P.string("use")
  val `as`: P[Unit] = P.string("as")
  val `alias`: P[Unit] = P.string("alias")
  val `service`: P[Unit] = P.string("service")
  val `func`: P[Unit] = P.string("func")
@@ -37,8 +40,14 @@ object Token {
  val `par`: P[Unit] = P.string("par")
  val `:` : P[Unit] = P.char(':')
  val ` : ` : P[Unit] = P.char(':').surroundedBy(` `.?)
  val `name`: P[String] = (P.charIn(az) ~ P.charsWhile(anum_).?).map { case (c, s) ⇒ c.toString ++ s.getOrElse("") }
  val `Class`: P[String] = (P.charIn(AZ) ~ P.charsWhile(anum_).?).map { case (c, s) ⇒ c.toString ++ s.getOrElse("") }

  val `name`: P[String] = (P.charIn(az) ~ P.charsWhile(anum_).?).map { case (c, s) ⇒
    c.toString ++ s.getOrElse("")
  }

  val `Class`: P[String] = (P.charIn(AZ) ~ P.charsWhile(anum_).?).map { case (c, s) ⇒
    c.toString ++ s.getOrElse("")
  }
  val `\n` : P[Unit] = P.string("\n\r") | P.char('\n') | P.string("\r\n")
  val `--` : P[Unit] = ` `.?.with1 *> P.string("--") <* ` `.?
  val ` \n` : P[Unit] = (` `.?.void *> (`--` *> P.charsWhile(_ != '\n')).?.void).with1 *> `\n`
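A minimal sketch of the lexer conventions above, run directly with cats-parse (parseAll requires the whole input to match; the wrapper object is hypothetical):

import aqua.parser.lexer.Token._

object TokenSketch {
  val ok  = `name`.parseAll("timestampSec") // Right("timestampSec"): lowerCamel identifier
  val cls = `Class`.parseAll("Peer")        // Right("Peer"): UpperCamel identifier
}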
parser/src/main/scala/aqua/parser/lift/FileSpan.scala (new file, 46 lines)
@@ -0,0 +1,46 @@
package aqua.parser.lift

import cats.{Comonad, Eval}
import cats.parse.{LocationMap, Parser => P}

import scala.language.implicitConversions

case class FileSpan(name: String, source: String, locationMap: Eval[LocationMap], span: Span) {

  def focus(ctx: Int): Option[FileSpan.Focus] =
    span.focus(locationMap, ctx).map(FileSpan.Focus(name, locationMap, ctx, _))
}

object FileSpan {

  case class Focus(name: String, locationMap: Eval[LocationMap], ctx: Int, spanFocus: Span.Focus) {

    def toConsoleStr(msg: String, onLeft: String, onRight: String = Console.RESET): String =
      s"$name:${spanFocus.line._1 + 1}:${spanFocus.column + 1}\n" + spanFocus.toConsoleStr(
        msg,
        onLeft,
        onRight
      )
  }

  type F[T] = (FileSpan, T)

  implicit object spanComonad extends Comonad[F] {
    override def extract[A](x: F[A]): A = x._2

    override def coflatMap[A, B](fa: F[A])(f: F[A] ⇒ B): F[B] = fa.copy(_2 = f(fa))

    override def map[A, B](fa: F[A])(f: A ⇒ B): F[B] = fa.copy(_2 = f(fa._2))
  }

  def fileSpanLiftParser(name: String, source: String): LiftParser[F] = new LiftParser[F] {

    val memoizedLocationMap = Eval.later(LocationMap(source)).memoize

    override def lift[T](p: P[T]): P[F[T]] = {
      implicitly[LiftParser[Span.F]].lift(p).map { case (span, value) =>
        (FileSpan(name, source, memoizedLocationMap, span), value)
      }
    }
  }
}
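The point of threading Eval[LocationMap] through every span is memoization: one LocationMap per file, built lazily on the first diagnostic and shared afterwards, instead of re-scanning the source for every error. A small standalone sketch (the source literal is hypothetical):

import cats.Eval
import cats.parse.LocationMap

object LocationSketch {
  val src = "func foo() -> u64:\n    <- 1" // hypothetical source
  val map = Eval.later(LocationMap(src)).memoize

  // computed once, on first use; later lookups reuse the same map
  val lineCol = map.value.toLineCol(19) // Some((1, 0)), 0-based line/column
}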
@@ -1,35 +1,41 @@
package aqua.parser.lift

import cats.Comonad
import cats.{Comonad, Eval}
import cats.parse.{LocationMap, Parser => P}

import scala.language.implicitConversions

case class Span(startIndex: Int, endIndex: Int) {

  def focus(text: String, ctx: Int): Option[Span.Focus] = {
    val map = LocationMap(text)
    map.toLineCol(startIndex).flatMap {
      case (line, column) =>
        map
          .getLine(line)
          .map(l =>
            Span.Focus(
              (Math.max(0, line - ctx) until line).map(i => map.getLine(i).map(i -> _)).toList.flatten, {
                val (l1, l2) = l.splitAt(column)
                val (lc, l3) = l2.splitAt(endIndex - startIndex)
                (line, l1, lc, l3)
              },
              ((line + 1) to (line + ctx)).map(i => map.getLine(i).map(i -> _)).toList.flatten
            )
  def focus(locationMap: Eval[LocationMap], ctx: Int): Option[Span.Focus] = {
    val map = locationMap.value
    map.toLineCol(startIndex).flatMap { case (line, column) =>
      map
        .getLine(line)
        .map(l =>
          Span.Focus(
            (Math
              .max(0, line - ctx) until line).map(i => map.getLine(i).map(i -> _)).toList.flatten, {
              val (l1, l2) = l.splitAt(column)
              val (lc, l3) = l2.splitAt(endIndex - startIndex)
              (line, l1, lc, l3)
            },
            ((line + 1) to (line + ctx)).map(i => map.getLine(i).map(i -> _)).toList.flatten,
            column
          )
        )
    }
  }
}

object Span {

  case class Focus(pre: List[(Int, String)], line: (Int, String, String, String), post: List[(Int, String)]) {
  case class Focus(
    pre: List[(Int, String)],
    line: (Int, String, String, String),
    post: List[(Int, String)],
    column: Int
  ) {

    private lazy val lastN = post.lastOption.map(_._1).getOrElse(line._1) + 1
    private lazy val lastNSize = lastN.toString.length
@@ -79,8 +85,8 @@ object Span {
  implicit object spanLiftParser extends LiftParser[F] {

    override def lift[T](p: P[T]): P[F[T]] =
      (P.index.with1 ~ p ~ P.index).map {
        case ((s, v), e) ⇒ (Span(s, e), v)
      (P.index.with1 ~ p ~ P.index).map { case ((s, v), e) ⇒
        (Span(s, e), v)
      }
  }
||||
|
@ -1 +1 @@
|
||||
sbt.version=1.4.6
|
||||
sbt.version=1.5.0
|
||||
|
@@ -1,2 +1 @@
addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.4.6")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.15.0")
semantics/src/main/scala/aqua/semantics/CompilerState.scala (new file, 56 lines)
@@ -0,0 +1,56 @@
package aqua.semantics

import aqua.model.{EmptyModel, Model}
import aqua.parser.lexer.Token
import aqua.semantics.rules.abilities.AbilitiesState
import aqua.semantics.rules.names.NamesState
import aqua.semantics.rules.types.TypesState
import cats.data.{Chain, State}
import cats.kernel.Monoid
import cats.syntax.monoid._

case class CompilerState[F[_]](
  errors: Chain[(Token[F], String)] = Chain.empty[(Token[F], String)],
  names: NamesState[F] = NamesState[F](),
  abilities: AbilitiesState[F] = AbilitiesState[F](),
  types: TypesState[F] = TypesState[F]()
)

object CompilerState {
  type S[F[_]] = State[CompilerState[F], Model]

  implicit def compilerStateMonoid[F[_]]: Monoid[S[F]] = new Monoid[S[F]] {
    override def empty: S[F] = State.pure(EmptyModel("compiler state monoid empty"))

    override def combine(x: S[F], y: S[F]): S[F] = for {
      a <- x.get
      b <- y.get
      _ <- State.set(
        CompilerState[F](
          a.errors ++ b.errors,
          NamesState(
            Nil,
            a.names.rootArrows ++ b.names.rootArrows,
            a.names.definitions ++ b.names.definitions
          ),
          AbilitiesState(
            Nil,
            a.abilities.services ++ b.abilities.services,
            a.abilities.rootServiceIds ++ b.abilities.rootServiceIds,
            definitions = a.abilities.definitions ++ b.abilities.definitions
          ),
          TypesState(
            strict = a.types.strict ++ b.types.strict,
            definitions = a.types.definitions ++ b.types.definitions
          )
        )
      )
      am <- x
      ym <- y
    } yield {
      println(s"MONOID COMBINE $am $ym")
      am |+| ym
    }
  }

}
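A sketch of what the Monoid buys: when the linker composes an imported module with its importer, it merges both the produced Models and the accumulated states, so the import's names, services, and types become visible downstream (the wrapper object is hypothetical):

import aqua.semantics.CompilerState
import cats.syntax.monoid._

object CombineSketch {
  // relies on the implicit compilerStateMonoid above; F is any token functor
  def merge[F[_]](a: CompilerState.S[F], b: CompilerState.S[F]): CompilerState.S[F] =
    a |+| b
}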
@@ -54,13 +54,6 @@ object Semantics {
  def transpile[F[_]](ast: Ast[F]): Free[Alg[F, *], Model] =
    ast.cata(folder[F, Alg[F, *]]).value

  case class CompilerState[F[_]](
    errors: Chain[(Token[F], String)] = Chain.empty[(Token[F], String)],
    names: NamesState[F] = NamesState[F](),
    abilities: AbilitiesState[F] = AbilitiesState[F](),
    types: TypesState[F] = TypesState[F]()
  )

  def interpret[F[_]](free: Free[Alg[F, *], Model]): State[CompilerState[F], Model] = {
    import monocle.macros.syntax.all._

@@ -87,8 +80,11 @@ object Semantics {
    free.foldMap[State[CompilerState[F], *]](interpreter)
  }

  def generateModel[F[_]](ast: Ast[F]): ValidatedNec[(Token[F], String), Model] =
  def astToState[F[_]](ast: Ast[F]): State[CompilerState[F], Model] =
    (transpile[F] _ andThen interpret[F])(ast)

  def generateModel[F[_]](ast: Ast[F]): ValidatedNec[(Token[F], String), Model] =
    astToState[F](ast)
      .run(CompilerState[F]())
      .map { case (state, gen) =>
        NonEmptyChain
@@ -1,7 +1,7 @@
package aqua.semantics.rules.abilities

import aqua.semantics.rules.{ReportError, StackInterpreter}
import aqua.parser.lexer.{Name, Token, Value}
import aqua.parser.lexer.{Ability, Name, Token, Value}
import aqua.types.ArrowType
import cats.data.{NonEmptyList, NonEmptyMap, State}
import cats.~>
@@ -95,9 +95,19 @@ class AbilitiesInterpreter[F[_], X](implicit

      case ds: DefineService[F] =>
        getService(ds.name.value).flatMap {
          case Some(_) => report(ds.name, "Service with this name was already defined").as(false)
          case Some(_) =>
            getState.map(_.definitions.get(ds.name.value).exists(_ == ds.name)).flatMap {
              case true => State.pure(false)
              case false => report(ds.name, "Service with this name was already defined").as(false)
            }
          case None =>
            modify(s => s.copy(services = s.services.updated(ds.name.value, ds.arrows))).as(true)
            modify(s =>
              s.copy(
                services = s.services.updated(ds.name.value, ds.arrows),
                definitions = s.definitions.updated(ds.name.value, ds.name)
              )
            ).as(true)
        }

    }).asInstanceOf[State[X, A]]
@@ -106,7 +116,8 @@ class AbilitiesInterpreter[F[_], X](implicit
case class AbilitiesState[F[_]](
  stack: List[AbilityStackFrame[F]] = Nil,
  services: Map[String, NonEmptyMap[String, ArrowType]] = Map.empty,
  rootServiceIds: Map[String, Value[F]] = Map.empty[String, Value[F]]
  rootServiceIds: Map[String, Value[F]] = Map.empty[String, Value[F]],
  definitions: Map[String, Ability[F]] = Map.empty[String, Ability[F]]
) {

  def purgeArrows: Option[(NonEmptyList[(Name[F], ArrowType)], AbilitiesState[F])] =
@@ -1,7 +1,7 @@
package aqua.semantics.rules.names

import aqua.semantics.rules.{ReportError, StackInterpreter}
import aqua.parser.lexer.Token
import aqua.parser.lexer.{Name, Token}
import aqua.types.{ArrowType, Type}
import cats.data.State
import cats.~>
@@ -49,7 +49,11 @@ class NamesInterpreter[F[_], X](implicit lens: Lens[X, NamesState[F]], error: Re

      case dn: DefineName[F] =>
        readName(dn.name.value).flatMap {
          case Some(_) => report(dn.name, "This name was already defined in the scope").as(false)
          case Some(_) =>
            getState.map(_.definitions.get(dn.name.value).exists(_ == dn.name)).flatMap {
              case true => State.pure(false)
              case false => report(dn.name, "This name was already defined in the scope").as(false)
            }
          case None =>
            mapStackHead(
              report(dn.name, "Cannot define a variable in the root scope")
@@ -58,11 +62,21 @@ class NamesInterpreter[F[_], X](implicit lens: Lens[X, NamesState[F]], error: Re
        }
      case da: DefineArrow[F] =>
        readName(da.name.value).flatMap {
          case Some(_) => report(da.name, "This name was already defined in the scope").as(false)
          case Some(_) =>
            getState.map(_.definitions.get(da.name.value).exists(_ == da.name)).flatMap {
              case true => State.pure(false)
              case false => report(da.name, "This arrow was already defined in the scope").as(false)
            }

          case None =>
            mapStackHead(
              if (da.isRoot)
                modify(st => st.copy(rootArrows = st.rootArrows.updated(da.name.value, da.gen)))
                modify(st =>
                  st.copy(
                    rootArrows = st.rootArrows.updated(da.name.value, da.gen),
                    definitions = st.definitions.updated(da.name.value, da.name)
                  )
                )
                  .as(true)
              else
                report(da.name, "Cannot define a variable in the root scope")
@@ -78,7 +92,8 @@ class NamesInterpreter[F[_], X](implicit lens: Lens[X, NamesState[F]], error: Re

case class NamesState[F[_]](
  stack: List[NamesFrame[F]] = Nil,
  rootArrows: Map[String, ArrowType] = Map.empty
  rootArrows: Map[String, ArrowType] = Map.empty,
  definitions: Map[String, Name[F]] = Map.empty[String, Name[F]]
) {

  def allNames: LazyList[String] =
@@ -75,22 +75,31 @@ class TypesInterpreter[F[_], X](implicit lens: Lens[X, TypesState[F]], error: Re
      }

      case ddt: DefineDataType[F] =>
        getState.map(_.isDefined(ddt.name.value)).flatMap {
          case true => report(ddt.name, s"Type `${ddt.name.value}` was already defined").as(false)
          case false =>
        getState.map(_.definitions.get(ddt.name.value)).flatMap {
          case Some(n) if n == ddt.name => State.pure(false)
          case Some(_) =>
            report(ddt.name, s"Type `${ddt.name.value}` was already defined").as(false)
          case None =>
            modify(st =>
              st.copy(strict =
                st.strict.updated(ddt.name.value, ProductType(ddt.name.value, ddt.fields))
              st.copy(
                strict = st.strict.updated(ddt.name.value, ProductType(ddt.name.value, ddt.fields)),
                definitions = st.definitions.updated(ddt.name.value, ddt.name)
              )
            )
              .as(true)
        }

      case da: DefineAlias[F] =>
        getState.map(_.isDefined(da.name.value)).flatMap {
          case true => report(da.name, s"Type `${da.name.value}` was already defined").as(false)
          case false =>
            modify(st => st.copy(strict = st.strict.updated(da.name.value, da.target))).as(true)
        getState.map(_.definitions.get(da.name.value)).flatMap {
          case Some(n) if n == da.name => State.pure(false)
          case Some(_) => report(da.name, s"Type `${da.name.value}` was already defined").as(false)
          case None =>
            modify(st =>
              st.copy(
                strict = st.strict.updated(da.name.value, da.target),
                definitions = st.definitions.updated(da.name.value, da.name)
              )
            ).as(true)
        }

      case rl: ResolveLambda[F] =>
@@ -117,7 +126,8 @@ class TypesInterpreter[F[_], X](implicit lens: Lens[X, TypesState[F]], error: Re

case class TypesState[F[_]](
  fields: Map[String, (Name[F], Type)] = Map.empty[String, (Name[F], Type)],
  strict: Map[String, Type] = Map.empty[String, Type]
  strict: Map[String, Type] = Map.empty[String, Type],
  definitions: Map[String, CustomTypeToken[F]] = Map.empty[String, CustomTypeToken[F]]
) {
  def isDefined(t: String): Boolean = strict.contains(t)