Merge branch 'main' into feat/refactor-exports-LNG-289

This commit is contained in:
InversionSpaces 2024-01-24 10:42:58 +00:00
commit a7d21b8a21
155 changed files with 8495 additions and 1530 deletions

View File

@ -1,3 +1,3 @@
{
".": "0.13.3"
".": "0.13.4"
}

View File

@ -29,4 +29,4 @@ jobs:
apps: sbt
- name: Run tests
run: sbt test
run: env JAVA_OPTS="-Xmx4G" sbt test

View File

@ -1,5 +1,20 @@
# Changelog
## [0.13.4](https://github.com/fluencelabs/aqua/compare/aqua-v0.13.3...aqua-v0.13.4) (2024-01-11)
### Features
* **compiler:** `for ... rec` [LNG-307] ([#1026](https://github.com/fluencelabs/aqua/issues/1026)) ([ae32f80](https://github.com/fluencelabs/aqua/commit/ae32f8027729bfd463cddc57f857c307e1e3c709))
* **compiler:** Enhance message of type error [LNG-313] ([#1033](https://github.com/fluencelabs/aqua/issues/1033)) ([d5cd77b](https://github.com/fluencelabs/aqua/commit/d5cd77bb865433fdff46fefb48875bf8f5e585dc))
### Bug Fixes
* **compiler:** Add outside context to closures [LNG-317] ([#1038](https://github.com/fluencelabs/aqua/issues/1038)) ([85f3ecd](https://github.com/fluencelabs/aqua/commit/85f3ecdf3985c8bd3a4c68fb827968b79516f9b3))
* **compiler:** Passing closures with abilities [LNG-314] ([#1035](https://github.com/fluencelabs/aqua/issues/1035)) ([5241f52](https://github.com/fluencelabs/aqua/commit/5241f522d8bc58649f4048aada034e3cbe320eb7))
* **compiler:** Type check arrow calls on services and abilities [LNG-315] ([#1037](https://github.com/fluencelabs/aqua/issues/1037)) ([d46ee03](https://github.com/fluencelabs/aqua/commit/d46ee0347fee94055a6690a4d4b8d0e1cf29430c))
## [0.13.3](https://github.com/fluencelabs/aqua/compare/aqua-v0.13.2...aqua-v0.13.3) (2023-12-22)

View File

@ -1,6 +1,6 @@
{
"name": "@fluencelabs/aqua-api",
"version": "0.13.3",
"version": "0.13.4",
"description": "Aqua API",
"type": "module",
"main": "index.js",
@ -26,7 +26,7 @@
},
"homepage": "https://github.com/fluencelabs/aqua#readme",
"devDependencies": {
"@fluencelabs/interfaces": "0.9.0",
"@fluencelabs/interfaces": "0.10.0",
"prettier": "3.1.1"
}
}

View File

@ -19,8 +19,6 @@ import aqua.logging.{LogFormatter, LogLevels}
import aqua.model.AquaContext
import aqua.model.transform.{Transform, TransformConfig}
import aqua.parser.lexer.{LiteralToken, Token}
import aqua.parser.lift.FileSpan.F
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
import aqua.raw.ops.Call
import aqua.raw.ops.CallArrowRawTag

View File

@ -6,7 +6,6 @@ import aqua.compiler.AquaCompiled
import aqua.files.FileModuleId
import cats.data.Chain
import cats.data.Validated.{Invalid, Valid}
import cats.effect.{IO, IOApp}
import fs2.io.file.{Files, Path}
import fs2.{Stream, text}
@ -14,14 +13,16 @@ import fs2.{Stream, text}
object Test extends IOApp.Simple {
override def run: IO[Unit] = {
APICompilation
.compilePath(
"./aqua-src/antithesis.aqua",
Imports.fromMap(Map("/" -> Map("" -> List("./aqua")))),
AquaAPIConfig(targetType = TypeScriptType),
TypeScriptBackend(false, "IFluenceClient$$")
)
.flatMap { res =>
).timed
.flatMap { case (duration, res) =>
println("Compilation time: " + duration.toMillis)
val (warnings, result) = res.value.run
IO.delay {

View File

@ -16,7 +16,7 @@ import aqua.parser.expr.AbilityExpr.p
import aqua.parser.lexer.{LiteralToken, Token}
import aqua.parser.lift.FileSpan.F
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
import aqua.parser.{ArrowReturnError, LexerError, ParserError}
import aqua.raw.ConstantRaw
import aqua.raw.ops.Call
import aqua.raw.value.ValueRaw

View File

@ -2,7 +2,6 @@ package aqua.run
import aqua.backend.air.FuncAirGen
import aqua.definitions.{FunctionDef, TypeDefinition}
import aqua.io.OutputPrinter
import aqua.model.transform.{Transform, TransformConfig}
import aqua.model.{FuncArrow, ValueModel, VarModel}
import aqua.parser.lexer.CallArrowToken
@ -10,6 +9,7 @@ import aqua.parser.lift.Span
import aqua.raw.ops.{Call, CallArrowRawTag, SeqTag}
import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw}
import aqua.types.*
import cats.data.Validated.{invalid, invalidNec, invalidNel, validNec, validNel}
import cats.data.{NonEmptyList, Validated, ValidatedNec}
import cats.effect.kernel.Async
@ -18,8 +18,7 @@ import cats.syntax.flatMap.*
import cats.syntax.partialOrder.*
import cats.syntax.show.*
import cats.syntax.traverse.*
import cats.{~>, Id}
import cats.{Id, ~>}
import scala.collection.immutable.SortedMap
import scala.concurrent.ExecutionContext

View File

@ -1,3 +1,33 @@
func arr() -> string:
n = "str"
<- n
aqua M
export returnSrvAsAbility
ability MyAb:
call() -> string
service MySrv("default-id"):
call() -> string
func mySrvDefault() -> MyAb:
<- MySrv
func mySrvResolved() -> MyAb:
MySrv "resolved-id"
<- MySrv
func mySrvThird() -> MyAb:
MySrv "third-id"
<- MySrv
func useMyAb{MyAb}() -> string:
<- MyAb.call()
func returnSrvAsAbility() -> []string:
result: *string
MySrvDefault <- mySrvDefault()
MySrvResolved <- mySrvResolved()
MySrvThird <- mySrvThird()
result <- useMyAb{MySrvDefault}()
result <- useMyAb{MySrvResolved}()
result <- useMyAb{MySrvThird}()
<- result

View File

@ -120,42 +120,77 @@ object Air {
case class Comment(comment: String, air: Air) extends Air(Keyword.NA)
private def show(depth: Int, air: Air): String = {
def showNext(a: Air) = show(depth + 1, a)
private def showInternal(space: String, sb: StringBuilder, air: Air): Unit = {
val space = " " * depth
def showNext(a: Air): Unit = showInternal(space + " ", sb, a)
air match {
case Air.Comment(c, a) =>
space + "; " + c.replace("\n", "\n" + space + "; ") + "\n" +
show(depth, a)
case _ =>
s"$space(${air.keyword.value}" +
(air match {
case Air.Null ""
case Air.Never ""
case Air.Next(label) s" $label"
case Air.New(item, inst) s" ${item.show}\n${showNext(inst)}$space"
case Air.Fold(iter, label, inst, lastInst)
val l = show(depth + 1, lastInst)
s" ${iter.show} $label\n${showNext(inst)}$l$space"
case Air.Match(left, right, inst)
s" ${left.show} ${right.show}\n${showNext(inst)}$space"
case Air.Mismatch(left, right, inst)
s" ${left.show} ${right.show}\n${showNext(inst)}$space"
case Air.Par(l, r) s"\n${showNext(l)}${showNext(r)}$space"
case Air.Seq(l, r) s"\n${showNext(l)}${showNext(r)}$space"
case Air.Xor(l, r) s"\n${showNext(l)}${showNext(r)}$space"
case Air.Call(triplet, args, res)
s" ${triplet.show} [${args.map(_.show).mkString(" ")}]${res.fold("")(" " + _)}"
case Air.Ap(operand, result) s" ${operand.show} $result"
case Air.ApStreamMap(key, operand, result) s" (${key.show} ${operand.show}) $result"
case Air.Fail(operand) => s" ${operand.show}"
case Air.Canon(operand, peerId, result) s" ${peerId.show} ${operand.show} $result"
case Air.Comment(_, _) => ";; Should not be displayed"
}) + ")\n"
}
sb.append(space)
.append("; ")
.append(c.replace("\n", "\n" + space + "; "))
.append("\n")
showInternal(space, sb, a)
case _ =>
sb.append(s"$space(${air.keyword.value}")
(air match {
case Air.Null
case Air.Never
case Air.Next(label) sb.append(s" $label")
case Air.New(item, inst)
sb.append(s" ${item.show}\n")
showNext(inst)
sb.append(space)
case Air.Fold(iter, label, inst, lastInst)
sb.append(" ").append(s" ${iter.show} $label\n")
showNext(inst)
showNext(lastInst)
sb.append(space)
case Air.Match(left, right, inst)
sb.append(s" ${left.show} ${right.show}\n")
showNext(inst)
sb.append(space)
case Air.Mismatch(left, right, inst)
sb.append(s" ${left.show} ${right.show}\n")
showNext(inst)
sb.append(space)
case Air.Par(l, r)
sb.append("\n")
showNext(l)
showNext(r)
sb.append(space)
case Air.Seq(l, r)
sb.append("\n")
showNext(l)
showNext(r)
sb.append(space)
case Air.Xor(l, r)
sb.append("\n")
showNext(l)
showNext(r)
sb.append(space)
case Air.Call(triplet, args, res)
sb.append(s" ${triplet.show} [${args.map(_.show).mkString(" ")}]${res.fold("")(" " + _)}")
case Air.Ap(operand, result)
sb.append(s" ${operand.show} $result")
case Air.ApStreamMap(key, operand, result)
sb.append(s" (${key.show} ${operand.show}) $result")
case Air.Fail(operand) => sb.append(s" ${operand.show}")
case Air.Canon(operand, peerId, result)
sb.append(s" ${peerId.show} ${operand.show} $result")
case Air.Comment(_, _) => ";; Should not be displayed"
})
sb.append(")\n")
}
}
private def show(depth: Int, air: Air): String = {
val sb = StringBuilder()
val space = " " * depth
showInternal(space, sb, air)
sb.result()
}
implicit val s: Show[Air] = Show.show(show(0, _))

View File

@ -1,6 +1,6 @@
import BundleJS.*
val aquaVersion = "0.13.3"
val aquaVersion = "0.13.4"
val scalaV = "3.3.1"
val catsV = "2.10.0"
@ -10,7 +10,7 @@ val scalaTestV = "3.2.17"
val scalaTestScalaCheckV = "3.2.17.0"
val sourcecodeV = "0.3.0"
// Snapshot is used to get latest fixes
val fs2V = "3.9.3-37-8badc91-SNAPSHOT"
val fs2V = "3.10-365636d"
val catsEffectV = "3.6-1f95fd7"
val declineV = "2.3.0"
val circeVersion = "0.14.2"
@ -188,7 +188,7 @@ lazy val inline = crossProject(JVMPlatform, JSPlatform)
.crossType(CrossType.Pure)
.in(file("model/inline"))
.settings(commons)
.dependsOn(raw, model)
.dependsOn(raw, model, mangler)
lazy val transform = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
@ -207,7 +207,7 @@ lazy val semantics = crossProject(JVMPlatform, JSPlatform)
"dev.optics" %%% "monocle-macro" % monocleV
)
)
.dependsOn(raw, parser, errors)
.dependsOn(raw, parser, errors, mangler)
lazy val compiler = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
@ -253,6 +253,17 @@ lazy val logging = crossProject(JVMPlatform, JSPlatform)
)
)
lazy val mangler = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("utils/mangler"))
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-core" % catsV
)
)
lazy val constants = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)

View File

@ -1,30 +1,20 @@
package aqua.compiler
import aqua.backend.Backend
import aqua.compiler.AquaError.{ParserError as AquaParserError, *}
import aqua.linker.Linker.link
import aqua.linker.{AquaModule, Linker, Modules}
import aqua.model.AquaContext
import aqua.parser.lift.{LiftParser, Span}
import aqua.parser.{Ast, ParserError}
import aqua.raw.RawPart.Parts
import aqua.raw.{RawContext, RawPart}
import aqua.res.AquaRes
import aqua.semantics.header.{HeaderHandler, HeaderSem, Picker}
import aqua.semantics.{CompilerState, Semantics}
import aqua.semantics.{SemanticError, SemanticWarning}
import aqua.semantics.header.{HeaderHandler, Picker}
import aqua.semantics.{SemanticError, Semantics}
import cats.arrow.FunctionK
import cats.data.*
import cats.data.Validated.{Invalid, Valid, validNec}
import cats.parse.Parser0
import cats.syntax.applicative.*
import cats.syntax.either.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.monoid.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.{Comonad, Functor, Monad, Monoid, Order, ~>}
import cats.{Comonad, Monad, Monoid, Order, ~>}
import scribe.Logging
class AquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad, C: Monoid: Picker](
@ -33,43 +23,31 @@ class AquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad, C: Monoid: Picker](
) extends Logging {
type Err = AquaError[I, E, S]
type Ctx = NonEmptyMap[I, C]
type CompileWarns = [A] =>> CompileWarnings[S][A]
type CompileRes = [A] =>> CompileResult[I, E, S][A]
type CompiledCtx = CompileRes[Ctx]
type CompiledCtxT = CompiledCtx => CompiledCtx
// Transpilation function for module
// (Imports contexts => Compilation result)
type TP = Map[String, C] => CompileRes[C]
private def linkModules(
modules: Modules[I, Err, CompiledCtxT],
cycleError: Linker.DepCycle[AquaModule[I, Err, CompiledCtxT]] => Err
): CompileRes[Map[I, C]] = {
logger.trace("linking modules...")
// By default, provide an empty context for this module's id
val empty: I => CompiledCtx = i => NonEmptyMap.one(i, Monoid[C].empty).pure[CompileRes]
for {
linked <- Linker
.link(modules, cycleError, empty)
.toEither
.toEitherT[CompileWarns]
res <- EitherT(
linked.toList.traverse { case (id, ctx) =>
ctx
.map(
/**
* NOTE: This should be safe
* as result for id should contain itself
*/
_.apply(id).map(id -> _).get
)
.toValidated
}.map(_.sequence.toEither)
)
} yield res.toMap
}
private def transpile(body: Ast[S]): TP =
imports =>
for {
// Process header, get initial context
headerSem <- headerHandler
.sem(imports, body.head)
.toCompileRes
// Analyze the body, with prepared initial context
_ = logger.trace("semantic processing...")
processed <- semantics
.process(body, headerSem.initCtx)
.toCompileRes
// Handle exports, declares - finalize the resulting context
rc <- headerSem
.finCtx(processed)
.toCompileRes
} yield rc
def compileRaw(
sources: AquaSources[F, E, I],
@ -77,48 +55,18 @@ class AquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad, C: Monoid: Picker](
): F[CompileRes[Map[I, C]]] = {
logger.trace("starting resolving sources...")
new AquaParser[F, E, I, S](sources, parser)
.resolve[CompiledCtx](mod =>
context =>
for {
// Context with prepared imports
ctx <- context
imports = mod.imports.flatMap { case (fn, id) =>
ctx.apply(id).map(fn -> _)
}
header = mod.body.head
headerSem <- headerHandler
.sem(imports, header)
.toCompileRes
// Analyze the body, with prepared initial context
_ = logger.trace("semantic processing...")
processed <- semantics
.process(
mod.body,
headerSem.initCtx
)
.toCompileRes
// Handle exports, declares - finalize the resulting context
rc <- headerSem
.finCtx(processed)
.toCompileRes
/**
* Here we build a map of contexts while processing modules.
* Should not linker provide this info inside this process?
* Building this map complicates things a lot.
*/
} yield NonEmptyMap.one(mod.id, rc)
)
.value
.map(resolved =>
for {
modules <- resolved.toEitherT[CompileWarns]
linked <- linkModules(
modules,
cycle => CycleError(cycle.map(_.id))
)
} yield linked
)
val parsing = new AquaParser(sources, parser)
parsing.resolve.value.map(resolution =>
for {
// Lift resolution to CompileRes
modules <- resolution.toEitherT[CompileWarns]
// Generate transpilation functions for each module
transpiled = modules.map(body => transpile(body))
// Link modules
linked <- Linker.link(transpiled, CycleError.apply)
} yield linked
)
}
private val warningsK: semantics.Warnings ~> CompileWarns =

View File

@ -5,21 +5,22 @@ import aqua.linker.{AquaModule, Modules}
import aqua.parser.head.{FilenameExpr, ImportExpr}
import aqua.parser.lift.{LiftParser, Span}
import aqua.parser.{Ast, ParserError}
import aqua.syntax.eithert.fromValidatedF
import cats.data.{Chain, EitherNec, EitherT, NonEmptyChain, Validated, ValidatedNec}
import cats.parse.Parser0
import cats.syntax.either.*
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.monad.*
import cats.syntax.foldable.*
import cats.syntax.traverse.*
import cats.syntax.validated.*
import cats.data.Chain.*
import cats.data.Validated.*
import cats.data.{Chain, EitherNec, EitherT, NonEmptyChain, Validated, ValidatedNec}
import cats.parse.Parser0
import cats.syntax.applicative.*
import cats.syntax.either.*
import cats.syntax.flatMap.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.monad.*
import cats.syntax.parallel.*
import cats.syntax.traverse.*
import cats.{~>, Comonad, Monad}
import cats.syntax.validated.*
import cats.{Comonad, Monad, ~>}
import scribe.Logging
// TODO: add tests
@ -33,108 +34,82 @@ class AquaParser[F[_]: Monad, E, I, S[_]: Comonad](
private type FE[A] = EitherT[F, NonEmptyChain[Err], A]
// Parse all the source files
private def parseSources: F[ValidatedNec[Err, Chain[(I, Body)]]] =
sources.sources.map(
_.leftMap(_.map(SourcesError.apply)).andThen(
_.traverse { case (i, s) =>
parser(i)(s).bimap(
_.map(AquaParserError.apply),
ast => i -> ast
)
}
)
// Parse one source (text)
private def parse(id: I, src: String): EitherNec[Err, (I, Body)] =
parser(id)(src).toEither.bimap(
_.map(AquaParserError.apply),
ast => id -> ast
)
// Parse all the source files
private def parseSources: FE[Chain[(I, Body)]] =
for {
srcs <- EitherT
.fromValidatedF(sources.sources)
.leftMap(_.map(SourcesError.apply))
parsed <- srcs
.parTraverse(parse.tupled)
.toEitherT
} yield parsed
// Load one module (parse, resolve imports)
private def loadModule(id: I): FE[AquaModule[I, Err, Body]] =
for {
src <- EitherT
.fromValidatedF(sources.load(id))
.leftMap(_.map(SourcesError.apply))
parsed <- parse(id, src).toEitherT
(id, ast) = parsed
resolved <- resolveImports(id, ast)
} yield resolved
// Resolve imports (not parse, just resolve) of the given file
private def resolveImports(id: I, ast: Body): F[ValidatedNec[Err, AquaModule[I, Err, Body]]] =
ast.collectHead { case fe: FilenameExpr[S] =>
private def resolveImports(id: I, ast: Body): FE[AquaModule[I, Err, Body]] =
ast.head.collect { case fe: FilenameExpr[S] =>
fe.fileValue -> fe.token
}.value.traverse { case (filename, token) =>
sources
.resolveImport(id, filename)
.map(
_.bimap(
_.map(ResolveImportsError(id, token, _): Err),
importId => importId -> (filename, ImportError(token): Err)
)
}.parTraverse { case (filename, token) =>
EitherT
.fromValidatedF(
sources.resolveImport(id, filename)
)
}.map(_.sequence.map { collected =>
AquaModule[I, Err, Body](
id,
.bimap(
_.map(ResolveImportsError(id, token, _): Err),
importId => importId -> (filename, ImportError(token): Err)
)
}.map { collected =>
AquaModule(
id = id,
// How filenames correspond to the resolved IDs
collected.map { case (i, (fn, _)) =>
imports = collected.map { case (i, (fn, _)) =>
fn -> i
}.toList.toMap[String, I],
}.toList.toMap,
// Resolved IDs to errors that point to the import in source code
collected.map { case (i, (_, err)) =>
dependsOn = collected.map { case (i, (_, err)) =>
i -> err
}.toList.toMap[I, Err],
ast
}.toList.toMap,
body = ast
)
})
// Parse sources, convert to modules
private def sourceModules: F[ValidatedNec[Err, Modules[I, Err, Body]]] =
parseSources.flatMap {
case Validated.Valid(srcs) =>
srcs.traverse { case (id, ast) =>
resolveImports(id, ast)
}.map(_.sequence)
case Validated.Invalid(errs) =>
errs.invalid.pure[F]
}.map(
_.map(
_.foldLeft(Modules[I, Err, Body]())(
_.add(_, toExport = true)
)
)
)
private def loadModule(imp: I): F[ValidatedNec[Err, AquaModule[I, Err, Body]]] =
sources
.load(imp)
.map(_.leftMap(_.map(SourcesError.apply)).andThen { src =>
parser(imp)(src).leftMap(_.map(AquaParserError.apply))
})
.flatMap {
case Validated.Valid(ast) =>
resolveImports(imp, ast)
case Validated.Invalid(errs) =>
errs.invalid.pure[F]
}
private def resolveModules(
modules: Modules[I, Err, Body]
): F[ValidatedNec[Err, Modules[I, Err, Ast[S]]]] =
modules.dependsOn.toList.traverse { case (moduleId, unresolvedErrors) =>
loadModule(moduleId).map(_.leftMap(_ ++ unresolvedErrors))
}.map(
_.sequence.map(
_.foldLeft(modules)(_ add _)
)
).flatMap {
case Validated.Valid(ms) if ms.isResolved =>
ms.validNec.pure[F]
case Validated.Valid(ms) =>
resolveModules(ms)
case err =>
err.pure[F]
}
private def resolveSources: FE[Modules[I, Err, Ast[S]]] =
// Load modules (parse, resolve imports) of all the source files
private lazy val loadModules: FE[Modules[I, Err, Body]] =
for {
ms <- EitherT(
sourceModules.map(_.toEither)
)
res <- EitherT(
resolveModules(ms).map(_.toEither)
)
} yield res
srcs <- parseSources
modules <- srcs.parTraverse(resolveImports.tupled)
} yield Modules.from(modules)
def resolve[T](
transpile: AquaModule[I, Err, Body] => T => T
): FE[Modules[I, Err, T => T]] =
resolveSources.map(_.mapModuleToBody(transpile))
// Resolve modules (load all the dependencies)
private def resolveModules(
modules: Modules[I, Err, Body]
): FE[Modules[I, Err, Ast[S]]] =
modules.iterateUntilM(ms =>
// Load all modules that are dependencies of the current modules
ms.dependsOn.toList.parTraverse { case (moduleId, unresolvedErrors) =>
loadModule(moduleId).leftMap(_ ++ unresolvedErrors)
}.map(ms.addAll) // Add all loaded modules to the current modules
)(_.isResolved)
lazy val resolve: FE[Modules[I, Err, Body]] =
loadModules >>= resolveModules
}

View File

@ -1,33 +1,22 @@
package aqua.compiler
import aqua.backend.Backend
import aqua.compiler.AquaError.*
import aqua.backend.{AirFunction, Backend}
import aqua.linker.{AquaModule, Linker, Modules}
import aqua.model.AquaContext
import aqua.parser.lift.{LiftParser, Span}
import aqua.parser.{Ast, ParserError}
import aqua.raw.RawPart.Parts
import aqua.raw.{RawContext, RawPart}
import aqua.res.AquaRes
import aqua.raw.RawContext
import aqua.semantics.RawSemantics
import aqua.semantics.header.{HeaderHandler, HeaderSem}
import aqua.semantics.{CompilerState, RawSemantics, Semantics}
import cats.data.*
import cats.data.Validated.{invalid, validNec, Invalid, Valid}
import cats.parse.Parser0
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.monoid.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.syntax.either.*
import cats.{~>, Comonad, Monad, Monoid, Order}
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.{Comonad, Monad, Monoid, Order}
import scribe.Logging
import scala.collection.MapView
object CompilerAPI extends Logging {
private def toAquaProcessed[I: Order, E, S[_]: Comonad](

View File

@ -89,7 +89,7 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
val src = Map(
"index.aqua" ->
"""module Foo declares X
"""aqua Foo declares X
|
|export foo, foo2 as foo_two, X
|
@ -135,7 +135,11 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
it should "create right topology" in {
val src = Map(
"index.aqua" ->
"""service Op("op"):
"""aqua Test
|
|export exec
|
|service Op("op"):
| identity(s: string) -> string
|
|func exec(peers: []string) -> []string:
@ -224,7 +228,11 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
it should "not generate hop back with empty response" in {
val src = Map(
"index.aqua" ->
"""service Op("op"):
"""aqua HopBackTest
|
|export exec
|
|service Op("op"):
| call(s: string)
|
|func exec(peers: []string):
@ -288,7 +296,7 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
val src = Map(
"index.aqua" ->
"""module Import
"""aqua Import
|import foobar from "export2.aqua"
|
|use foo as f from "export2.aqua" as Exp
@ -307,7 +315,7 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
)
val imports = Map(
"export2.aqua" ->
"""module Export declares foobar, foo
"""aqua Export declares foobar, foo
|
|func bar() -> string:
| <- " I am MyFooBar bar"
@ -323,7 +331,7 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
|
|""".stripMargin,
"../gen/OneMore.aqua" ->
"""
"""aqua Test declares OneMore
|service OneMore:
| more_call()
| consume(s: string)
@ -379,7 +387,10 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
it should "optimize math inside stream join" in {
val src = Map(
"main.aqua" -> """
"main.aqua" -> """aqua Test
|
|export main
|
|func main(i: i32):
| stream: *string
| stream <<- "a"
@ -434,8 +445,7 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers with Inside {
it should "allow returning and passing services as abilities" in {
val src = Map(
"main.aqua" -> """
|aqua Test
"main.aqua" -> """aqua Test
|
|export test
|

View File

@ -1,16 +0,0 @@
import "@fluencelabs/aqua-dht/pubsub.aqua"
import "@fluencelabs/aqua-dht/dht.aqua"
import "@fluencelabs/aqua-lib/builtin.aqua"
export getNeighbours, initTopicAndSubscribe, findSubscribers
func put_value(initial_peer: string, value: string) -> string:
initTopicAndSubscribe(initial_peer, value, nil, nil)
<- "OK"
func registerKeyPutValue(node_id: string, key: string, value: string, relay_id: ?string, service_id: ?string) -> []string:
nodes <- getNeighbours(key)
for n <- nodes par:
on n:
t <- Peer.timestamp_sec()
<- nodes

View File

@ -1,9 +1,9 @@
aqua Main
use DECLARE_CONST, decl_bar from "imports_exports/declare.aqua" as Declare
export handleAb, SomeService, bug214, checkAbCalls, bugLNG258_1, bugLNG258_2, bugLNG258_3, multipleAbilityWithClosure, MySrv, returnSrvAsAbility
use DECLARE_CONST, decl_bar from "imports_exports/declare.aqua" as Declare
service SomeService("wed"):
getStr(s: string) -> string

View File

@ -0,0 +1,23 @@
aqua M
export bugLNG314
ability WorkerJob:
runOnSingleWorker(w: string) -> string
func disjoint_run{WorkerJob}() -> -> string:
run = func () -> string:
r <- WorkerJob.runOnSingleWorker("worker")
<- r
<- run
func runJob(j: -> string) -> string:
<- j()
func bugLNG314() -> string:
job2 = () -> string:
<- "strstrstr"
worker_job = WorkerJob(runOnSingleWorker = job2)
subnet_job <- disjoint_run{worker_job}()
res <- runJob(subnet_job)
<- res

View File

@ -1,24 +0,0 @@
data SomeData:
value: string
otherValue: u64
data SubData:
someStr: string
someNum: i32
data SecondData:
value: string
complex: SubData
data ThirdData:
value: string
complex: SomeData
service ComplexService("op-ha"):
call(d: SomeData, sd: SecondData) -> SubData
identity() -> SecondData
func doSmth(d: SomeData, d2: SomeData, sd: SecondData, c: SubData, SecondData -> ThirdData) -> ThirdData:
res <- ComplexService.call(d, sd)
res2 <- c(res, sd)
<- res2

View File

@ -1,3 +1,7 @@
aqua Assignment
export doSmth
data Prod:
value: string

View File

@ -1,6 +1,11 @@
aqua CallArrow
export passFunctionAsArg, reproArgsBug426
import "println.aqua"
import "@fluencelabs/aqua-lib/builtin.aqua"
-- functions like `c` are called an 'arrow function' in Aqua
-- `c` passed to a function from a client, so, it could be called only on a client
func passFunctionAsArg(node: string, str: string, c: string -> string):

View File

@ -1,3 +1,7 @@
aqua Canon
export Ser, bugLng79
data Record:
relay_id: []string
peer_id: string

View File

@ -1,9 +1,9 @@
module Closure declares *
aqua Closure declares *
export LocalSrv, closureIn, closureOut, closureBig, closureOut2, lng58Bug, multipleClosuresBugLNG262, lng317Bug
import "@fluencelabs/aqua-lib/builtin.aqua"
export LocalSrv, closureIn, closureOut, closureBig, closureOut2, lng58Bug, multipleClosuresBugLNG262
service MyOp("op"):
identity(s: string) -> string
@ -80,4 +80,38 @@ func create(a: i8) -> -> i8:
func multipleClosuresBugLNG262() -> i8, i8:
arr1 <- create(1)
arr2 <- create(2)
<- arr1(), arr2()
<- arr1(), arr2()
ability WorkerJob:
runOnSingleWorker(w: string) -> []string
func runJob(j: -> []string) -> []string:
<- j()
func disjoint_run{WorkerJob}() -> -> []string:
run = func () -> []string:
r <- WorkerJob.runOnSingleWorker("a")
<- r
<- run
func empty() -> string:
a = "empty"
<- a
func lng317Bug() -> []string:
res: *string
outer = () -> string:
<- empty()
clos = () -> -> []string:
job2 = () -> []string:
res <- outer()
res <- MyOp.identity("identity")
<- res
<- job2
worker_job = WorkerJob(runOnSingleWorker = clos())
subnet_job <- disjoint_run{worker_job}()
finalRes <- runJob(subnet_job)
<- finalRes

View File

@ -1,3 +1,7 @@
aqua Co
export CoService, coFunc
import "@fluencelabs/aqua-lib/builtin.aqua"
service CoService("coservice-id"):

View File

@ -1,3 +1,7 @@
aqua CollectionSugar
export arraySugar, streamSugar, optionSugar, GetArr, bugLNG59
import "@fluencelabs/aqua-lib/builtin.aqua"
func arraySugar(n: u32, m: u32) -> []u32, []u32:

View File

@ -1,3 +1,7 @@
aqua Complex
export TestS, doStuff
import "helloWorld.aqua"
import "println.aqua"
import "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,3 +1,7 @@
aqua Constants
export Getter, callConstant, timestampAndTtl
import "@fluencelabs/aqua-lib/builtin.aqua"
service Getter("test"):

View File

@ -1,3 +1,7 @@
aqua DataAlias
export NodeIdGetter, getAliasedData
-- set `PeerId` name to be a type alias for `string` type
alias PeerId : string

View File

@ -1,3 +1,5 @@
aqua Example
service Peer("peer"):
is_connected: string -> bool

View File

@ -1,3 +1,7 @@
aqua Fold
export iterateAndPrint, iterateAndPrintParallel, forBug499
import "println.aqua"
import "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,9 +1,9 @@
module FoldJoin
import "@fluencelabs/aqua-lib/builtin.aqua"
aqua FoldJoin
export getTwoResults
import "@fluencelabs/aqua-lib/builtin.aqua"
service Op2("op"):
identity(s: u64)

View File

@ -1,3 +1,7 @@
aqua Func
export TestSrv, testFunc
service TestSrv("test-service-id"):
str: -> string

View File

@ -1,4 +1,4 @@
module Funcs declares main, A, calc
aqua Funcs declares main, A, calc
export main, A, calc, calc2, ifCalc, bugLNG260

View File

@ -1,3 +1,7 @@
aqua Functors
export lng119Bug
func lng119Bug() -> []u32:
nums = [1,2,3,4,5]
results: *u32

View File

@ -1,3 +1,7 @@
aqua HelloWorld
export StringExtra, helloWorld
service StringExtra("service-id"):
addNameToHello: string -> string

View File

@ -1,3 +1,7 @@
aqua If
export ifElseCall, ifElseNumCall, ifCorrectXorWrap, bugLNG69
import "println.aqua"
import "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,4 +1,4 @@
module FooBars declares decl_foo, decl_bar, SuperFoo, DECLARE_CONST, DECLARE_CONST2
aqua FooBars declares decl_foo, decl_bar, SuperFoo, DECLARE_CONST, DECLARE_CONST2
export SuperFoo
const DECLARE_CONST = "declare_const"

View File

@ -1,4 +1,4 @@
module Export declares foobar, foo
aqua Export declares foobar, foo
import Op as Noop from "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,5 +1,5 @@
-- exports3.aqua
module Export3 declares *
aqua Export3 declares *
import Op as Noop from "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,4 +1,4 @@
module Exports declares some_string, MyExportSrv, EXPORT_CONST, some_random_func
aqua Exports declares some_string, MyExportSrv, EXPORT_CONST, some_random_func
import Op as Noop from "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,2 +1,4 @@
aqua OneMore declares OneMore
service OneMore:
more_call()

View File

@ -1,4 +1,4 @@
module Import
aqua Import
import foobar from "export2.aqua"
use foo as f from "export2.aqua" as Exp

View File

@ -1,5 +1,5 @@
-- imports3.aqua
module Import3 declares *
aqua Import3 declares *
import Op as Noop from "@fluencelabs/aqua-lib/builtin.aqua"
export foo_wrapper

View File

@ -1,3 +1,5 @@
aqua ImportsEmpty
import decl_foo, decl_bar from "declare.aqua"
use DECLARE_CONST, SuperFoo, DECLARE_CONST2 as DC2 from "declare.aqua" as Declare
import Op as Noop from "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,3 +1,7 @@
aqua Imports
export StringService, concat_foobars
import decl_foo, decl_bar from "declare.aqua"
use DECLARE_CONST, SuperFoo, DECLARE_CONST2 as DC2 from "declare.aqua" as Declare
import Op as Noop from "@fluencelabs/aqua-lib/builtin.aqua"

View File

@ -1,3 +1,4 @@
aqua SubImport declares *
alias SomeString : string

View File

@ -1,3 +1,7 @@
aqua Join
export joinIdxLocal, joinIdxRelay, joinIdx
import "@fluencelabs/aqua-lib/builtin.aqua"
func joinIdxLocal(idx: i16, nodes: []string) -> []string:

View File

@ -1,3 +1,7 @@
aqua MultiReturn
export GetStr, GetNum, multiReturnFunc
import "@fluencelabs/aqua-lib/builtin.aqua"
service GetStr("multiret-test"):

View File

@ -1,3 +1,7 @@
aqua NestedData
export Test, test
data NestedType:
val: string

View File

@ -1,3 +1,7 @@
aqua NestedFuncs
export OpH, d
import "@fluencelabs/aqua-lib/builtin.aqua"
service OpH("opa"):

View File

@ -1,3 +1,7 @@
aqua On
export getPeerExternalAddresses
import "@fluencelabs/aqua-lib/builtin.aqua"
func getPeerExternalAddresses(otherNodePeerId: string) -> []string:

View File

@ -1,3 +1,7 @@
aqua OnErrorPropagation
export Test, onPropagate, nestedOnPropagate, seqOnPropagate
service Test("test-service"):
fail(err: string)

View File

@ -1,3 +1,7 @@
aqua Option
export SomeS, useOptional, returnOptional, returnNone
import "@fluencelabs/aqua-lib/builtin.aqua"
service SomeS("test2"):

View File

@ -1,3 +1,7 @@
aqua OptionGen
export OptionString, emptyString, checkEmpty, checkNoneEmpty
service OptionString("opt_str"):
checkOption(str: ?string) -> string

View File

@ -1,3 +1,7 @@
aqua Par
export ParService, parFunc, testTimeout
import "@fluencelabs/aqua-lib/builtin.aqua"
service ParService("parservice-id"):

View File

@ -1,3 +1,7 @@
aqua ParSeq
export testParSeq
import "@fluencelabs/aqua-lib/builtin.aqua"
service NumOp("op"):

View File

@ -1,3 +1,7 @@
aqua PassArgs
export AquaDHT, create_client_util, bugLNG60
import Op from "@fluencelabs/aqua-lib/builtin.aqua"
service AquaDHT("test-dht"):

View File

@ -1,3 +1,7 @@
aqua Println declares *
export Println, print
service Println("println-service-id"):
print: string -> ()

View File

@ -1,3 +1,7 @@
aqua PushToStream
export OpA, get_results
service OpA("pop"):
get_str() -> string

View File

@ -1,13 +0,0 @@
service YesNoService("yesno"):
get() -> string
func recursiveStream() -> []string, []string:
result: *string
loop: *string
loop <<- "yes"
for l <- loop:
if l == "yes":
loop <- YesNoService.get()
result <<- "success"
<- result, loop

View File

@ -0,0 +1,22 @@
aqua MultiRec
export TestService, multiRecStream
-- Host-side test service: maps one stream element to zero or more new ones.
service TestService("test-srv"):
    handle(i: i32) -> []i32

-- Recursive stream example where each handled element may spawn several
-- successors. Feeds TestService results back into `loop` until `target`
-- is seen, then returns every visited value.
-- NOTE(review): indentation reconstructed from a diff rendering — confirm
-- nesting against the committed file.
func multiRecStream(init: i32, target: i32) -> []i32:
    result: *string
    loop: *i32
    loop <<- init
    -- `rec` mode: iteration also visits elements pushed during the loop
    for l <- loop rec:
        news <- TestService.handle(l)
        for n <- news:
            loop <<- n
        if l == target:
            result <<- "done"
    -- block until the terminating marker has been pushed
    join result!
    <- loop

View File

@ -0,0 +1,19 @@
aqua Nested
export nested
-- Nested recursion over the same stream: for each i < n the inner loop
-- replays everything currently in `iterator`, so `result` accumulates a
-- triangular number of elements (n * (n + 1) / 2 pushes in total).
-- NOTE(review): indentation reconstructed from a diff rendering — confirm
-- nesting against the committed file.
func nested(n: u32) -> []u32:
    result: *u32
    iterator: *u32
    iterator <<- 0
    for i <- iterator rec:
        if i < n:
            -- inner recursive pass over the same stream
            for j <- iterator rec:
                result <<- j
            iterator <<- i + 1
    if n > 0:
        -- wait for the last of the n*(n+1)/2 expected pushes
        join result[n * (n + 1) / 2 - 1]
    <- result

View File

@ -0,0 +1,29 @@
aqua Pipeline
export pipelineStream
-- Three-stage recursive stream pipeline: loop1 counts up to `target`,
-- loop2 derives values from loop1, loop3 fans each loop2 element out into
-- three consecutive values; returns loop3 once `target` has been observed.
-- NOTE(review): indentation reconstructed from a diff rendering — the
-- nesting of the `loop2 <<- l * 3` push is an assumption; confirm against
-- the committed file.
func pipelineStream(init: i32, target: i32) -> []i32:
    result: *string
    loop1: *i32
    loop2: *i32
    loop3: *i32
    loop1 <<- init
    for l <- loop1 rec:
        if l < target:
            loop1 <<- l + 1
            loop2 <<- l * 3
    for l <- loop2 rec:
        loop3 <<- l
        loop3 <<- l + 1
        loop3 <<- l + 2
    for l <- loop3 rec:
        if l == target:
            result <<- "success"
    -- block until the target value has been seen in the last stage
    join result!
    <- loop3

View File

@ -0,0 +1,18 @@
aqua Range
export range
-- Builds the half-open integer range [a, b) with a recursive stream:
-- each iteration records the current value and pushes its successor.
-- Returns an empty array when b <= a.
func range(a: i32, b: i32) -> []i32:
    result: *i32
    iterator: *i32
    iterator <<- a
    for i <- iterator rec:
        if i < b:
            result <<- i
            iterator <<- i + 1
    if b > a:
        -- wait for the last of the (b - a) expected elements
        join result[b - a - 1]
    <- result

View File

@ -0,0 +1,19 @@
aqua RemoteRec
export RemoteSrv, remoteRecStream
-- Remote service invoked on another peer during the recursive loop.
service RemoteSrv("remote-srv"):
    handle(i: i32) -> i32

-- Recursive stream where each step is executed on `friend` (reached via
-- `friendRelay`); loops until the handled value reaches `target`.
-- NOTE(review): indentation reconstructed from a diff rendering — confirm
-- nesting of `on ... via ...` against the committed file.
func remoteRecStream(init: i32, target: i32, friend: string, friendRelay: string) -> []i32:
    loop: *i32
    loop <<- init
    for l <- loop rec:
        on friend via friendRelay:
            if l < target:
                loop <- RemoteSrv.handle(l)
    -- wait until (target - init + 1) elements have accumulated
    join loop[target - init]
    <- loop

View File

@ -0,0 +1,21 @@
aqua YesNo
export YesNoService, yesNoStream
-- Host-side test service answering "yes" or "no".
service YesNoService("yesno"):
    get() -> string

-- Recursive stream loop: keeps asking YesNoService while the last answer
-- was "yes"; the first non-"yes" answer terminates the loop. Returns the
-- full history of answers (including the seed "yes").
func yesNoStream() -> []string:
    result: *string
    loop: *string
    loop <<- "yes"
    for l <- loop rec:
        if l == "yes":
            loop <- YesNoService.get()
        else:
            result <<- "success"
    -- block until the terminating answer has been observed
    join result!
    <- loop

View File

@ -1,9 +1,9 @@
aqua ReturnArrow
import "@fluencelabs/aqua-lib/builtin.aqua"
export callReturnedArrow, callReturnedChainArrow
import "@fluencelabs/aqua-lib/builtin.aqua"
func returnCall(arg: string) -> string -> string, string:
str <- Op.concat_strings(arg, " literal")
closure = (s: string) -> string, string:

View File

@ -1,2 +1,6 @@
aqua ReturnLiteral
export returnLiteral
func returnLiteral() -> string:
<- "some literal"

View File

@ -1,8 +1,5 @@
aqua Stream
import "@fluencelabs/aqua-lib/builtin.aqua"
import "println.aqua"
export Stringer
export checkStreams, returnStreamFromFunc
export stringEmpty, returnEmptyLiteral
@ -10,6 +7,9 @@ export returnNilLength, stringNone
export streamFunctor, streamAssignment
export streamIntFunctor, streamJoin
import "@fluencelabs/aqua-lib/builtin.aqua"
import "println.aqua"
service Stringer("stringer-id"):
returnString: string -> string

View File

@ -1,4 +1,4 @@
module Ret declares *
aqua Ret declares *
export someFunc

View File

@ -1,4 +1,6 @@
export accumRes, bugLNG63, bugLNG63_2
aqua StreamCan
export accumRes, bugLNG63, bugLNG63_2, bugLNG63_3
func toOpt(s: string) -> ?string:
str: *string

View File

@ -1,3 +1,7 @@
aqua StreamRestriction
export streamFold, streamRes
func streamFold(arr: []string) -> []string:
res: *string
for n <- arr:

View File

@ -1,3 +1,7 @@
aqua StreamResults
export DTGetter, use_name1, use_name2
data DT:
field: string

View File

@ -1,9 +1,9 @@
aqua Aaa
import "@fluencelabs/aqua-lib/builtin.aqua"
export structuralTypingTest
import "@fluencelabs/aqua-lib/builtin.aqua"
data WideData:
s: string
n: u32

View File

@ -1,3 +1,7 @@
aqua SubImportUsage
export subImportUsage, ConcatSubs
import "imports_exports/subImport.aqua"
service ConcatSubs("concat_subs"):

View File

@ -1,3 +1,7 @@
aqua TryCatch
export tryCatchTest
import "@fluencelabs/aqua-lib/builtin.aqua"
service Unexisted("unex"):

View File

@ -1,3 +1,7 @@
aqua TryOtherwise
export tryOtherwiseTest
service Unexisted("unex"):
getStr() -> string

6184
integration-tests/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -31,8 +31,7 @@
},
"prettier": {},
"devDependencies": {
"@fluencelabs/aqua-api": "0.13.3",
"@fluencelabs/aqua-dht": "0.2.5",
"@fluencelabs/aqua-api": "0.13.4",
"@fluencelabs/aqua-lib": "0.9.0",
"@types/jest": "29.5.2",
"@types/node": "18.19.3",
@ -44,7 +43,7 @@
},
"dependencies": {
"@fluencelabs/fluence-network-environment": "1.1.2",
"@fluencelabs/js-client": "0.5.5",
"@fluencelabs/js-client": "0.6.0",
"deep-equal": "2.2.1",
"loglevel": "1.8.1"
},

View File

@ -40,6 +40,7 @@ import {
multipleAbilityWithClosureCall,
returnSrvAsAbilityCall,
} from "../examples/abilityCall.js";
import { bugLNG314Call } from "../examples/abilityClosureCall.js";
import {
nilLengthCall,
nilLiteralCall,
@ -90,7 +91,7 @@ import {
streamArgsCall,
modifyStreamCall,
returnDerivedStreamCall,
lng280BugWithForEmptyStreamFuncCall
lng280BugWithForEmptyStreamFuncCall,
} from "../examples/streamArgsCall.js";
import { streamResultsCall } from "../examples/streamResultsCall.js";
import { structuralTypingCall } from "../examples/structuralTypingCall.js";
@ -120,6 +121,7 @@ import { lng193BugCall } from "../examples/closureReturnRename.js";
import {
closuresCall,
multipleClosuresLNG262BugCall,
lng317BugCall,
} from "../examples/closures.js";
import { closureArrowCaptureCall } from "../examples/closureArrowCapture.js";
import {
@ -135,7 +137,6 @@ import {
joinIdxLocalCall,
joinIdxRelayCall,
} from "../examples/joinCall.js";
import { recursiveStreamsCall } from "../examples/recursiveStreamsCall.js";
import { renameVarsCall } from "../examples/renameVars.js";
import {
arraySugarCall,
@ -161,6 +162,12 @@ import {
returnArrowCall,
returnArrowChainCall,
} from "../examples/returnArrowCall.js";
import { rangeCall } from "../examples/recursiveStreams/rangeCall.js";
import { nestedCall } from "../examples/recursiveStreams/nestedCall.js";
import { yesNoStreamCall } from "../examples/recursiveStreams/yesNoStreamCall.js";
import { multiRecStreamCall } from "../examples/recursiveStreams/multiRecStreamCall.js";
import { pipelineStreamCall } from "../examples/recursiveStreams/pipelineCall.js";
import { remoteRecStreamCall } from "../examples/recursiveStreams/remoteRecCall.js";
var selfPeerId: string;
var peer1: IFluenceClient;
@ -176,22 +183,12 @@ import log from "loglevel";
// log.setDefaultLevel("debug")
async function start() {
console.log("CONNECTING TO FIRST:");
Fluence.onConnectionStateChange((s) => {
console.log(s);
});
await Fluence.connect(relay1, {});
const cl = await Fluence.getClient();
peer1 = cl;
selfPeerId = cl.getPeerId();
console.log("CONNECTED");
peer2 = await createClient(relay2, {});
console.log("CONNECTING TO SECOND:");
peer2.onConnectionStateChange((s) => {
console.log(s);
});
console.log("CONNECTED");
peer1 = Fluence.getClient();
selfPeerId = peer1.getPeerId();
peer2 = await createClient(relay2);
}
async function stop() {
@ -217,6 +214,77 @@ describe("Testing examples", () => {
await stop();
});
// Integration tests for the `for ... rec` (recursive stream) compiler feature.
describe("for ... rec", () => {
  // Half-open integer range [start, end), used both as input and as oracle.
  const range = (start: number, end: number) =>
    Array.from({ length: end - start }, (v, k) => k + start);

  it("range", async () => {
    // Exercise every (i, j) pair, including empty and negative ranges.
    for (const i of range(-5, 5)) {
      for (const j of range(-5, 5)) {
        const result = await rangeCall(i, j);
        if (i < j) {
          expect(result).toEqual(range(i, j));
        } else {
          // b <= a must yield an empty result
          expect(result).toEqual([]);
        }
      }
    }
  }, 15000);

  /**
   * This test does not work due to Aqua VM
   */
  it.skip("nested", async () => {
    for (const i of range(0, 10)) {
      const result = await nestedCall(i);
      // Oracle: for each x < i, the prefix 0..x is appended once.
      expect(result).toEqual(range(0, i).flatMap((x) => range(0, x + 1)));
    }
  }, 15000);

  it("yes|no stream", async () => {
    for (const i of range(1, 10)) {
      const yesNo = await yesNoStreamCall(i);
      // Expect i answers of "yes" followed by a single terminating "no".
      expect(yesNo).toEqual(
        range(0, i)
          .map((_) => "yes")
          .concat(["no"]),
      );
    }
  }, 15000);

  it("multi rec stream", async () => {
    // Handler spawning 0, 1 or 2 successors depending on i mod 3.
    const handle = (i: number) => {
      if (i % 3 === 0) return [i + 1];
      if (i % 3 === 1) return [i + 1, i + 2];
      return [];
    };
    for (const i of range(1, 10)) {
      const loop = await multiRecStreamCall(0, i, handle);
      // Order is nondeterministic; only membership is checked.
      range(0, i + 1).forEach((j) => {
        expect(loop).toContain(j);
      });
    }
  }, 15000);

  it("pipeline", async () => {
    for (const i of range(1, 10)) {
      const result = await pipelineStreamCall(0, i);
      // Order is nondeterministic; compare sorted contents.
      expect(result.sort()).toEqual(range(0, i + 1));
    }
  }, 15000);

  /**
   * This test does not work due to `for ... rec`
   * not taking topology into account
   */
  it.skip("remote rec", async () => {
    for (const i of range(0, 10)) {
      const result = await remoteRecStreamCall(0, i, peer2);
      expect(result).toEqual(range(0, i + 1));
    }
  }, 15000);
});
it("callArrow.aqua args bug 426", async () => {
let argResult = await reproArgsBug426Call();
@ -590,6 +658,11 @@ describe("Testing examples", () => {
expect(result).toStrictEqual(["default-id", "resolved-id"]);
});
it("abilitiesClosure.aqua bug LNG-314", async () => {
let result = await bugLNG314Call();
expect(result).toEqual("strstrstr");
});
it("functors.aqua LNG-119 bug", async () => {
let result = await bugLng119Call();
expect(result).toEqual([1]);
@ -630,29 +703,41 @@ describe("Testing examples", () => {
it.skip("streamArgs.aqua LNG-280 with for", async () => {
let result = await lng280BugWithForCall();
expect(result).toEqual([
"valueUseStream",
"valueReturnStream",
"valueUseStream",
"valueReturnStream",
"valueUseStream",
"valueReturnStream"
"valueUseStream",
"valueReturnStream",
"valueUseStream",
"valueReturnStream",
"valueUseStream",
"valueReturnStream",
]);
});
it("streamArgs.aqua LNG-280 with for and anonymous stream", async () => {
let result = await lng280BugWithForAnonStreamCall();
expect(result).toEqual([[1, 1], [1, 2], [1, 3], [1, 4], [1, 5]]);
expect(result).toEqual([
[1, 1],
[1, 2],
[1, 3],
[1, 4],
[1, 5],
]);
});
it("streamArgs.aqua LNG-280 with for and anonymous stream from function", async () => {
let result = await lng280BugWithForEmptyStreamFuncCall();
expect(result).toEqual([[1, 1], [1, 2], [1, 3], [1, 4], [1, 5]]);
});
let result = await lng280BugWithForEmptyStreamFuncCall();
expect(result).toEqual([
[1, 1],
[1, 2],
[1, 3],
[1, 4],
[1, 5],
]);
});
it.skip("streamArgs.aqua return derived stream", async () => {
let result = await returnDerivedStreamCall();
expect(result).toEqual([1]);
});
let result = await returnDerivedStreamCall();
expect(result).toEqual([1]);
});
it("streamResults.aqua", async () => {
let streamResultsResult = await streamResultsCall();
@ -822,15 +907,6 @@ describe("Testing examples", () => {
// expect(res).toEqual("ok")
// });
// TODO: uncomment
// it('recursiveStreams.aqua', async () => {
// let [sucList, loopList] = await recursiveStreamsCall();
// console.log(sucList);
// console.log(loopList);
// expect(loopList).toEqual(['yes', 'yes', 'yes', 'yes', 'no']);
// expect(sucList.length).toEqual(5);
// });
it("renameVars.aqua", async () => {
let renameVarsResult = await renameVarsCall();
expect(renameVarsResult).toEqual(["ok", "ok"]);
@ -1025,6 +1101,11 @@ describe("Testing examples", () => {
expect(result).toEqual([1, 2]);
});
it("closures.aqua bug LNG-317", async () => {
let result = await lng317BugCall();
expect(result).toEqual(["empty", "identity"]);
});
it("closureArrowCapture.aqua", async () => {
let result = await closureArrowCaptureCall("input");
expect(result).toEqual("call: ".repeat(4) + "input");

View File

@ -0,0 +1,7 @@
import {
bugLNG314
} from "../compiled/examples/abilitiesClosure.js";
// Regression entry point for LNG-314 (closures passed with abilities).
export async function bugLNG314Call(): Promise<string> {
  const outcome = await bugLNG314();
  return outcome;
}

View File

@ -5,6 +5,7 @@ import {
registerLocalSrv,
closureOut2,
lng58Bug,
lng317Bug,
multipleClosuresBugLNG262
} from "../compiled/examples/closures.js";
import { config } from "../config.js";
@ -37,3 +38,7 @@ export async function lng58CBugCall(): Promise<string> {
// Regression entry point for LNG-262 (multiple closures in one scope).
export async function multipleClosuresLNG262BugCall(): Promise<[number, number]> {
  const pair = await multipleClosuresBugLNG262();
  return pair;
}
// Regression entry point for LNG-317 (closures capturing outside context).
export async function lng317BugCall(): Promise<string[]> {
  const outcome = await lng317Bug();
  return outcome;
}

View File

@ -0,0 +1,14 @@
import {
multiRecStream,
registerTestService,
} from "../../compiled/examples/recursiveStreams/multiRec.js";
// Registers the host-side element handler, then runs the recursive
// multi-successor stream example from `init` towards `target`.
export async function multiRecStreamCall(
  init: number,
  target: number,
  handle: (i: number) => number[],
): Promise<number[]> {
  registerTestService({ handle: handle });
  const visited = await multiRecStream(init, target);
  return visited;
}

View File

@ -0,0 +1,5 @@
import { nested } from "../../compiled/examples/recursiveStreams/nested.js";
// Runs the nested recursive-stream example with bound `n`.
export async function nestedCall(n: number): Promise<number[]> {
  const numbers = await nested(n);
  return numbers;
}

View File

@ -0,0 +1,8 @@
import { pipelineStream } from "../../compiled/examples/recursiveStreams/pipeline.js";
// Runs the three-stage recursive stream pipeline example.
export async function pipelineStreamCall(
  init: number,
  target: number,
): Promise<number[]> {
  const produced = await pipelineStream(init, target);
  return produced;
}

View File

@ -0,0 +1,5 @@
import { range } from "../../compiled/examples/recursiveStreams/range.js";
// Computes the half-open range [a, b) via the recursive-stream example.
export async function rangeCall(a: number, b: number): Promise<number[]> {
  const values = await range(a, b);
  return values;
}

View File

@ -0,0 +1,15 @@
import { IFluenceClient } from "@fluencelabs/js-client";
import { remoteRecStream } from "../../compiled/examples/recursiveStreams/remoteRec.js";
// Runs the remote recursive-stream example against `peer`,
// routing through that peer's relay.
export async function remoteRecStreamCall(
  init: number,
  target: number,
  peer: IFluenceClient,
): Promise<number[]> {
  const peerId = peer.getPeerId();
  const relayId = peer.getRelayPeerId();
  return remoteRecStream(init, target, peerId, relayId);
}

View File

@ -0,0 +1,16 @@
import {
yesNoStream,
registerYesNoService,
} from "../../compiled/examples/recursiveStreams/yesNo.js";
// Registers a yes/no service that answers "yes" until `limit` calls have
// been exceeded, then "no", and runs the recursive yes/no stream example.
export async function yesNoStreamCall(limit: number): Promise<string[]> {
  let callCount = 1;
  registerYesNoService({
    get: () => {
      callCount += 1;
      if (callCount > limit) {
        return "no";
      }
      return "yes";
    },
  });
  return yesNoStream();
}

View File

@ -1,22 +0,0 @@
import {
recursiveStream,
registerYesNoService,
} from "../compiled/examples/recursiveStreams.js";
// Registers a yes/no service answering "yes" three times and "no" afterwards,
// then runs the recursive stream example; returns its two result arrays.
export async function recursiveStreamsCall(): Promise<[string[], string[]]> {
  let calls = 0;
  registerYesNoService({
    get: () => {
      calls += 1;
      if (calls <= 3) {
        console.log("return yes");
        return "yes";
      }
      console.log("return no");
      return "no";
    },
  });
  return recursiveStream();
}

View File

@ -3,29 +3,27 @@ package aqua
import aqua.files.FileModuleId
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{Ast, Parser, ParserError}
import cats.data.*
import cats.parse.LocationMap
import cats.{~>, Comonad, Eval, Monad, Monoid, Order}
import cats.{Comonad, Eval, Monad, Monoid, Order, ~>}
object SpanParser extends scribe.Logging {
def parser: FileModuleId => String => ValidatedNec[ParserError[FileSpan.F], Ast[FileSpan.F]] = {
def parser: FileModuleId => String => ValidatedNec[ParserError[FileSpan.F], Ast[FileSpan.F]] =
id =>
{ source =>
{
logger.trace(s"creating parser for $id...")
val nat = new (Span.S ~> FileSpan.F) {
override def apply[A](span: Span.S[A]): FileSpan.F[A] = {
(
FileSpan(id.file.absolute.toString, Eval.later(LocationMap(source)), span._1),
span._2
)
}
source => {
logger.trace(s"creating parser for $id...")
val nat = new (Span.S ~> FileSpan.F) {
override def apply[A](span: Span.S[A]): FileSpan.F[A] = {
(
FileSpan(id.file.absolute.toString, Eval.later(LocationMap(source)), span._1),
span._2
)
}
val parser = Parser.natParser(Parser.spanParser, nat)(source)
logger.trace("parser created")
parser
}
val parser = Parser.natParser(Parser.spanParser, nat)(source)
logger.trace("parser created")
parser
}
}
}

View File

@ -7,26 +7,26 @@ import aqua.semantics.rules.locations.LocationsState
import aqua.semantics.{CompilerState, RawSemantics, SemanticError, SemanticWarning, Semantics}
import cats.data.Validated.{Invalid, Valid}
import cats.data.{NonEmptyChain, ValidatedNec}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.foldable.*
import cats.syntax.either.*
import cats.syntax.flatMap.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.reducible.*
import cats.data.{NonEmptyChain, ValidatedNec}
import monocle.Lens
import monocle.macros.GenLens
class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
private def getImportTokens(ast: Ast[S]): List[LiteralToken[S]] =
ast.collectHead {
ast.head.collect {
case ImportExpr(fn) => fn
case ImportFromExpr(_, fn) => fn
case UseExpr(fn, _) => fn
case UseFromExpr(_, fn, _) => fn
}.value.toList
}.toList
/**
* Process the AST and return the semantics result.

View File

@ -108,22 +108,18 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
def compile(
src: Map[String, String],
imports: Map[String, String] = Map.empty
): ValidatedNec[AquaError[String, String, S], Map[String, LspContext[S]]] = {
): ValidatedNec[AquaError[String, String, S], Map[String, LspContext[S]]] =
LSPCompiler
.compileToLsp[Id, String, String, Span.S](
aquaSource(src, imports),
id => txt => Parser.parse(Parser.parserSchema)(txt),
AquaCompilerConf(ConstantRaw.defaultConstants(None))
)
.leftMap { errors =>
println(errors)
errors
}
}
it should "return right tokens" in {
val main =
"""module Import
"""aqua Import
|
|import foo, strFunc, num from "export2.aqua"
|
|import "../gen/OneMore.aqua"
@ -156,7 +152,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
)
val firstImport =
"""module Export declares strFunc, num, foo
"""aqua Export declares strFunc, num, foo
|
|func absb() -> string:
| <- "ff"
@ -173,7 +169,8 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
|""".stripMargin
val secondImport =
"""
"""aqua Export declares OneMore
|
|service OneMore:
| more_call()
| consume(s: string)
@ -226,7 +223,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
// this is tokens from imports, if we will use `FileSpan.F` file names will be different
// OneMore service
res.checkTokenLoc(secondImport, "OneMore", 0, serviceType) shouldBe true
res.checkTokenLoc(secondImport, "OneMore", 1, serviceType) shouldBe true
res.checkTokenLoc(
secondImport,
"more_call",
@ -265,7 +262,7 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
ProductType(ScalarType.u32 :: Nil)
)
) shouldBe true
res.checkTokenLoc(firstImport, "someVar", 2, ScalarType.u32, None, true) shouldBe true
res.checkTokenLoc(firstImport, "someVar", 2, ScalarType.u32, None) shouldBe true
// foo function
res.checkTokenLoc(

View File

@ -1,6 +1,6 @@
{
"name": "@fluencelabs/aqua-language-server-api",
"version": "0.13.3",
"version": "0.13.4",
"description": "Aqua Language Server API",
"type": "commonjs",
"files": [

View File

@ -1,17 +1,26 @@
package aqua.linker
import aqua.errors.Errors.internalError
import cats.MonadError
import cats.data.{NonEmptyChain, Validated, ValidatedNec}
import cats.kernel.{Monoid, Semigroup}
import cats.syntax.semigroup.*
import cats.syntax.validated.*
import cats.syntax.functor.*
import cats.instances.list.*
import cats.kernel.{Monoid, Semigroup}
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.syntax.validated.*
import scala.annotation.tailrec
import scribe.Logging
import scala.annotation.tailrec
object Linker extends Logging {
// Transpilation function for module
// (Imports contexts => Compilation result)
type TP = [F[_], T] =>> Map[String, T] => F[T]
// Dependency Cycle, prev element import next
// and last imports head
type DepCycle[I] = NonEmptyChain[I]
@ -23,8 +32,8 @@ object Linker extends Logging {
* @return [[List]] of dependecy cycles found
*/
private def findDepCycles[I, E, T](
mods: List[AquaModule[I, E, T => T]]
): List[DepCycle[AquaModule[I, E, T => T]]] = {
mods: List[AquaModule[I, E, T]]
): List[DepCycle[I]] = {
val modsIds = mods.map(_.id).toSet
// Limit search to only passed modules (there maybe dependencies not from `mods`)
val deps = mods.map(m => m.id -> m.dependsOn.keySet.intersect(modsIds)).toMap
@ -56,7 +65,7 @@ object Linker extends Logging {
)
}
val cycles = mods
mods
.flatMap(m =>
findCycles(
paths = NonEmptyChain.one(m.id) :: Nil,
@ -69,73 +78,83 @@ object Linker extends Logging {
// should not be a lot of cycles
_.toChain.toList.toSet
)
val modsById = mods.fproductLeft(_.id).toMap
// This should be safe
cycles.map(_.map(modsById))
}
@tailrec
def iter[I, E, T: Semigroup](
mods: List[AquaModule[I, E, T => T]],
proc: Map[I, T => T],
cycleError: DepCycle[AquaModule[I, E, T => T]] => E
): ValidatedNec[E, Map[I, T => T]] =
/**
* Main iterative linking function
* @param mods Modules to link
* @param proc Already processed modules
* @param cycle Function to create error from dependency cycle
* @return Result for all modules
*/
def iter[I, E, F[_], T](
mods: List[AquaModule[I, E, TP[F, T]]],
proc: Map[I, T],
cycle: DepCycle[I] => E
)(using me: MonadError[F, NonEmptyChain[E]]): F[Map[I, T]] =
mods match {
case Nil =>
proc.valid
proc.pure
case _ =>
val (canHandle, postpone) = mods.partition(_.dependsOn.keySet.forall(proc.contains))
// Find modules that can be processed
val (canHandle, postpone) = mods.partition(
_.dependsOn.keySet.forall(proc.contains)
)
logger.debug("ITERATE, can handle: " + canHandle.map(_.id))
logger.debug(s"dependsOn = ${mods.map(_.dependsOn.keySet)}")
logger.debug(s"postpone = ${postpone.map(_.id)}")
logger.debug(s"proc = ${proc.keySet}")
// If there are no modules that can be processed
if (canHandle.isEmpty && postpone.nonEmpty) {
findDepCycles(postpone)
.map(cycleError)
.invalid
.leftMap(
// This should be safe as cycles should exist at this moment
errs => NonEmptyChain.fromSeq(errs).get
)
} else {
val folded = canHandle.foldLeft(proc) { case (acc, m) =>
val importKeys = m.dependsOn.keySet
logger.debug(s"${m.id} dependsOn $importKeys")
val deps: T => T =
importKeys.map(acc).foldLeft(identity[T]) { case (fAcc, f) =>
logger.debug("COMBINING ONE TIME ")
t => {
logger.debug(s"call combine $t")
fAcc(t) |+| f(t)
}
}
acc + (m.id -> m.body.compose(deps))
}
iter(
postpone,
// TODO can be done in parallel
folded,
cycleError
me.raiseError(
// This should be safe as cycles should exist at this moment
NonEmptyChain
.fromSeq(findDepCycles(postpone).map(cycle))
.get
)
} else
canHandle.traverse { mod =>
// Gather all imports for module
val imports = mod.imports.mapValues { imp =>
proc
.get(imp)
.getOrElse(
// Should not happen as we check it above
internalError(s"Module $imp not found in $proc")
)
}.toMap
// Process (transpile) module
mod.body(imports).map(mod.id -> _)
}.flatMap(processed =>
// flatMap should be stack safe
iter(
postpone,
proc ++ processed,
cycle
)
)
}
}
def link[I, E, T: Semigroup](
modules: Modules[I, E, T => T],
cycleError: DepCycle[AquaModule[I, E, T => T]] => E,
empty: I => T
): ValidatedNec[E, Map[I, T]] =
if (modules.dependsOn.nonEmpty) Validated.invalid(modules.dependsOn.values.reduce(_ ++ _))
else {
val result = iter(modules.loaded.values.toList, Map.empty, cycleError)
result.map(_.collect {
case (i, f) if modules.exports(i) =>
i -> f(empty(i))
})
}
/**
* Link modules
*
* @param modules Modules to link (with transpilation functions as bodies)
* @param cycle Function to create error from dependency cycle
* @return Result for all **exported** modules
*/
def link[I, E, F[_], T](
modules: Modules[I, E, TP[F, T]],
cycle: DepCycle[I] => E
)(using me: MonadError[F, NonEmptyChain[E]]): F[Map[I, T]] =
if (modules.dependsOn.nonEmpty)
me.raiseError(
modules.dependsOn.values.reduce(_ ++ _)
)
else
iter(modules.loaded.values.toList, Map.empty, cycle).map(
// Remove all modules that are not exported from result
_.filterKeys(modules.exports.contains).toMap
)
}

View File

@ -1,6 +1,8 @@
package aqua.linker
import cats.data.NonEmptyChain
import cats.Foldable
import cats.data.{Chain, NonEmptyChain}
import cats.syntax.foldable._
import cats.syntax.option._
case class Modules[I, E, T](
@ -23,17 +25,23 @@ case class Modules[I, E, T](
exports = if (toExport) exports + aquaModule.id else exports
)
def addAll[F[_]: Foldable](modules: F[AquaModule[I, E, T]]): Modules[I, E, T] =
modules.foldLeft(this)(_ add _)
def isResolved: Boolean = dependsOn.isEmpty
def map[TT](f: T => TT): Modules[I, E, TT] =
copy(loaded = loaded.view.mapValues(_.map(f)).toMap)
def mapModuleToBody[TT](f: AquaModule[I, E, T] => TT): Modules[I, E, TT] =
copy(loaded = loaded.view.mapValues(v => v.map(_ => f(v))).toMap)
def mapErr[EE](f: E => EE): Modules[I, EE, T] =
copy(
loaded = loaded.view.mapValues(_.mapErr(f)).toMap,
dependsOn = dependsOn.view.mapValues(_.map(f)).toMap
)
}
object Modules {
def from[I, E, T](modules: Chain[AquaModule[I, E, T]]): Modules[I, E, T] =
modules.foldLeft(Modules[I, E, T]())(_.add(_, toExport = true))
}

View File

@ -1,44 +1,61 @@
package aqua.linker
import cats.data.Validated
import cats.Id
import cats.data.{EitherNec, NonEmptyChain}
import cats.syntax.either.*
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class LinkerSpec extends AnyFlatSpec with Matchers {
type TP = Map[String, String] => EitherNec[String, String]
val cycle: NonEmptyChain[String] => String =
_.toChain.toList.mkString(" -> ")
"linker" should "resolve dependencies" in {
val empty = Modules[String, String, String => String]()
val empty = Modules[String, String, TP]()
val withMod1 =
empty
.add(
AquaModule[String, String, String => String](
id = "mod1",
imports = Map.empty,
dependsOn = Map("mod2" -> "unresolved mod2 in mod1"),
body = _ ++ " | mod1"
),
toExport = true
)
val withMod1 = empty.add(
AquaModule(
id = "mod1",
imports = Map("mod2" -> "mod2"),
dependsOn = Map("mod2" -> "unresolved mod2 in mod1"),
body = imports => {
println(s"mod1: $imports")
imports
.get("mod2")
.toRight("mod2 not found in mod1")
.toEitherNec
.map(_ ++ " | mod1")
}
),
toExport = true
)
withMod1.isResolved should be(false)
Linker.link[String, String, String](
withMod1,
cycle => cycle.map(_.id).toChain.toList.mkString(" -> "),
_ => ""
) should be(Validated.invalidNec("unresolved mod2 in mod1"))
Linker.link(withMod1, cycle) should be(
Left("unresolved mod2 in mod1").toEitherNec
)
val withMod2 =
withMod1.add(AquaModule("mod2", Map.empty, Map.empty, _ ++ " | mod2"))
val withMod2 = withMod1.add(
AquaModule(
id = "mod2",
imports = Map.empty,
dependsOn = Map.empty,
body = _ => "mod2".asRight.toEitherNec
)
)
withMod2.isResolved should be(true)
Linker.link[String, String, String](
withMod2,
cycle => cycle.map(_.id + "?").toChain.toList.mkString(" -> "),
_ => ""
) should be(Validated.validNec(Map("mod1" -> " | mod2 | mod1")))
Linker.link(withMod2, cycle) should be(
Map(
"mod1" -> "mod2 | mod1"
).asRight.toEitherNec
)
}
}

View File

@ -348,7 +348,7 @@ object ArrowInliner extends Logging {
// Rename arrows according to values
arrowsRenamed = Renamed(
valuesRenamed.renames.filterKeys(abilitiesArrows.keySet).toMap,
valuesRenamed.renames.view.filterKeys(abilitiesArrows.keySet).toMap,
abilitiesArrows.renamed(valuesRenamed.renames)
)
@ -497,7 +497,7 @@ object ArrowInliner extends Logging {
exports <- Exports[S].exports
streams <- getOutsideStreamNames
arrows = passArrows ++ arrowsFromAbilities
inlineResult <- Exports[S].scope(
Arrows[S].scope(
for {

View File

@ -149,11 +149,10 @@ object TagInliner extends Logging {
def flat[S: Mangler](
vm: ValueModel,
op: Option[OpModel.Tree],
flatStream: Boolean
op: Option[OpModel.Tree]
): State[S, (ValueModel, Option[OpModel.Tree])] = {
vm match {
case v @ VarModel(n, StreamType(t), l) if flatStream =>
case ValueModel.Stream(v @ VarModel(n, _, l), StreamType(t)) =>
val canonName = n + "_canon"
for {
canonN <- Mangler[S].findAndForbidName(canonName)
@ -203,7 +202,7 @@ object TagInliner extends Logging {
peerIdDe <- valueToModel(peerId)
viaDe <- valueListToModel(via.toList)
viaDeFlattened <- viaDe.traverse { case (vm, tree) =>
flat(vm, tree, true)
flat(vm, tree)
}
(pid, pif) = peerIdDe
(viaD, viaF) = viaDeFlattened.unzip
@ -238,7 +237,10 @@ object TagInliner extends Logging {
case ForTag(item, iterable, mode) =>
for {
vp <- valueToModel(iterable)
flattened <- flat(vp._1, vp._2, true)
flattened <- mode match {
case ForTag.Mode.RecMode => State.pure(vp)
case _ => flat(vp._1, vp._2)
}
(v, p) = flattened
n <- Mangler[S].findAndForbidName(item)
elementType = iterable.`type` match {
@ -250,8 +252,8 @@ object TagInliner extends Logging {
}
_ <- Exports[S].resolved(item, VarModel(n, elementType))
modeModel = mode match {
case ForTag.Mode.Blocking => ForModel.Mode.Never
case ForTag.Mode.NonBlocking => ForModel.Mode.Null
case ForTag.Mode.SeqMode | ForTag.Mode.TryMode => ForModel.Mode.Null
case ForTag.Mode.ParMode | ForTag.Mode.RecMode => ForModel.Mode.Never
}
} yield TagInlined.Single(
model = ForModel(n, v, modeModel),

View File

@ -37,6 +37,7 @@ object MakeAbilityRawInliner extends RawInliner[AbilityRaw] {
varModel = VarModel(name, raw.baseType)
valsInline = foldedFields.toList.foldMap { case (_, inline) => inline }.desugar
_ <- updateFields(name, foldedFields)
_ <- Exports[S].resolved(name, varModel)
} yield {
(
varModel,

View File

@ -1,5 +1,6 @@
package aqua.model.inline.state
import aqua.mangler.ManglerState
import aqua.model.{FuncArrow, ValueModel}
import aqua.model.inline.state.{Arrows, Counter, Exports, Mangler}
import aqua.raw.arrow.FuncRaw
@ -23,7 +24,7 @@ import scribe.Logging
* for [[Counter]]
*/
case class InliningState(
noNames: Set[String] = Set.empty,
noNames: ManglerState = ManglerState(),
resolvedExports: Map[String, ValueModel] = Map.empty,
resolvedArrows: Map[String, FuncArrow] = Map.empty,
instructionCounter: Int = 0
@ -35,7 +36,7 @@ object InliningState {
Counter.Simple.transformS(_.instructionCounter, (acc, i) => acc.copy(instructionCounter = i))
given Mangler[InliningState] =
Mangler.Simple.transformS(_.noNames, (acc, nn) => acc.copy(noNames = nn))
Mangler[ManglerState].transformS(_.noNames, (acc, nn) => acc.copy(noNames = nn))
given Arrows[InliningState] =
Arrows.Simple.transformS(_.resolvedArrows, (acc, aa) => acc.copy(resolvedArrows = aa))

Some files were not shown because too many files have changed in this diff Show More