diff --git a/.github/workflows/test_branch.yml b/.github/workflows/test_branch.yml index 2a08df71..36f980ed 100644 --- a/.github/workflows/test_branch.yml +++ b/.github/workflows/test_branch.yml @@ -55,6 +55,9 @@ jobs: cd .. sbt "cliJS/fastOptJS" rm -rf aqua-playground/src/compiled/examples/* - node cli/.js/target/scala-3.0.2/cli-fastopt.js -i aqua-playground/aqua/examples -o aqua-playground/src/compiled/examples -m aqua-playground/node_modules -c "UNIQUE_CONST = 1" -c "ANOTHER_CONST = \"ab\"" - cd aqua-playground + mv cli/.js/target/scala-3.0.2/cli-fastopt.js npm/aqua.mjs + cd npm + npm i + node aqua.mjs -i ../aqua-playground/aqua/examples -o ../aqua-playground/src/compiled/examples -m ../aqua-playground/node_modules -c "UNIQUE_CONST = 1" -c "ANOTHER_CONST = \"ab\"" + cd ../aqua-playground npm run examples diff --git a/backend/ts/src/main/scala/aqua/backend/Header.scala b/backend/ts/src/main/scala/aqua/backend/Header.scala index 077606d7..a7b95405 100644 --- a/backend/ts/src/main/scala/aqua/backend/Header.scala +++ b/backend/ts/src/main/scala/aqua/backend/Header.scala @@ -2,7 +2,25 @@ package aqua.backend object Header { - def header(isJs: Boolean): String = + def header(isJs: Boolean, isCommonJS: Boolean): String = { + val imports = if (isCommonJS) { + s""" + |const { Fluence, FluencePeer } = require('@fluencelabs/fluence'); + |const { + | ResultCodes, + | RequestFlow, + | RequestFlowBuilder, + | CallParams,} = require('@fluencelabs/fluence/dist/internal/compilerSupport/v1${if (isJs) ".js" else ""}'); + |""".stripMargin + } else { + s"""import { Fluence, FluencePeer } from '@fluencelabs/fluence'; + |import { + | ResultCodes, + | RequestFlow, + | RequestFlowBuilder, + | CallParams + |} from '@fluencelabs/fluence/dist/internal/compilerSupport/v1${if (isJs) ".js" else ""}';""".stripMargin + } s"""/** | * | * This file is auto-generated. Do not edit manually: changes may be erased. 
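Note on the new isCommonJS flag in the hunk above: it only switches the import style of the generated header. A minimal sketch of the difference, using the Header.header signature introduced here (output in the comments is abbreviated):

// ES-module header, as used by the TypeScript backend and JavaScriptBackend(false):
//   import { Fluence, FluencePeer } from '@fluencelabs/fluence';
val esmHeader = Header.header(isJs = false, isCommonJS = false)

// CommonJS header, produced when JavaScriptBackend is constructed with isCommonJS = true:
//   const { Fluence, FluencePeer } = require('@fluencelabs/fluence');
val cjsHeader = Header.header(isJs = false, isCommonJS = true)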
@@ -11,12 +29,7 @@ object Header { | * Aqua version: ${Version.version} | * | */ - |import { Fluence, FluencePeer } from '@fluencelabs/fluence'; - |import { - | ResultCodes, - | RequestFlow, - | RequestFlowBuilder, - | CallParams, - |} from '@fluencelabs/fluence/dist/internal/compilerSupport/v1${if (isJs) ".js" else ""}'; + |$imports |""".stripMargin + } } diff --git a/backend/ts/src/main/scala/aqua/backend/OutputFile.scala b/backend/ts/src/main/scala/aqua/backend/OutputFile.scala index 98df1f07..3027208f 100644 --- a/backend/ts/src/main/scala/aqua/backend/OutputFile.scala +++ b/backend/ts/src/main/scala/aqua/backend/OutputFile.scala @@ -6,7 +6,7 @@ import aqua.model.transform.res.AquaRes case class OutputFile(res: AquaRes) { - def generate(types: Types): String = { + def generate(types: Types, isCommonJS: Boolean): String = { import types.* val services = res.services .map(s => OutputService(s, types)) @@ -15,7 +15,7 @@ case class OutputFile(res: AquaRes) { .mkString("\n\n") val functions = res.funcs.map(f => OutputFunc(f, types)).map(_.generate).toList.mkString("\n\n") - s"""${Header.header(false)} + s"""${Header.header(false, isCommonJS)} | |function ${typed( s"""missingFields(${typed("obj", "any")}, ${typed("fields", "string[]")})""", diff --git a/backend/ts/src/main/scala/aqua/backend/js/JavaScriptBackend.scala b/backend/ts/src/main/scala/aqua/backend/js/JavaScriptBackend.scala index e48709aa..2232c00e 100644 --- a/backend/ts/src/main/scala/aqua/backend/js/JavaScriptBackend.scala +++ b/backend/ts/src/main/scala/aqua/backend/js/JavaScriptBackend.scala @@ -4,7 +4,7 @@ import aqua.backend.ts.TypeScriptTypes import aqua.backend.{Backend, EmptyTypes, Generated, Header, OutputFile, OutputFunc, OutputService} import aqua.model.transform.res.AquaRes -object JavaScriptBackend extends Backend { +case class JavaScriptBackend(isCommonJS: Boolean) extends Backend { val ext = ".js" val tsExt = ".d.ts" @@ -18,7 +18,7 @@ object JavaScriptBackend extends Backend { val functions = res.funcs.map(f => TypeScriptTypes.funcType(f)).map(_.generate).toList.mkString("\n\n") - val body = s"""${Header.header(false)} + val body = s"""${Header.header(false, false)} | |// Services |$services @@ -33,6 +33,6 @@ object JavaScriptBackend extends Backend { override def generate(res: AquaRes): Seq[Generated] = if (res.isEmpty) Nil else { - Generated(ext, OutputFile(res).generate(EmptyTypes)):: typesFile(res) :: Nil + Generated(ext, OutputFile(res).generate(EmptyTypes, isCommonJS)):: typesFile(res) :: Nil } } diff --git a/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptBackend.scala b/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptBackend.scala index 3cfe914d..7a84cdb4 100644 --- a/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptBackend.scala +++ b/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptBackend.scala @@ -9,5 +9,5 @@ object TypeScriptBackend extends Backend { val ext = ".ts" override def generate(res: AquaRes): Seq[Generated] = - if (res.isEmpty) Nil else Generated(ext, OutputFile(res).generate(TypeScriptTypes)) :: Nil + if (res.isEmpty) Nil else Generated(ext, OutputFile(res).generate(TypeScriptTypes, false)) :: Nil } diff --git a/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptTypesFile.scala b/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptTypesFile.scala index 98d37558..f6198f2b 100644 --- a/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptTypesFile.scala +++ b/backend/ts/src/main/scala/aqua/backend/ts/TypeScriptTypesFile.scala @@ -5,7 +5,7 @@ import 
aqua.model.transform.res.AquaRes case class TypeScriptTypesFile(res: AquaRes) { def generate: String = - s"""${Header.header(false)} + s"""${Header.header(false, false)} | |// Services |${res.services.map(TSServiceTypes(_)).map(_.generate).toList.mkString("\n\n")} diff --git a/build.sbt b/build.sbt index c2a39083..9c4b2a12 100644 --- a/build.sbt +++ b/build.sbt @@ -17,7 +17,7 @@ val declineV = "2.1.0" name := "aqua-hll" val commons = Seq( - baseAquaVersion := "0.3.1", + baseAquaVersion := "0.3.2", version := baseAquaVersion.value + "-" + sys.env.getOrElse("BUILD_NUMBER", "SNAPSHOT"), scalaVersion := dottyVersion, libraryDependencies ++= Seq( @@ -56,7 +56,7 @@ lazy val cli = crossProject(JSPlatform, JVMPlatform) lazy val cliJS = cli.js .settings( - scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)), + scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.ESModule)), scalaJSUseMainModuleInitializer := true ) diff --git a/cli/.js/src/main/scala/aqua/CallJsFunction.scala b/cli/.js/src/main/scala/aqua/CallJsFunction.scala new file mode 100644 index 00000000..dbc2f813 --- /dev/null +++ b/cli/.js/src/main/scala/aqua/CallJsFunction.scala @@ -0,0 +1,86 @@ +package aqua + +import aqua.model.transform.TransformConfig +import aqua.model.transform.res.FuncRes +import aqua.types.Type + +import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.scalajs.js + +object CallJsFunction { + + // Register a service that returns no result + def registerUnitService( + peer: FluencePeer, + serviceId: String, + fnName: String, + handler: (js.Array[js.Any]) => Unit + ) = { + peer.internals.callServiceHandler.use((req, resp, next) => { + if (req.serviceId == serviceId && req.fnName == fnName) { + handler(req.args) + resp.retCode = ResultCodes.success + resp.result = new js.Object {} + } + + next() + }) + } + + // Call a function with generated air script + def funcCallJs( + peer: FluencePeer, + air: String, + args: List[(String, js.Any)], + returnType: Option[Type], + config: TransformConfig + )(implicit ec: ExecutionContext): Future[Any] = { + val resultPromise: Promise[js.Any] = Promise[js.Any]() + + val requestBuilder = new RequestFlowBuilder() + val relayPeerId = peer.getStatus().relayPeerId + + requestBuilder + .disableInjections() + .withRawScript(air) + .configHandler((handler, r) => { + handler.on(config.getDataService, config.relayVarName.getOrElse("-relay-"), (_, _) => { relayPeerId }) + args.foreach { (fnName, arg) => + handler.on(config.getDataService, fnName, (_, _) => arg) + } + handler.onEvent( + config.callbackService, + config.respFuncName, + (args, _) => { + if (args.length == 1) { + resultPromise.success(args.pop()) + } else if (args.length == 0) { + resultPromise.success(()) + } else { + resultPromise.success(args) + } + () + } + ) + handler.onEvent( + config.errorHandlingService, + config.errorFuncName, + (args, _) => { + resultPromise.failure(new RuntimeException(args.pop().toString)) + () + } + ) + }) + .handleScriptError((err) => { + resultPromise.failure(new RuntimeException("script error: " + err.toString)) + }) + .handleTimeout(() => { + if (!resultPromise.isCompleted) + resultPromise.failure(new RuntimeException(s"Request timed out")) + }) + + peer.internals.initiateFlow(requestBuilder.build()).toFuture.flatMap { _ => + returnType.fold(resultPromise.success(()).future)(_ => resultPromise.future) + } + } +} diff --git a/cli/.js/src/main/scala/aqua/JsTypes.scala b/cli/.js/src/main/scala/aqua/JsTypes.scala new file mode 100644 index 00000000..ce2eb62f --- 
/dev/null +++ b/cli/.js/src/main/scala/aqua/JsTypes.scala @@ -0,0 +1,136 @@ +package aqua + +import scala.concurrent.Promise +import scala.scalajs.js +import scala.scalajs.js.annotation.JSImport + +/*** + * This is a description of the types from the Fluence JS library. + * See here for details https://github.com/fluencelabs/fluence-js + */ + +/** + * Particle context. Contains additional information about the particle which triggered the `call` air instruction from AVM + */ +trait ParticleContext { + def particleId: String + def initPeerId: String + def timestamp: Int + def ttl: Int + def signature: String +} + +object ResultCodes { + val success = 0 + val unknownError = 1 + val exceptionInHandler = 2 +} + +/** + * Represents the result of the `call` air instruction to be returned into AVM + */ +trait CallServiceResult extends js.Object { + def retCode: Int + def retCode_=(code: Int): Unit + def result: js.Any + def result_=(res: js.Any): Unit +} + +/** + * Represents the information passed from AVM when a `call` air instruction is executed on the local peer + */ +trait CallServiceData extends js.Object { + def serviceId: String + def fnName: String + def args: js.Array[js.Any] + def particleContext: ParticleContext + def tetraplets: js.Any +} + +trait Internals extends js.Object { + def initiateFlow(r: RequestFlow): js.Promise[js.Any] + def callServiceHandler: CallServiceHandler +} + +/** + * Information about the Fluence Peer connection + */ +trait PeerStatus extends js.Object { + def isInitialized: Boolean + def isConnected: Boolean + def peerId: String + def relayPeerId: String +} + +/** + * This class implements the Fluence protocol for JavaScript-based environments. + * It provides all the necessary features to communicate with the Fluence network + */ +@js.native +@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "FluencePeer") +class FluencePeer extends js.Object { + val internals: Internals = js.native + def getStatus(): PeerStatus = js.native + def stop(): js.Promise[Unit] = js.native +} + +/** + * Public interface to the Fluence JS SDK + */ +@js.native +@JSImport("@fluencelabs/fluence", "Fluence") +object Fluence extends js.Object { + def start(str: String): js.Promise[js.Any] = js.native + def stop(): js.Promise[js.Any] = js.native + def getPeer(): FluencePeer = js.native + def getStatus(): PeerStatus = js.native +} + +/** + * This class defines the handling of a `call` air instruction executed by AVM on the local peer. + * The whole execution process is defined by a chain of middlewares, an architecture popular among backend web frameworks. + */ +@js.native +@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "CallServiceHandler") +class CallServiceHandler extends js.Object { + + def on( + serviceId: String, + fnName: String, + handler: js.Function2[js.Array[js.Any], js.Any, js.Any] + ): js.Function0[CallServiceHandler] = js.native + + def onEvent( + serviceId: String, + fnName: String, + handler: js.Function2[js.Array[js.Any], js.Any, js.Any] + ): js.Function0[CallServiceHandler] = js.native + + def use(f: js.Function3[CallServiceData, CallServiceResult, js.Function0[Unit], Unit]): CallServiceHandler = js.native +} + +/** + * The class represents the current view (and state) of the distributed particle execution process from the client's point of view. + * It stores the intermediate particle state during the process. RequestFlow is identified by the id of the particle that is executed during the flow. 
+ * Each RequestFlow contains a separate (unique to the current flow) CallServiceHandler where the handling of the `call` AIR instruction takes place. + * Please note that the RequestFlow's handler is combined with the handler from the client before the execution occurs. + * After the combination, middlewares from the RequestFlow are executed before the client handler's middlewares. + */ +@js.native +@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "RequestFlow") +class RequestFlow extends js.Object {} + +/** + * Builder class for configuring and creating Request Flows + */ +@js.native +@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "RequestFlowBuilder") +class RequestFlowBuilder extends js.Object { + def withRawScript(air: String): RequestFlowBuilder = js.native + def configHandler(f: js.Function2[CallServiceHandler, js.Any, Unit]): RequestFlowBuilder = + js.native + def disableInjections(): RequestFlowBuilder = js.native + def build(): RequestFlow = js.native + def handleScriptError(f: js.Function1[js.Any, Unit]): RequestFlowBuilder = js.native + def handleTimeout(f: js.Function0[Unit]): RequestFlowBuilder = js.native +} diff --git a/cli/.js/src/main/scala/aqua/RunCommand.scala b/cli/.js/src/main/scala/aqua/RunCommand.scala new file mode 100644 index 00000000..ee2277c7 --- /dev/null +++ b/cli/.js/src/main/scala/aqua/RunCommand.scala @@ -0,0 +1,142 @@ +package aqua + +import aqua.backend.Generated +import aqua.backend.air.AirBackend +import aqua.backend.js.JavaScriptBackend +import aqua.backend.ts.TypeScriptBackend +import aqua.compiler.{AquaCompiled, AquaCompiler} +import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId} +import aqua.io.AquaFileError +import aqua.model.transform.TransformConfig +import aqua.model.transform.res.FuncRes +import aqua.parser.expr.CallArrowExpr +import aqua.parser.lexer.Literal +import aqua.parser.lift.FileSpan +import cats.data.* +import cats.effect.kernel.{Async, Clock} +import cats.effect.syntax.async.* +import cats.effect.{IO, IOApp, Sync} +import cats.syntax.applicative.* +import cats.syntax.apply.* +import cats.syntax.flatMap.* +import cats.syntax.functor.* +import cats.syntax.monad.* +import cats.syntax.show.* +import cats.{Id, Monad, ~>} +import fs2.io.file.{Files, Path} +import scribe.Logging + +import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.scalajs.js +import scala.scalajs.js.annotation.* + +object RunCommand extends Logging { + + /** + * Calls air code with the FluenceJS SDK. + * @param multiaddr relay to connect to + * @param air code to call + * @return + */ + def funcCall(multiaddr: String, air: Generated, config: TransformConfig)(implicit + ec: ExecutionContext + ): Future[Validated[String, Unit]] = { + (for { + _ <- Fluence + .start(multiaddr) + .toFuture + peer = Fluence.getPeer() + _ = CallJsFunction.registerUnitService( + peer, + "console", + "print", + args => println("print: " + args) + ) + result <- CallJsFunction.funcCallJs( + peer, + air.content, + Nil, + None, // TODO + config + ) + _ <- peer.stop().toFuture + } yield { + Validated.Valid(()) + }) + } + + val generatedFuncName = "callerUniqueFunction" + + /** + * Runs a function located in the `input` file with the FluenceJS SDK. 
Returns no output. + * @param multiaddr relay to connect to + * @param func function name + * @param input path to the aqua code with the function + * @param imports the sources the input needs + */ + def run[F[_]: Files: AquaIO: Async]( + multiaddr: String, + func: String, + input: Path, + imports: List[Path], + config: TransformConfig = TransformConfig() + )(implicit ec: ExecutionContext): F[Unit] = { + implicit val aio: AquaIO[IO] = new AquaFilesIO[IO] + + val generatedFile = Path("./.aqua/call0.aqua").absolute + val absInput = input.absolute + val code = + s"""import "${absInput.toString}" + | + |func $generatedFuncName(): + | $func + |""".stripMargin + + for { + _ <- AquaIO[F].writeFile(generatedFile, code).value + importsWithInput = absInput +: imports.map(_.absolute) + sources = new AquaFileSources[F](generatedFile, importsWithInput) + compileResult <- Clock[F].timed(AquaCompiler + .compile[F, AquaFileError, FileModuleId, FileSpan.F]( + sources, + SpanParser.parser, + AirBackend, + config + )) + (compileTime, airV) = compileResult + callResult <- Clock[F].timed { + airV match { + case Validated.Valid(airC: Chain[AquaCompiled[FileModuleId]]) => + // Because we generate an input with only one function, there should be only one compiled air content + airC.headOption + .flatMap(_.compiled.headOption) + .map { air => + Async[F].fromFuture { + funcCall(multiaddr, air, config).map(_.toValidatedNec).pure[F] + } + } + .getOrElse { + Validated + .invalidNec("Unexpected. There could be only one generated function.") + .pure[F] + } + case Validated.Invalid(errs) => + import ErrorRendering.showError + Validated.invalid(errs.map(_.show)).pure[F] + + } + } + (callTime, result) = callResult + } yield { + logger.debug(s"Compile time: ${compileTime.toMillis}ms") + logger.debug(s"Call time: ${callTime.toMillis}ms") + result.fold( + { (errs: NonEmptyChain[String]) => + errs.toChain.toList.foreach(err => println(err + "\n")) + }, + identity + ) + } + } + +} diff --git a/cli/.jvm/src/main/scala/aqua/RunCommand.scala b/cli/.jvm/src/main/scala/aqua/RunCommand.scala new file mode 100644 index 00000000..8f985ad0 --- /dev/null +++ b/cli/.jvm/src/main/scala/aqua/RunCommand.scala @@ -0,0 +1,19 @@ +package aqua + +import aqua.parser.expr.CallArrowExpr +import cats.Monad +import cats.effect.IO +import cats.effect.kernel.Async +import fs2.io.file.{Files, Path} + +import scala.concurrent.{ExecutionContext, Future} + +object RunCommand { + + def run[F[_]: Monad: Files: AquaIO: Async]( + multiaddr: String, + func: String, + input: Path, + imps: List[Path] + )(implicit ec: ExecutionContext): F[Unit] = ??? 
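A note on the wrapper generation in the JS RunCommand.run above: assuming a hypothetical invocation with --func 'greet("world")' and an input file at /home/user/aqua/main.aqua (the function name and path are invented for the example), the `code` string written to ./.aqua/call0.aqua would read:

import "/home/user/aqua/main.aqua"

func callerUniqueFunction():
  greet("world")

Because this wrapper declares a single function, compiling it with AirBackend yields exactly one air script, which is what the headOption logic above relies on.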
+} diff --git a/cli/.jvm/src/test/scala/WriteFileSpec.scala b/cli/.jvm/src/test/scala/WriteFileSpec.scala index 219f52f1..076e0a0e 100644 --- a/cli/.jvm/src/test/scala/WriteFileSpec.scala +++ b/cli/.jvm/src/test/scala/WriteFileSpec.scala @@ -32,7 +32,7 @@ class WriteFileSpec extends AnyFlatSpec with Matchers { Files[IO].deleteIfExists(targetTsFile).unsafeRunSync() AquaPathCompiler - .compileFilesTo[IO](src, List.empty, Option(targetJs), JavaScriptBackend, bc) + .compileFilesTo[IO](src, List.empty, Option(targetJs), JavaScriptBackend(false), bc) .unsafeRunSync() .leftMap { err => println(err) diff --git a/cli/src/main/scala/aqua/AppOps.scala b/cli/src/main/scala/aqua/AppOpts.scala similarity index 95% rename from cli/src/main/scala/aqua/AppOps.scala rename to cli/src/main/scala/aqua/AppOpts.scala index badfbd84..ddde00cd 100644 --- a/cli/src/main/scala/aqua/AppOps.scala +++ b/cli/src/main/scala/aqua/AppOpts.scala @@ -7,19 +7,22 @@ import aqua.parser.expr.ConstantExpr import aqua.parser.lift.LiftParser import cats.data.Validated.{Invalid, Valid} import cats.data.{NonEmptyList, Validated, ValidatedNec, ValidatedNel} -import cats.effect.{ExitCode, IO} +import cats.effect.kernel.Async import cats.effect.std.Console -import cats.syntax.functor.* -import cats.syntax.traverse.* +import cats.effect.{ExitCode, IO} import cats.syntax.applicative.* import cats.syntax.flatMap.* -import cats.{Comonad, Functor, Monad} +import cats.syntax.functor.* +import cats.syntax.traverse.* +import cats.{Comonad, Functor, Monad, ~>} import com.monovore.decline.Opts.help import com.monovore.decline.{Opts, Visibility} -import scribe.Level import fs2.io.file.{Files, Path} +import scribe.Level -object AppOps { +import scala.concurrent.{ExecutionContext, Future} + +object AppOpts { val helpOpt: Opts[Unit] = Opts.flag("help", help = "Display this help text", "h", Visibility.Partial).asHelp.as(()) @@ -33,6 +36,8 @@ object AppOps { Validated.fromEither(toLogLevel(str)) } + def runOpt[F[_]: Files: AquaIO: Async](implicit ec: ExecutionContext): Opts[F[ExitCode]] = Opts.subcommand(RunOpts.runCommand[F]) + def toLogLevel(logLevel: String): Either[NonEmptyList[String], Level] = { LogLevel.stringToLogLevel .get(logLevel.toLowerCase) diff --git a/cli/src/main/scala/aqua/AquaCli.scala b/cli/src/main/scala/aqua/AquaCli.scala index 55540e26..ce167dc8 100644 --- a/cli/src/main/scala/aqua/AquaCli.scala +++ b/cli/src/main/scala/aqua/AquaCli.scala @@ -7,21 +7,24 @@ import aqua.backend.ts.TypeScriptBackend import aqua.files.AquaFilesIO import aqua.model.transform.TransformConfig import aqua.parser.lift.LiftParser.Implicits.idLiftParser -import cats.{Functor, Id, Monad} -import cats.data.{Chain, NonEmptyList, Validated, ValidatedNec, ValidatedNel} +import cats.data.* import cats.effect.* import cats.effect.std.Console as ConsoleEff -import cats.syntax.apply.* -import cats.syntax.functor.* import cats.syntax.applicative.* +import cats.syntax.apply.* import cats.syntax.flatMap.* +import cats.syntax.functor.* +import cats.{Functor, Id, Monad, ~>} +import com.monovore.decline import com.monovore.decline.Opts import com.monovore.decline.effect.CommandIOApp import fs2.io.file.Files import scribe.Logging +import scala.concurrent.Future + object AquaCli extends IOApp with Logging { - import AppOps.* + import AppOpts.* sealed trait CompileTarget case object TypescriptTarget extends CompileTarget @@ -33,15 +36,18 @@ object AquaCli extends IOApp with Logging { case TypescriptTarget => TypeScriptBackend case JavaScriptTarget => - JavaScriptBackend + 
JavaScriptBackend(false) case AirTarget => AirBackend } } - def main[F[_]: Concurrent: Files: ConsoleEff](runtime: unsafe.IORuntime): Opts[F[ExitCode]] = { + def main[F[_]: Files: ConsoleEff: Async](runtime: unsafe.IORuntime): Opts[F[ExitCode]] = { implicit val r = runtime - versionOpt + implicit val aio: AquaIO[F] = new AquaFilesIO[F] + implicit val ec = r.compute + + runOpt orElse versionOpt .as( versionAndExit ) orElse helpOpt.as( @@ -67,8 +73,6 @@ object AquaCli extends IOApp with Logging { .withHandler(formatter = LogFormatter.formatter, minimumLevel = Some(logLevel)) .replace() - implicit val aio: AquaIO[F] = new AquaFilesIO[F] - // if there is `--help` or `--version` flag - show help and version // otherwise continue program execution h.map(_ => helpAndExit) orElse v.map(_ => versionAndExit) getOrElse { diff --git a/cli/src/main/scala/aqua/AquaPathCompiler.scala b/cli/src/main/scala/aqua/AquaPathCompiler.scala index e47260a9..dcc80900 100644 --- a/cli/src/main/scala/aqua/AquaPathCompiler.scala +++ b/cli/src/main/scala/aqua/AquaPathCompiler.scala @@ -43,20 +43,7 @@ object AquaPathCompiler extends Logging { AquaCompiler .compileTo[F, AquaFileError, FileModuleId, FileSpan.F, String]( sources, - id => { - source => { - val nat = new (Span.F ~> FileSpan.F) { - override def apply[A](span: Span.F[A]): FileSpan.F[A] = { - ( - FileSpan(id.file.absolute.toString, Eval.later(LocationMap(source)), span._1), - span._2 - ) - } - } - import Span.spanLiftParser - Parser.natParser(Parser.spanParser, nat)(source) - } - }, + SpanParser.parser, backend, transformConfig, targetPath.map(sources.write).getOrElse(dry[F]) diff --git a/cli/src/main/scala/aqua/RunOpts.scala b/cli/src/main/scala/aqua/RunOpts.scala new file mode 100644 index 00000000..1aaf0bd0 --- /dev/null +++ b/cli/src/main/scala/aqua/RunOpts.scala @@ -0,0 +1,52 @@ +package aqua + +import aqua.RunCommand +import aqua.parser.expr.CallArrowExpr +import aqua.parser.lift.LiftParser.Implicits.idLiftParser +import cats.data.{NonEmptyList, Validated} +import cats.effect.kernel.Async +import cats.effect.{ExitCode, IO} +import cats.syntax.applicative.* +import cats.syntax.apply.* +import cats.syntax.flatMap.* +import cats.syntax.functor.* +import cats.{Id, Monad, ~>} +import com.monovore.decline.{Command, Opts} +import fs2.io.file.Files + +import scala.concurrent.{ExecutionContext, Future} + +object RunOpts { + + val multiaddrOpt: Opts[String] = + Opts + .option[String]("addr", "Relay multiaddress", "a") + .withDefault("/dns4/kras-00.fluence.dev/tcp/19001/wss/p2p/12D3KooWR4cv1a8tv7pps4HH6wePNaK6gf1Hww5wcCMzeWxyNw51") + + val funcNameOpt: Opts[String] = + Opts + .option[String]("func", "Function to call with args", "f") + + def runOptions[F[_]: Files: AquaIO: Async](implicit ec: ExecutionContext): Opts[F[cats.effect.ExitCode]] = + (AppOpts.inputOpts[F], AppOpts.importOpts[F], multiaddrOpt, funcNameOpt).mapN { (inputF, importF, multiaddr, func) => + for { + inputV <- inputF + impsV <- importF + result <- inputV.fold(_ => cats.effect.ExitCode.Error.pure[F], { input => + impsV.fold(_ => cats.effect.ExitCode.Error.pure[F], { imps => + RunCommand.run(multiaddr, func, input, imps).map(_ => cats.effect.ExitCode.Success) + }) + }) + } yield { + result + } + + } + + def runCommand[F[_]: Files: AquaIO: Async](implicit ec: ExecutionContext): Command[F[ExitCode]] = Command( + name = "run", + header = "Run a function from an aqua code" + ) { + runOptions + } +} diff --git a/cli/src/main/scala/aqua/SpanParser.scala b/cli/src/main/scala/aqua/SpanParser.scala new file 
mode 100644 index 00000000..902f6fa7 --- /dev/null +++ b/cli/src/main/scala/aqua/SpanParser.scala @@ -0,0 +1,30 @@ +package aqua + +import aqua.files.FileModuleId +import aqua.parser.lift.{FileSpan, Span} +import aqua.parser.{Ast, Parser, ParserError} +import cats.data.* +import cats.parse.LocationMap +import cats.{Comonad, Eval, Monad, Monoid, Order, ~>} + +object SpanParser extends scribe.Logging { + def parser: FileModuleId => String => ValidatedNec[ParserError[FileSpan.F], Ast[FileSpan.F]] = { + id => { + source => { + logger.trace("creating parser...") + val nat = new (Span.F ~> FileSpan.F) { + override def apply[A](span: Span.F[A]): FileSpan.F[A] = { + ( + FileSpan(id.file.absolute.toString, Eval.later(LocationMap(source)), span._1), + span._2 + ) + } + } + import Span.spanLiftParser + val parser = Parser.natParser(Parser.spanParser, nat)(source) + logger.trace("parser created") + parser + } + } + } +} diff --git a/cli/src/main/scala/aqua/files/AquaFileSources.scala b/cli/src/main/scala/aqua/files/AquaFileSources.scala index c565b9ba..5e264351 100644 --- a/cli/src/main/scala/aqua/files/AquaFileSources.scala +++ b/cli/src/main/scala/aqua/files/AquaFileSources.scala @@ -3,7 +3,6 @@ package aqua.files import aqua.AquaIO import aqua.compiler.{AquaCompiled, AquaSources} import aqua.io.{AquaFileError, FileSystemError, ListAquaErrors} -import cats.{Functor, Monad} import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec} import cats.implicits.catsSyntaxApplicativeId import cats.syntax.either.* @@ -11,6 +10,7 @@ import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.monad.* import cats.syntax.traverse.* +import cats.{Functor, Monad} import fs2.io.file.{Files, Path} import scribe.Logging diff --git a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala index 0aab6438..a50d5707 100644 --- a/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala +++ b/compiler/src/main/scala/aqua/compiler/AquaCompiler.scala @@ -9,17 +9,16 @@ import aqua.parser.lift.{LiftParser, Span} import aqua.parser.{Ast, ParserError} import aqua.semantics.Semantics import aqua.semantics.header.HeaderSem -import cats.data.Validated.{validNec, Invalid, Valid} -import cats.data.{Chain, NonEmptyChain, NonEmptyMap, Validated, ValidatedNec} +import cats.data.Validated.{Invalid, Valid, validNec} +import cats.data.* import cats.parse.Parser0 import cats.syntax.applicative.* import cats.syntax.flatMap.* import cats.syntax.functor.* -import cats.syntax.traverse.* import cats.syntax.monoid.* -import cats.{Comonad, Monad, Monoid, Order} +import cats.syntax.traverse.* +import cats.{Comonad, Monad, Monoid, Order, ~>} import scribe.Logging -import cats.~> object AquaCompiler extends Logging { @@ -33,7 +32,7 @@ object AquaCompiler extends Logging { type Err = AquaError[I, E, S] type Ctx = NonEmptyMap[I, AquaContext] type ValidatedCtx = ValidatedNec[Err, Ctx] - + logger.trace("starting resolving sources...") new AquaParser[F, E, I, S](sources, parser) .resolve[ValidatedCtx](mod => context => @@ -50,6 +49,7 @@ object AquaCompiler extends Logging { ) .andThen { headerSem => // Analyze the body, with prepared initial context + logger.trace("semantic processing...") Semantics .process( mod.body, @@ -65,6 +65,7 @@ object AquaCompiler extends Logging { ) .map( _.andThen(modules => + logger.trace("linking modules...") Linker .link[I, AquaError[I, E, S], ValidatedCtx]( modules, @@ -73,6 +74,7 @@ object AquaCompiler extends Logging { i => 
validNec(NonEmptyMap.one(i, Monoid.empty[AquaContext])) ) .andThen { filesWithContext => + logger.trace("linking finished") filesWithContext .foldLeft[ValidatedNec[Err, Chain[AquaProcessed[I]]]]( validNec(Chain.nil) @@ -86,6 +88,7 @@ object AquaCompiler extends Logging { } .map( _.map { ap => + logger.trace("generating output...") val res = AquaRes.fromContext(ap.context, config) val compiled = backend.generate(res) AquaCompiled(ap.id, compiled, res.funcs.length.toInt, res.services.length.toInt) diff --git a/compiler/src/main/scala/aqua/compiler/AquaParser.scala b/compiler/src/main/scala/aqua/compiler/AquaParser.scala index e5d6bc19..b2985c36 100644 --- a/compiler/src/main/scala/aqua/compiler/AquaParser.scala +++ b/compiler/src/main/scala/aqua/compiler/AquaParser.scala @@ -1,18 +1,18 @@ package aqua.compiler +import aqua.compiler.AquaCompiler.logger import aqua.linker.{AquaModule, Modules} -import aqua.parser.{Ast, ParserError} import aqua.parser.head.{FilenameExpr, ImportExpr} import aqua.parser.lift.{LiftParser, Span} +import aqua.parser.{Ast, ParserError} import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec} import cats.parse.Parser0 import cats.syntax.applicative.* import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.traverse.* -import cats.{Comonad, Monad} +import cats.{Comonad, Monad, ~>} import scribe.Logging -import cats.~> // TODO: add tests class AquaParser[F[_]: Monad, E, I, S[_]: Comonad]( diff --git a/npm/index-java.js b/npm/index-java.js deleted file mode 100644 index 3b89e843..00000000 --- a/npm/index-java.js +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env node - -"use strict"; - -const { exec } = require("child_process"); -const path = require("path"); -const fs = require('fs'); - -let importArgs = [] - -const nm = path.join("./", "node_modules") -if (fs.existsSync(nm) && fs.lstatSync(nm).isDirectory()) { - importArgs = ["-m", "node_modules"] -} - -const args = [ - "java", - "-jar", - path.join(__dirname, "aqua.jar"), - ...importArgs, - ...process.argv.slice(2), -]; - -const argsString = args.join(" "); - -console.log("Aqua Java " + argsString); -exec(argsString, (err, stdout, stderr) => { - console.error("Aqua Java: " + stderr); - console.log("Aqua Java: " + stdout); - - if (err) { - process.exit(err.code); - } -}); diff --git a/npm/index.js b/npm/index.js index 0eb5418f..a264ef6a 100644 --- a/npm/index.js +++ b/npm/index.js @@ -2,41 +2,4 @@ "use strict"; -const { exec } = require("child_process"); -const path = require("path"); -const fs = require('fs'); - -const nm = path.join("./", "node_modules") -let initArgs = process.argv.slice(2) - -let args = []; -if ((initArgs.includes('-v') || initArgs.includes('--version'))) { - args = [ - "node", - path.join(__dirname, "aqua.js"), - "--version", - ]; -} else { - let importArgs = [] - if (fs.existsSync(nm) && fs.lstatSync(nm).isDirectory()) { - importArgs = ["-m", "node_modules"] - } - args = [ - "node", - path.join(__dirname, "aqua.js"), - ...importArgs, - ...initArgs, - ]; -} - -const argsString = args.join(" "); - -console.log("Aqua: " + argsString); -exec(argsString, (err, stdout, stderr) => { - console.error("Aqua: " + stderr); - console.log("Aqua: " + stdout); - - if (err) { - process.exit(err.code); - } -}); +import "./aqua.js"; diff --git a/npm/package.json b/npm/package.json index f9f70a08..8cce09c7 100644 --- a/npm/package.json +++ b/npm/package.json @@ -2,8 +2,8 @@ "name": "@fluencelabs/aqua", "version": "0.0.0", "description": "Aqua compiler", + "type": "module", "files": [ - 
"aqua.jar", "aqua.js", "index.js", "index-java.js", @@ -11,10 +11,14 @@ ], "bin": { "aqua": "index.js", - "aqua-cli": "error.js", - "aqua-j": "index-java.js" + "aqua-cli": "error.js" + }, + "scripts": { + "run": "node index.js" + }, + "dependencies": { + "@fluencelabs/fluence": "0.12.1" }, - "scripts": {}, "repository": { "type": "git", "url": "git+https://github.com/fluencelabs/aqua.git" diff --git a/parser/src/main/scala/aqua/parser/Parser.scala b/parser/src/main/scala/aqua/parser/Parser.scala index 02d3a2a2..2eebd33c 100644 --- a/parser/src/main/scala/aqua/parser/Parser.scala +++ b/parser/src/main/scala/aqua/parser/Parser.scala @@ -1,32 +1,32 @@ package aqua.parser -import cats.data.{Validated, ValidatedNec} -import aqua.parser.Ast import aqua.parser.Ast.Tree -import aqua.parser.ParserError -import aqua.parser.LexerError +import aqua.parser.{Ast, LexerError, ParserError} import aqua.parser.expr.RootExpr import aqua.parser.head.HeadExpr import aqua.parser.lexer.Token -import aqua.parser.lift.{FileSpan, LiftParser, Span} -import cats.{Comonad, Eval, ~>} -import cats.parse.LocationMap -import cats.parse.{Parser as P, Parser0 as P0} -import cats.Id import aqua.parser.lift.LiftParser.LiftErrorOps +import aqua.parser.lift.{FileSpan, LiftParser, Span} +import cats.data.{Validated, ValidatedNec} +import cats.parse.{LocationMap, Parser as P, Parser0 as P0} +import cats.{Comonad, Eval, Id, ~>} -object Parser { +object Parser extends scribe.Logging { import Span.spanLiftParser - lazy val spanParser = parserSchema[Span.F]() + val spanParser = parserSchema[Span.F]() import LiftParser.Implicits.idLiftParser lazy val idParser = parserSchema[Id]() - def parserSchema[S[_] : LiftParser : Comonad](): P0[ValidatedNec[ParserError[S], Ast[S]]] = - (HeadExpr.ast[S] ~ RootExpr.ast0[S]()).map { case (head, bodyMaybe) => + def parserSchema[S[_] : LiftParser : Comonad](): P0[ValidatedNec[ParserError[S], Ast[S]]] = { + logger.trace("creating schema...") + val parser = (HeadExpr.ast[S] ~ RootExpr.ast0[S]()).map { case (head, bodyMaybe) => bodyMaybe.map(Ast(head, _)) } + logger.trace("schema created") + parser + } def parser[S[_] : LiftParser : Comonad](p: P0[ValidatedNec[ParserError[S], Ast[S]]])(source: String): ValidatedNec[ParserError[S], Ast[S]] = { p.parseAll(source) match { diff --git a/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala b/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala index 69abe6ff..e48c4d4c 100644 --- a/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/CallArrowExpr.scala @@ -27,15 +27,20 @@ case class CallArrowExpr[F[_]]( object CallArrowExpr extends Expr.Leaf { + def ability[F[_]: LiftParser: Comonad]: P0[Option[Ability[F]]] = (Ability.dotted[F] <* `.`).? + def functionCallWithArgs[F[_]: LiftParser: Comonad] = Name.p[F] + ~ comma0(Value.`value`[F].surroundedBy(`/s*`)).between(`(` <* `/s*`, `/s*` *> `)`) + def funcCall[F[_]: LiftParser: Comonad] = ability.with1 ~ functionCallWithArgs + + def funcOnly[F[_]: LiftParser: Comonad] = funcCall.map { + case (ab, (name, args)) => + CallArrowExpr(Nil, ab, name, args) + } + override def p[F[_]: LiftParser: Comonad]: P[CallArrowExpr[F]] = { val variables: P0[Option[NonEmptyList[Name[F]]]] = (comma(Name.p[F]) <* ` <- `).backtrack.? - val ability: P0[Option[Ability[F]]] = (Ability.dotted[F] <* `.`).? 
- val functionCallWithArgs = Name.p[F] - ~ comma0(Value.`value`[F].surroundedBy(`/s*`)).between(`(` <* `/s*`, `/s*` *> `)`) - (variables.with1 ~ - (ability.with1 ~ functionCallWithArgs) - .withContext("Only results of a function call can be written to a stream") + (variables.with1 ~ funcCall.withContext("Only results of a function call can be written to a stream") ).map { case (variables, (ability, (funcName, args))) => CallArrowExpr(variables.toList.flatMap(_.toList), ability, funcName, args) diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala index 4b0b22fb..4c49bc7a 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Token.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala @@ -1,9 +1,8 @@ package aqua.parser.lexer -import cats.{Comonad, Functor} import cats.data.NonEmptyList import cats.parse.{Accumulator0, Parser as P, Parser0 as P0} -import cats.~> +import cats.{Comonad, Functor, ~>} trait Token[F[_]] { def as[T](v: T): F[T] @@ -25,6 +24,10 @@ object Token { private val upperAnum_ = upperAnum ++ f_ private val nl = Set('\n', '\r') + val inAZ = P.charIn(AZ) + val inaz = P.charIn(az) + val whileAnum = P.charsWhile(anum_) + val ` *` : P0[String] = P.charsWhile0(fSpaces) val ` ` : P[String] = P.charsWhile(fSpaces) val `const`: P[Unit] = P.string("const") @@ -59,12 +62,12 @@ object Token { val `co`: P[Unit] = P.string("co") val `:` : P[Unit] = P.char(':') val ` : ` : P[Unit] = P.char(':').surroundedBy(` `.?) - val `anum_*` : P[Unit] = P.charsWhile(anum_).void + val `anum_*` : P[Unit] = whileAnum.void - val NAME: P[String] = (P.charIn(AZ) ~ P.charsWhile(upperAnum_).?).string - val `name`: P[String] = (P.charIn(az) ~ P.charsWhile(anum_).?).string + val NAME: P[String] = (inAZ ~ P.charsWhile(upperAnum_).?).string + val `name`: P[String] = (inaz ~ whileAnum.?).string - val `Class`: P[String] = (P.charIn(AZ) ~ P.charsWhile(anum_).backtrack.?).map { case (c, s) ⇒ + val `Class`: P[String] = (inAZ ~ whileAnum.backtrack.?).map { case (c, s) ⇒ c.toString ++ s.getOrElse("") } val `\n` : P[Unit] = P.string("\n\r") | P.char('\n') | P.string("\r\n") diff --git a/parser/src/main/scala/aqua/parser/lexer/Value.scala b/parser/src/main/scala/aqua/parser/lexer/Value.scala index 3324bf59..a849199e 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Value.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Value.scala @@ -6,11 +6,10 @@ import aqua.parser.lexer.Token.* import aqua.parser.lift.LiftParser import aqua.parser.lift.LiftParser.* import aqua.types.LiteralType -import cats.parse.{Numbers, Parser as P} +import cats.parse.{Numbers, Parser as P, Parser0 as P0} import cats.syntax.comonad.* import cats.syntax.functor.* -import cats.{Comonad, Functor} -import cats.~> +import cats.{Comonad, Functor, ~>} sealed trait Value[F[_]] extends Token[F] { def mapK[K[_]: Comonad](fk: F ~> K): Value[K] @@ -46,8 +45,11 @@ object Value { def initPeerId[F[_]: LiftParser: Comonad]: P[Literal[F]] = `%init_peer_id%`.string.lift.map(Literal(_, LiteralType.string)) + val minus = P.char('-') + val dot = P.char('.') + def num[F[_]: LiftParser: Comonad]: P[Literal[F]] = - (P.char('-').?.with1 ~ Numbers.nonNegativeIntString).lift.map(fu => + (minus.?.with1 ~ Numbers.nonNegativeIntString).lift.map(fu => fu.extract match { case (Some(_), n) ⇒ Literal(fu.as(s"-$n"), LiteralType.signed) case (None, n) ⇒ Literal(fu.as(n), LiteralType.number) @@ -55,14 +57,14 @@ object Value { ) def float[F[_]: LiftParser: Comonad]: P[Literal[F]] = - (P.char('-').?.with1 ~ 
(Numbers.nonNegativeIntString <* P.char( - '.' - )) ~ Numbers.nonNegativeIntString).string.lift + (minus.?.with1 ~ (Numbers.nonNegativeIntString <* dot) ~ Numbers.nonNegativeIntString).string.lift .map(Literal(_, LiteralType.float)) + val charsWhileQuotes = P.charsWhile0(_ != '"') + // TODO make more sophisticated escaping/unescaping def string[F[_]: LiftParser: Comonad]: P[Literal[F]] = - (`"` *> P.charsWhile0(_ != '"') <* `"`).string.lift + (`"` *> charsWhileQuotes <* `"`).string.lift .map(Literal(_, LiteralType.string)) def literal[F[_]: LiftParser: Comonad]: P[Literal[F]] =