Mirror of https://github.com/fluencelabs/aqua.git (synced 2024-12-04 22:50:18 +00:00)

Run aqua code from CLI (#324)

This commit is contained in:
  parent 4e63da83f5
  commit 3844d8f2db

7  .github/workflows/test_branch.yml  vendored
@@ -55,6 +55,9 @@ jobs:
       cd ..
       sbt "cliJS/fastOptJS"
       rm -rf aqua-playground/src/compiled/examples/*
-      node cli/.js/target/scala-3.0.2/cli-fastopt.js -i aqua-playground/aqua/examples -o aqua-playground/src/compiled/examples -m aqua-playground/node_modules -c "UNIQUE_CONST = 1" -c "ANOTHER_CONST = \"ab\""
-      cd aqua-playground
+      mv cli/.js/target/scala-3.0.2/cli-fastopt.js npm/aqua.mjs
+      cd npm
+      npm i
+      node aqua.mjs -i ../aqua-playground/aqua/examples -o ../aqua-playground/src/compiled/examples -m ../aqua-playground/node_modules -c "UNIQUE_CONST = 1" -c "ANOTHER_CONST = \"ab\""
+      cd ../aqua-playground
       npm run examples
@@ -2,7 +2,25 @@ package aqua.backend
 
 object Header {
 
-  def header(isJs: Boolean): String =
+  def header(isJs: Boolean, isCommonJS: Boolean): String = {
+    val imports = if (isCommonJS) {
+      """
+        |const { Fluence, FluencePeer } = require('@fluencelabs/fluence');
+        |const {
+        | ResultCodes,
+        | RequestFlow,
+        | RequestFlowBuilder,
+        | CallParams,} = require('@fluencelabs/fluence/dist/internal/compilerSupport/v1${if (isJs) ".js" else ""}');
+        |""".stripMargin
+    } else {
+      s"""import { Fluence, FluencePeer } from '@fluencelabs/fluence';
+        |import {
+        | ResultCodes,
+        | RequestFlow,
+        | RequestFlowBuilder,
+        | CallParams
+        |} from '@fluencelabs/fluence/dist/internal/compilerSupport/v1${if (isJs) ".js" else ""}';""".stripMargin
+    }
     s"""/**
       | *
       | * This file is auto-generated. Do not edit manually: changes may be erased.
@@ -11,12 +29,7 @@ object Header {
       | * Aqua version: ${Version.version}
       | *
       | */
-      |import { Fluence, FluencePeer } from '@fluencelabs/fluence';
-      |import {
-      | ResultCodes,
-      | RequestFlow,
-      | RequestFlowBuilder,
-      | CallParams,
-      |} from '@fluencelabs/fluence/dist/internal/compilerSupport/v1${if (isJs) ".js" else ""}';
+      |$imports
       |""".stripMargin
+  }
 }
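
A minimal usage sketch of the extended signature (the real call sites are OutputFile.generate and the backends in the following hunks); the new flag only selects which import preamble is emitted into the generated file, and the values shown here are illustrative:

  // sketch, assuming Header is in scope as defined above
  val esModuleHeader: String = Header.header(isJs = true, isCommonJS = false) // emits `import { ... } from '@fluencelabs/fluence/...'`
  val commonJsHeader: String = Header.header(isJs = true, isCommonJS = true)  // emits `const { ... } = require('@fluencelabs/fluence/...')`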
@@ -6,7 +6,7 @@ import aqua.model.transform.res.AquaRes
 
 case class OutputFile(res: AquaRes) {
 
-  def generate(types: Types): String = {
+  def generate(types: Types, isCommonJS: Boolean): String = {
     import types.*
     val services = res.services
       .map(s => OutputService(s, types))
@@ -15,7 +15,7 @@ case class OutputFile(res: AquaRes) {
       .mkString("\n\n")
     val functions =
       res.funcs.map(f => OutputFunc(f, types)).map(_.generate).toList.mkString("\n\n")
-    s"""${Header.header(false)}
+    s"""${Header.header(false, isCommonJS)}
       |
       |function ${typed(
       s"""missingFields(${typed("obj", "any")}, ${typed("fields", "string[]")})""",
@@ -4,7 +4,7 @@ import aqua.backend.ts.TypeScriptTypes
 import aqua.backend.{Backend, EmptyTypes, Generated, Header, OutputFile, OutputFunc, OutputService}
 import aqua.model.transform.res.AquaRes
 
-object JavaScriptBackend extends Backend {
+case class JavaScriptBackend(isCommonJS: Boolean) extends Backend {
 
   val ext = ".js"
   val tsExt = ".d.ts"
@@ -18,7 +18,7 @@ object JavaScriptBackend extends Backend {
     val functions =
       res.funcs.map(f => TypeScriptTypes.funcType(f)).map(_.generate).toList.mkString("\n\n")
 
-    val body = s"""${Header.header(false)}
+    val body = s"""${Header.header(false, false)}
      |
      |// Services
      |$services
@@ -33,6 +33,6 @@ object JavaScriptBackend extends Backend {
   override def generate(res: AquaRes): Seq[Generated] =
     if (res.isEmpty) Nil
     else {
-      Generated(ext, OutputFile(res).generate(EmptyTypes)):: typesFile(res) :: Nil
+      Generated(ext, OutputFile(res).generate(EmptyTypes, isCommonJS)):: typesFile(res) :: Nil
     }
 }
@@ -9,5 +9,5 @@ object TypeScriptBackend extends Backend {
   val ext = ".ts"
 
   override def generate(res: AquaRes): Seq[Generated] =
-    if (res.isEmpty) Nil else Generated(ext, OutputFile(res).generate(TypeScriptTypes)) :: Nil
+    if (res.isEmpty) Nil else Generated(ext, OutputFile(res).generate(TypeScriptTypes, false)) :: Nil
 }
@@ -5,7 +5,7 @@ import aqua.model.transform.res.AquaRes
 
 case class TypeScriptTypesFile(res: AquaRes) {
   def generate: String =
-    s"""${Header.header(false)}
+    s"""${Header.header(false, false)}
       |
       |// Services
       |${res.services.map(TSServiceTypes(_)).map(_.generate).toList.mkString("\n\n")}
@@ -17,7 +17,7 @@ val declineV = "2.1.0"
 name := "aqua-hll"
 
 val commons = Seq(
-  baseAquaVersion := "0.3.1",
+  baseAquaVersion := "0.3.2",
   version := baseAquaVersion.value + "-" + sys.env.getOrElse("BUILD_NUMBER", "SNAPSHOT"),
   scalaVersion := dottyVersion,
   libraryDependencies ++= Seq(
@@ -56,7 +56,7 @@ lazy val cli = crossProject(JSPlatform, JVMPlatform)
 
 lazy val cliJS = cli.js
   .settings(
-    scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)),
+    scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.ESModule)),
     scalaJSUseMainModuleInitializer := true
   )
 
86  cli/.js/src/main/scala/aqua/CallJsFunction.scala  Normal file
@@ -0,0 +1,86 @@
package aqua

import aqua.model.transform.TransformConfig
import aqua.model.transform.res.FuncRes
import aqua.types.Type

import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.scalajs.js

object CallJsFunction {

  // Register a service that returns no result
  def registerUnitService(
    peer: FluencePeer,
    serviceId: String,
    fnName: String,
    handler: (js.Array[js.Any]) => Unit
  ) = {
    peer.internals.callServiceHandler.use((req, resp, next) => {
      if (req.serviceId == serviceId && req.fnName == fnName) {
        handler(req.args)
        resp.retCode = ResultCodes.success
        resp.result = new js.Object {}
      }

      next()
    })
  }

  // Call a function with generated air script
  def funcCallJs(
    peer: FluencePeer,
    air: String,
    args: List[(String, js.Any)],
    returnType: Option[Type],
    config: TransformConfig
  )(implicit ec: ExecutionContext): Future[Any] = {
    val resultPromise: Promise[js.Any] = Promise[js.Any]()

    val requestBuilder = new RequestFlowBuilder()
    val relayPeerId = peer.getStatus().relayPeerId

    requestBuilder
      .disableInjections()
      .withRawScript(air)
      .configHandler((handler, r) => {
        handler.on(config.getDataService, config.relayVarName.getOrElse("-relay-"), (_, _) => { relayPeerId })
        args.foreach { (fnName, arg) =>
          handler.on(config.getDataService, fnName, (_, _) => arg)
        }
        handler.onEvent(
          config.callbackService,
          config.respFuncName,
          (args, _) => {
            if (args.length == 1) {
              resultPromise.success(args.pop())
            } else if (args.length == 0) {
              resultPromise.success(())
            } else {
              resultPromise.success(args)
            }
            ()
          }
        )
        handler.onEvent(
          config.errorHandlingService,
          config.errorFuncName,
          (args, _) => {
            resultPromise.failure(new RuntimeException(args.pop().toString))
            ()
          }
        )
      })
      .handleScriptError((err) => {
        resultPromise.failure(new RuntimeException("script error: " + err.toString))
      })
      .handleTimeout(() => {
        if (!resultPromise.isCompleted)
          resultPromise.failure(new RuntimeException(s"Request timed out"))
      })

    peer.internals.initiateFlow(requestBuilder.build()).toFuture.flatMap { _ =>
      returnType.fold(resultPromise.success(()).future)(_ => resultPromise.future)
    }
  }
}
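
A minimal usage sketch of the two entry points above, assuming a started peer obtained via Fluence.getPeer(), an `airScript` string produced by the AIR backend, and a default TransformConfig (this mirrors what RunCommand.funcCall does in the files below):

  implicit val ec: scala.concurrent.ExecutionContext = scala.concurrent.ExecutionContext.global
  val peer = Fluence.getPeer()
  // route calls to the ("console", "print") service to stdout, as RunCommand does
  CallJsFunction.registerUnitService(peer, "console", "print", args => println("print: " + args))
  // fire the raw AIR script; the response/error/timeout handlers resolve the returned future
  val done: scala.concurrent.Future[Any] =
    CallJsFunction.funcCallJs(peer, airScript, args = Nil, returnType = None, config = TransformConfig())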
136  cli/.js/src/main/scala/aqua/JsTypes.scala  Normal file
@@ -0,0 +1,136 @@
package aqua

import scala.concurrent.Promise
import scala.scalajs.js
import scala.scalajs.js.annotation.JSImport

/***
 * This is description of types from Fluence JS library.
 * See here for details https://github.com/fluencelabs/fluence-js
 */

/**
 * Particle context. Contains additional information about particle which triggered `call` air instruction from AVM
 */
trait ParticleContext {
  def particleId: String
  def initPeerId: String
  def timestamp: Int
  def ttl: Int
  def signature: String
}

object ResultCodes {
  val success = 0
  val unknownError = 1
  val exceptionInHandler = 2
}

/**
 * Represents the result of the `call` air instruction to be returned into AVM
 */
trait CallServiceResult extends js.Object {
  def retCode: Int
  def retCode_=(code: Int): Unit
  def result: js.Any
  def result_=(res: js.Any): Unit
}

/**
 * Represents the information passed from AVM when a `call` air instruction is executed on the local peer
 */
trait CallServiceData extends js.Object {
  def serviceId: String
  def fnName: String
  def args: js.Array[js.Any]
  def particleContext: ParticleContext
  def tetraplets: js.Any
}

trait Internals extends js.Object {
  def initiateFlow(r: RequestFlow): js.Promise[js.Any]
  def callServiceHandler: CallServiceHandler
}

/**
 * Information about Fluence Peer connection
 */
trait PeerStatus extends js.Object {
  def isInitialized: Boolean
  def isConnected: Boolean
  def peerId: String
  def relayPeerId: String
}

/**
 * This class implements the Fluence protocol for javascript-based environments.
 * It provides all the necessary features to communicate with Fluence network
 */
@js.native
@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "FluencePeer")
class FluencePeer extends js.Object {
  val internals: Internals = js.native
  def getStatus(): PeerStatus = js.native
  def stop(): js.Promise[Unit] = js.native
}

/**
 * Public interface to Fluence JS SDK
 */
@js.native
@JSImport("@fluencelabs/fluence", "Fluence")
object Fluence extends js.Object {
  def start(str: String): js.Promise[js.Any] = js.native
  def stop(): js.Promise[js.Any] = js.native
  def getPeer(): FluencePeer = js.native
  def getStatus(): PeerStatus = js.native
}

/**
 * Class defines the handling of a `call` air intruction executed by AVM on the local peer.
 * All the execution process is defined by the chain of middlewares - architecture popular among backend web frameworks.
 */
@js.native
@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "CallServiceHandler")
class CallServiceHandler extends js.Object {

  def on(
    serviceId: String,
    fnName: String,
    handler: js.Function2[js.Array[js.Any], js.Any, js.Any]
  ): js.Function0[CallServiceHandler] = js.native

  def onEvent(
    serviceId: String,
    fnName: String,
    handler: js.Function2[js.Array[js.Any], js.Any, js.Any]
  ): js.Function0[CallServiceHandler] = js.native

  def use(f: js.Function3[CallServiceData, CallServiceResult, js.Function0[Unit], Unit]): CallServiceHandler = js.native
}

/**
 * The class represents the current view (and state) of distributed the particle execution process from client's point of view.
 * It stores the intermediate particles state during the process. RequestFlow is identified by the id of the particle that is executed during the flow.
 * Each RequestFlow contains a separate (unique to the current flow) CallServiceHandler where the handling of `call` AIR instruction takes place
 * Please note, that RequestFlow's is handler is combined with the handler from client before the execution occures.
 * After the combination middlewares from RequestFlow are executed before client handler's middlewares.
 */
@js.native
@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "RequestFlow")
class RequestFlow extends js.Object {}

/**
 * Builder class for configuring and creating Request Flows
 */
@js.native
@JSImport("@fluencelabs/fluence/dist/internal/compilerSupport/v1.js", "RequestFlowBuilder")
class RequestFlowBuilder extends js.Object {
  def withRawScript(air: String): RequestFlowBuilder = js.native
  def configHandler(f: js.Function2[CallServiceHandler, js.Any, Unit]): RequestFlowBuilder =
    js.native
  def disableInjections(): RequestFlowBuilder = js.native
  def build(): RequestFlow = js.native
  def handleScriptError(f: js.Function1[js.Any, Unit]): RequestFlowBuilder = js.native
  def handleTimeout(f: js.Function0[Unit]): RequestFlowBuilder = js.native
}
142  cli/.js/src/main/scala/aqua/RunCommand.scala  Normal file
@@ -0,0 +1,142 @@
package aqua

import aqua.backend.Generated
import aqua.backend.air.AirBackend
import aqua.backend.js.JavaScriptBackend
import aqua.backend.ts.TypeScriptBackend
import aqua.compiler.{AquaCompiled, AquaCompiler}
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
import aqua.io.AquaFileError
import aqua.model.transform.TransformConfig
import aqua.model.transform.res.FuncRes
import aqua.parser.expr.CallArrowExpr
import aqua.parser.lexer.Literal
import aqua.parser.lift.FileSpan
import cats.data.*
import cats.effect.kernel.{Async, Clock}
import cats.effect.syntax.async.*
import cats.effect.{IO, IOApp, Sync}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.monad.*
import cats.syntax.show.*
import cats.{Id, Monad, ~>}
import fs2.io.file.{Files, Path}
import scribe.Logging

import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.scalajs.js
import scala.scalajs.js.annotation.*

object RunCommand extends Logging {

  /**
   * Calls an air code with FluenceJS SDK.
   * @param multiaddr relay to connect to
   * @param air code to call
   * @return
   */
  def funcCall(multiaddr: String, air: Generated, config: TransformConfig)(implicit
    ec: ExecutionContext
  ): Future[Validated[String, Unit]] = {
    (for {
      _ <- Fluence
        .start(multiaddr)
        .toFuture
      peer = Fluence.getPeer()
      _ = CallJsFunction.registerUnitService(
        peer,
        "console",
        "print",
        args => println("print: " + args)
      )
      result <- CallJsFunction.funcCallJs(
        peer,
        air.content,
        Nil,
        None, // TODO
        config
      )
      _ <- peer.stop().toFuture
    } yield {
      Validated.Valid(())
    })
  }

  val generatedFuncName = "callerUniqueFunction"

  /**
   * Runs a function that is located in `input` file with FluenceJS SDK. Returns no output
   * @param multiaddr relay to connect to
   * @param func function name
   * @param input path to an aqua code with a function
   * @param imports the sources the input needs
   */
  def run[F[_]: Files: AquaIO: Async](
    multiaddr: String,
    func: String,
    input: Path,
    imports: List[Path],
    config: TransformConfig = TransformConfig()
  )(implicit ec: ExecutionContext): F[Unit] = {
    implicit val aio: AquaIO[IO] = new AquaFilesIO[IO]

    val generatedFile = Path("./.aqua/call0.aqua").absolute
    val absInput = input.absolute
    val code =
      s"""import "${absInput.toString}"
         |
         |func $generatedFuncName():
         |  $func
         |""".stripMargin

    for {
      _ <- AquaIO[F].writeFile(generatedFile, code).value
      importsWithInput = absInput +: imports.map(_.absolute)
      sources = new AquaFileSources[F](generatedFile, importsWithInput)
      compileResult <- Clock[F].timed(AquaCompiler
        .compile[F, AquaFileError, FileModuleId, FileSpan.F](
          sources,
          SpanParser.parser,
          AirBackend,
          config
        ))
      (compileTime, airV) = compileResult
      callResult <- Clock[F].timed {
        airV match {
          case Validated.Valid(airC: Chain[AquaCompiled[FileModuleId]]) =>
            // Cause we generate input with only one function, we should have only one air compiled content
            airC.headOption
              .flatMap(_.compiled.headOption)
              .map { air =>
                Async[F].fromFuture {
                  funcCall(multiaddr, air, config).map(_.toValidatedNec).pure[F]
                }
              }
              .getOrElse {
                Validated
                  .invalidNec("Unexpected. There could be only one generated function.")
                  .pure[F]
              }
          case Validated.Invalid(errs) =>
            import ErrorRendering.showError
            Validated.invalid(errs.map(_.show)).pure[F]

        }
      }
      (callTime, result) = callResult
    } yield {
      logger.debug(s"Compile time: ${compileTime.toMillis}ms")
      logger.debug(s"Call time: ${callTime.toMillis}ms")
      result.fold(
        { (errs: NonEmptyChain[String]) =>
          errs.toChain.toList.foreach(err => println(err + "\n"))
        },
        identity
      )
    }
  }

}
19  cli/.jvm/src/main/scala/aqua/RunCommand.scala  Normal file
@@ -0,0 +1,19 @@
package aqua

import aqua.parser.expr.CallArrowExpr
import cats.Monad
import cats.effect.IO
import cats.effect.kernel.Async
import fs2.io.file.{Files, Path}

import scala.concurrent.{ExecutionContext, Future}

object RunCommand {

  def run[F[_]: Monad: Files: AquaIO: Async](
    multiaddr: String,
    func: String,
    input: Path,
    imps: List[Path]
  )(implicit ec: ExecutionContext): F[Unit] = ???
}
@@ -32,7 +32,7 @@ class WriteFileSpec extends AnyFlatSpec with Matchers {
     Files[IO].deleteIfExists(targetTsFile).unsafeRunSync()
 
     AquaPathCompiler
-      .compileFilesTo[IO](src, List.empty, Option(targetJs), JavaScriptBackend, bc)
+      .compileFilesTo[IO](src, List.empty, Option(targetJs), JavaScriptBackend(false), bc)
       .unsafeRunSync()
       .leftMap { err =>
         println(err)
@@ -7,19 +7,22 @@ import aqua.parser.expr.ConstantExpr
 import aqua.parser.lift.LiftParser
 import cats.data.Validated.{Invalid, Valid}
 import cats.data.{NonEmptyList, Validated, ValidatedNec, ValidatedNel}
-import cats.effect.{ExitCode, IO}
+import cats.effect.kernel.Async
 import cats.effect.std.Console
-import cats.syntax.functor.*
-import cats.syntax.traverse.*
+import cats.effect.{ExitCode, IO}
 import cats.syntax.applicative.*
 import cats.syntax.flatMap.*
-import cats.{Comonad, Functor, Monad}
+import cats.syntax.functor.*
+import cats.syntax.traverse.*
+import cats.{Comonad, Functor, Monad, ~>}
 import com.monovore.decline.Opts.help
 import com.monovore.decline.{Opts, Visibility}
-import scribe.Level
 import fs2.io.file.{Files, Path}
+import scribe.Level
 
-object AppOps {
+import scala.concurrent.{ExecutionContext, Future}
+
+object AppOpts {
 
   val helpOpt: Opts[Unit] =
     Opts.flag("help", help = "Display this help text", "h", Visibility.Partial).asHelp.as(())
@@ -33,6 +36,8 @@ object AppOps {
       Validated.fromEither(toLogLevel(str))
     }
 
+  def runOpt[F[_]: Files: AquaIO: Async](implicit ec: ExecutionContext): Opts[F[ExitCode]] = Opts.subcommand(RunOpts.runCommand[F])
+
   def toLogLevel(logLevel: String): Either[NonEmptyList[String], Level] = {
     LogLevel.stringToLogLevel
       .get(logLevel.toLowerCase)
@@ -7,21 +7,24 @@ import aqua.backend.ts.TypeScriptBackend
 import aqua.files.AquaFilesIO
 import aqua.model.transform.TransformConfig
 import aqua.parser.lift.LiftParser.Implicits.idLiftParser
-import cats.{Functor, Id, Monad}
-import cats.data.{Chain, NonEmptyList, Validated, ValidatedNec, ValidatedNel}
+import cats.data.*
 import cats.effect.*
 import cats.effect.std.Console as ConsoleEff
-import cats.syntax.apply.*
-import cats.syntax.functor.*
 import cats.syntax.applicative.*
+import cats.syntax.apply.*
 import cats.syntax.flatMap.*
+import cats.syntax.functor.*
+import cats.{Functor, Id, Monad, ~>}
+import com.monovore.decline
 import com.monovore.decline.Opts
 import com.monovore.decline.effect.CommandIOApp
 import fs2.io.file.Files
 import scribe.Logging
 
+import scala.concurrent.Future
+
 object AquaCli extends IOApp with Logging {
-  import AppOps.*
+  import AppOpts.*
 
   sealed trait CompileTarget
   case object TypescriptTarget extends CompileTarget
@@ -33,15 +36,18 @@ object AquaCli extends IOApp with Logging {
       case TypescriptTarget =>
        TypeScriptBackend
      case JavaScriptTarget =>
-        JavaScriptBackend
+        JavaScriptBackend(false)
      case AirTarget =>
        AirBackend
    }
  }
 
-  def main[F[_]: Concurrent: Files: ConsoleEff](runtime: unsafe.IORuntime): Opts[F[ExitCode]] = {
+  def main[F[_]: Files: ConsoleEff: Async](runtime: unsafe.IORuntime): Opts[F[ExitCode]] = {
    implicit val r = runtime
-    versionOpt
+    implicit val aio: AquaIO[F] = new AquaFilesIO[F]
+    implicit val ec = r.compute
+
+    runOpt orElse versionOpt
      .as(
        versionAndExit
      ) orElse helpOpt.as(
@@ -67,8 +73,6 @@ object AquaCli extends IOApp with Logging {
          .withHandler(formatter = LogFormatter.formatter, minimumLevel = Some(logLevel))
          .replace()
 
-        implicit val aio: AquaIO[F] = new AquaFilesIO[F]
-
        // if there is `--help` or `--version` flag - show help and version
        // otherwise continue program execution
        h.map(_ => helpAndExit) orElse v.map(_ => versionAndExit) getOrElse {
@@ -43,20 +43,7 @@ object AquaPathCompiler extends Logging {
     AquaCompiler
       .compileTo[F, AquaFileError, FileModuleId, FileSpan.F, String](
         sources,
-        id => {
-          source => {
-            val nat = new (Span.F ~> FileSpan.F) {
-              override def apply[A](span: Span.F[A]): FileSpan.F[A] = {
-                (
-                  FileSpan(id.file.absolute.toString, Eval.later(LocationMap(source)), span._1),
-                  span._2
-                )
-              }
-            }
-            import Span.spanLiftParser
-            Parser.natParser(Parser.spanParser, nat)(source)
-          }
-        },
+        SpanParser.parser,
         backend,
         transformConfig,
         targetPath.map(sources.write).getOrElse(dry[F])
52  cli/src/main/scala/aqua/RunOpts.scala  Normal file
@@ -0,0 +1,52 @@
package aqua

import aqua.RunCommand
import aqua.parser.expr.CallArrowExpr
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
import cats.data.{NonEmptyList, Validated}
import cats.effect.kernel.Async
import cats.effect.{ExitCode, IO}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.{Id, Monad, ~>}
import com.monovore.decline.{Command, Opts}
import fs2.io.file.Files

import scala.concurrent.{ExecutionContext, Future}

object RunOpts {

  val multiaddrOpt: Opts[String] =
    Opts
      .option[String]("addr", "Relay multiaddress", "a")
      .withDefault("/dns4/kras-00.fluence.dev/tcp/19001/wss/p2p/12D3KooWR4cv1a8tv7pps4HH6wePNaK6gf1Hww5wcCMzeWxyNw51")

  val funcNameOpt: Opts[String] =
    Opts
      .option[String]("func", "Function to call with args", "f")

  def runOptions[F[_]: Files: AquaIO: Async](implicit ec: ExecutionContext): Opts[F[cats.effect.ExitCode]] =
    (AppOpts.inputOpts[F], AppOpts.importOpts[F], multiaddrOpt, funcNameOpt).mapN { (inputF, importF, multiaddr, func) =>
      for {
        inputV <- inputF
        impsV <- importF
        result <- inputV.fold(_ => cats.effect.ExitCode.Error.pure[F], { input =>
          impsV.fold(_ => cats.effect.ExitCode.Error.pure[F], { imps =>
            RunCommand.run(multiaddr, func, input, imps).map(_ => cats.effect.ExitCode.Success)
          })
        })
      } yield {
        result
      }

    }

  def runCommand[F[_]: Files: AquaIO: Async](implicit ec: ExecutionContext): Command[F[ExitCode]] = Command(
    name = "run",
    header = "Run a function from an aqua code"
  ) {
    runOptions
  }
}
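
A hedged sketch of what the new `run` subcommand ultimately executes (the relay address and option names come from this diff; the function call and file path are hypothetical examples, and IO is just one possible effect type given the required Files/AquaIO/Async instances):

  // roughly what `aqua run --addr <relay> --func "someFunc()"` does with an input file
  RunCommand.run[IO](
    multiaddr = "/dns4/kras-00.fluence.dev/tcp/19001/wss/p2p/12D3KooWR4cv1a8tv7pps4HH6wePNaK6gf1Hww5wcCMzeWxyNw51",
    func = "someFunc()",          // hypothetical function call from the input file
    input = Path("./main.aqua"),  // hypothetical input path
    imports = Nil
  )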
30  cli/src/main/scala/aqua/SpanParser.scala  Normal file
@@ -0,0 +1,30 @@
package aqua

import aqua.files.FileModuleId
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{Ast, Parser, ParserError}
import cats.data.*
import cats.parse.LocationMap
import cats.{Comonad, Eval, Monad, Monoid, Order, ~>}

object SpanParser extends scribe.Logging {
  def parser: FileModuleId => String => ValidatedNec[ParserError[FileSpan.F], Ast[FileSpan.F]] = {
    id => {
      source => {
        logger.trace("creating parser...")
        val nat = new (Span.F ~> FileSpan.F) {
          override def apply[A](span: Span.F[A]): FileSpan.F[A] = {
            (
              FileSpan(id.file.absolute.toString, Eval.later(LocationMap(source)), span._1),
              span._2
            )
          }
        }
        import Span.spanLiftParser
        val parser = Parser.natParser(Parser.spanParser, nat)(source)
        logger.trace("parser created")
        parser
      }
    }
  }
}
@@ -3,7 +3,6 @@ package aqua.files
 import aqua.AquaIO
 import aqua.compiler.{AquaCompiled, AquaSources}
 import aqua.io.{AquaFileError, FileSystemError, ListAquaErrors}
-import cats.{Functor, Monad}
 import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
 import cats.implicits.catsSyntaxApplicativeId
 import cats.syntax.either.*
@@ -11,6 +10,7 @@ import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.monad.*
 import cats.syntax.traverse.*
+import cats.{Functor, Monad}
 import fs2.io.file.{Files, Path}
 import scribe.Logging
 
@@ -9,17 +9,16 @@ import aqua.parser.lift.{LiftParser, Span}
 import aqua.parser.{Ast, ParserError}
 import aqua.semantics.Semantics
 import aqua.semantics.header.HeaderSem
-import cats.data.Validated.{validNec, Invalid, Valid}
-import cats.data.{Chain, NonEmptyChain, NonEmptyMap, Validated, ValidatedNec}
+import cats.data.Validated.{Invalid, Valid, validNec}
+import cats.data.*
 import cats.parse.Parser0
 import cats.syntax.applicative.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
-import cats.syntax.traverse.*
 import cats.syntax.monoid.*
-import cats.{Comonad, Monad, Monoid, Order}
+import cats.syntax.traverse.*
+import cats.{Comonad, Monad, Monoid, Order, ~>}
 import scribe.Logging
-import cats.~>
 
 object AquaCompiler extends Logging {
 
@@ -33,7 +32,7 @@ object AquaCompiler extends Logging {
     type Err = AquaError[I, E, S]
     type Ctx = NonEmptyMap[I, AquaContext]
     type ValidatedCtx = ValidatedNec[Err, Ctx]
-
+    logger.trace("starting resolving sources...")
     new AquaParser[F, E, I, S](sources, parser)
       .resolve[ValidatedCtx](mod =>
         context =>
@@ -50,6 +49,7 @@ object AquaCompiler extends Logging {
           )
           .andThen { headerSem =>
             // Analyze the body, with prepared initial context
+            logger.trace("semantic processing...")
             Semantics
               .process(
                 mod.body,
@@ -65,6 +65,7 @@ object AquaCompiler extends Logging {
       )
       .map(
         _.andThen(modules =>
+          logger.trace("linking modules...")
           Linker
             .link[I, AquaError[I, E, S], ValidatedCtx](
               modules,
@@ -73,6 +74,7 @@ object AquaCompiler extends Logging {
               i => validNec(NonEmptyMap.one(i, Monoid.empty[AquaContext]))
             )
             .andThen { filesWithContext =>
+              logger.trace("linking finished")
              filesWithContext
                .foldLeft[ValidatedNec[Err, Chain[AquaProcessed[I]]]](
                  validNec(Chain.nil)
@@ -86,6 +88,7 @@ object AquaCompiler extends Logging {
              }
              .map(
                _.map { ap =>
+                  logger.trace("generating output...")
                  val res = AquaRes.fromContext(ap.context, config)
                  val compiled = backend.generate(res)
                  AquaCompiled(ap.id, compiled, res.funcs.length.toInt, res.services.length.toInt)
@@ -1,18 +1,18 @@
 package aqua.compiler
 
+import aqua.compiler.AquaCompiler.logger
 import aqua.linker.{AquaModule, Modules}
-import aqua.parser.{Ast, ParserError}
 import aqua.parser.head.{FilenameExpr, ImportExpr}
 import aqua.parser.lift.{LiftParser, Span}
+import aqua.parser.{Ast, ParserError}
 import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
 import cats.parse.Parser0
 import cats.syntax.applicative.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.traverse.*
-import cats.{Comonad, Monad}
+import cats.{Comonad, Monad, ~>}
 import scribe.Logging
-import cats.~>
 
 // TODO: add tests
 class AquaParser[F[_]: Monad, E, I, S[_]: Comonad](
@@ -1,34 +0,0 @@
#!/usr/bin/env node

"use strict";

const { exec } = require("child_process");
const path = require("path");
const fs = require('fs');

let importArgs = []

const nm = path.join("./", "node_modules")
if (fs.existsSync(nm) && fs.lstatSync(nm).isDirectory()) {
    importArgs = ["-m", "node_modules"]
}

const args = [
  "java",
  "-jar",
  path.join(__dirname, "aqua.jar"),
  ...importArgs,
  ...process.argv.slice(2),
];

const argsString = args.join(" ");

console.log("Aqua Java " + argsString);
exec(argsString, (err, stdout, stderr) => {
  console.error("Aqua Java: " + stderr);
  console.log("Aqua Java: " + stdout);

  if (err) {
    process.exit(err.code);
  }
});
39  npm/index.js
@@ -2,41 +2,4 @@
 
 "use strict";
 
-const { exec } = require("child_process");
+import "./aqua.js";
-const path = require("path");
-const fs = require('fs');
-
-const nm = path.join("./", "node_modules")
-let initArgs = process.argv.slice(2)
-
-let args = [];
-if ((initArgs.includes('-v') || initArgs.includes('--version'))) {
-  args = [
-    "node",
-    path.join(__dirname, "aqua.js"),
-    "--version",
-  ];
-} else {
-  let importArgs = []
-  if (fs.existsSync(nm) && fs.lstatSync(nm).isDirectory()) {
-    importArgs = ["-m", "node_modules"]
-  }
-  args = [
-    "node",
-    path.join(__dirname, "aqua.js"),
-    ...importArgs,
-    ...initArgs,
-  ];
-}
-
-const argsString = args.join(" ");
-
-console.log("Aqua: " + argsString);
-exec(argsString, (err, stdout, stderr) => {
-  console.error("Aqua: " + stderr);
-  console.log("Aqua: " + stdout);
-
-  if (err) {
-    process.exit(err.code);
-  }
-});
@@ -2,8 +2,8 @@
   "name": "@fluencelabs/aqua",
   "version": "0.0.0",
   "description": "Aqua compiler",
+  "type": "module",
   "files": [
-    "aqua.jar",
     "aqua.js",
     "index.js",
     "index-java.js",
@@ -11,10 +11,14 @@
   ],
   "bin": {
     "aqua": "index.js",
-    "aqua-cli": "error.js",
-    "aqua-j": "index-java.js"
+    "aqua-cli": "error.js"
+  },
+  "scripts": {
+    "run": "node index.js"
+  },
+  "dependencies": {
+    "@fluencelabs/fluence": "0.12.1"
   },
-  "scripts": {},
   "repository": {
     "type": "git",
     "url": "git+https://github.com/fluencelabs/aqua.git"
@@ -1,32 +1,32 @@
 package aqua.parser
 
-import cats.data.{Validated, ValidatedNec}
-import aqua.parser.Ast
 import aqua.parser.Ast.Tree
-import aqua.parser.ParserError
-import aqua.parser.LexerError
+import aqua.parser.{Ast, LexerError, ParserError}
 import aqua.parser.expr.RootExpr
 import aqua.parser.head.HeadExpr
 import aqua.parser.lexer.Token
-import aqua.parser.lift.{FileSpan, LiftParser, Span}
-import cats.{Comonad, Eval, ~>}
-import cats.parse.LocationMap
-import cats.parse.{Parser as P, Parser0 as P0}
-import cats.Id
 import aqua.parser.lift.LiftParser.LiftErrorOps
+import aqua.parser.lift.{FileSpan, LiftParser, Span}
+import cats.data.{Validated, ValidatedNec}
+import cats.parse.{LocationMap, Parser as P, Parser0 as P0}
+import cats.{Comonad, Eval, Id, ~>}
 
 
-object Parser {
+object Parser extends scribe.Logging {
 
   import Span.spanLiftParser
-  lazy val spanParser = parserSchema[Span.F]()
+  val spanParser = parserSchema[Span.F]()
   import LiftParser.Implicits.idLiftParser
   lazy val idParser = parserSchema[Id]()
 
-  def parserSchema[S[_] : LiftParser : Comonad](): P0[ValidatedNec[ParserError[S], Ast[S]]] =
-    (HeadExpr.ast[S] ~ RootExpr.ast0[S]()).map { case (head, bodyMaybe) =>
+  def parserSchema[S[_] : LiftParser : Comonad](): P0[ValidatedNec[ParserError[S], Ast[S]]] = {
+    logger.trace("creating schema...")
+    val parser = (HeadExpr.ast[S] ~ RootExpr.ast0[S]()).map { case (head, bodyMaybe) =>
       bodyMaybe.map(Ast(head, _))
     }
+    logger.trace("schema created")
+    parser
+  }
 
   def parser[S[_] : LiftParser : Comonad](p: P0[ValidatedNec[ParserError[S], Ast[S]]])(source: String): ValidatedNec[ParserError[S], Ast[S]] = {
     p.parseAll(source) match {
@@ -27,15 +27,20 @@ case class CallArrowExpr[F[_]](
 
 object CallArrowExpr extends Expr.Leaf {
 
+  def ability[F[_]: LiftParser: Comonad]: P0[Option[Ability[F]]] = (Ability.dotted[F] <* `.`).?
+  def functionCallWithArgs[F[_]: LiftParser: Comonad] = Name.p[F]
+    ~ comma0(Value.`value`[F].surroundedBy(`/s*`)).between(`(` <* `/s*`, `/s*` *> `)`)
+  def funcCall[F[_]: LiftParser: Comonad] = ability.with1 ~ functionCallWithArgs
+
+  def funcOnly[F[_]: LiftParser: Comonad] = funcCall.map {
+    case (ab, (name, args)) =>
+      CallArrowExpr(Nil, ab, name, args)
+  }
+
   override def p[F[_]: LiftParser: Comonad]: P[CallArrowExpr[F]] = {
     val variables: P0[Option[NonEmptyList[Name[F]]]] = (comma(Name.p[F]) <* ` <- `).backtrack.?
-    val ability: P0[Option[Ability[F]]] = (Ability.dotted[F] <* `.`).?
-    val functionCallWithArgs = Name.p[F]
-      ~ comma0(Value.`value`[F].surroundedBy(`/s*`)).between(`(` <* `/s*`, `/s*` *> `)`)
 
-    (variables.with1 ~
-      (ability.with1 ~ functionCallWithArgs)
-        .withContext("Only results of a function call can be written to a stream")
+    (variables.with1 ~ funcCall.withContext("Only results of a function call can be written to a stream")
     ).map {
       case (variables, (ability, (funcName, args))) =>
         CallArrowExpr(variables.toList.flatMap(_.toList), ability, funcName, args)
@@ -1,9 +1,8 @@
 package aqua.parser.lexer
 
-import cats.{Comonad, Functor}
 import cats.data.NonEmptyList
 import cats.parse.{Accumulator0, Parser as P, Parser0 as P0}
-import cats.~>
+import cats.{Comonad, Functor, ~>}
 
 trait Token[F[_]] {
   def as[T](v: T): F[T]
@@ -25,6 +24,10 @@ object Token {
   private val upperAnum_ = upperAnum ++ f_
   private val nl = Set('\n', '\r')
 
+  val inAZ = P.charIn(AZ)
+  val inaz = P.charIn(az)
+  val whileAnum = P.charsWhile(anum_)
+
   val ` *` : P0[String] = P.charsWhile0(fSpaces)
   val ` ` : P[String] = P.charsWhile(fSpaces)
   val `const`: P[Unit] = P.string("const")
@@ -59,12 +62,12 @@ object Token {
   val `co`: P[Unit] = P.string("co")
   val `:` : P[Unit] = P.char(':')
   val ` : ` : P[Unit] = P.char(':').surroundedBy(` `.?)
-  val `anum_*` : P[Unit] = P.charsWhile(anum_).void
+  val `anum_*` : P[Unit] = whileAnum.void
 
-  val NAME: P[String] = (P.charIn(AZ) ~ P.charsWhile(upperAnum_).?).string
-  val `name`: P[String] = (P.charIn(az) ~ P.charsWhile(anum_).?).string
+  val NAME: P[String] = (inAZ ~ P.charsWhile(upperAnum_).?).string
+  val `name`: P[String] = (inaz ~ whileAnum.?).string
 
-  val `Class`: P[String] = (P.charIn(AZ) ~ P.charsWhile(anum_).backtrack.?).map { case (c, s) ⇒
+  val `Class`: P[String] = (inAZ ~ whileAnum.backtrack.?).map { case (c, s) ⇒
     c.toString ++ s.getOrElse("")
   }
   val `\n` : P[Unit] = P.string("\n\r") | P.char('\n') | P.string("\r\n")
@@ -6,11 +6,10 @@ import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser.*
 import aqua.types.LiteralType
-import cats.parse.{Numbers, Parser as P}
+import cats.parse.{Numbers, Parser as P, Parser0 as P0}
 import cats.syntax.comonad.*
 import cats.syntax.functor.*
-import cats.{Comonad, Functor}
-import cats.~>
+import cats.{Comonad, Functor, ~>}
 
 sealed trait Value[F[_]] extends Token[F] {
   def mapK[K[_]: Comonad](fk: F ~> K): Value[K]
@@ -46,8 +45,11 @@ object Value {
   def initPeerId[F[_]: LiftParser: Comonad]: P[Literal[F]] =
     `%init_peer_id%`.string.lift.map(Literal(_, LiteralType.string))
 
+  val minus = P.char('-')
+  val dot = P.char('.')
+
   def num[F[_]: LiftParser: Comonad]: P[Literal[F]] =
-    (P.char('-').?.with1 ~ Numbers.nonNegativeIntString).lift.map(fu =>
+    (minus.?.with1 ~ Numbers.nonNegativeIntString).lift.map(fu =>
       fu.extract match {
         case (Some(_), n) ⇒ Literal(fu.as(s"-$n"), LiteralType.signed)
         case (None, n) ⇒ Literal(fu.as(n), LiteralType.number)
@@ -55,14 +57,14 @@ object Value {
     )
 
   def float[F[_]: LiftParser: Comonad]: P[Literal[F]] =
-    (P.char('-').?.with1 ~ (Numbers.nonNegativeIntString <* P.char(
-      '.'
-    )) ~ Numbers.nonNegativeIntString).string.lift
+    (minus.?.with1 ~ (Numbers.nonNegativeIntString <* dot) ~ Numbers.nonNegativeIntString).string.lift
       .map(Literal(_, LiteralType.float))
 
+  val charsWhileQuotes = P.charsWhile0(_ != '"')
+
   // TODO make more sophisticated escaping/unescaping
   def string[F[_]: LiftParser: Comonad]: P[Literal[F]] =
-    (`"` *> P.charsWhile0(_ != '"') <* `"`).string.lift
+    (`"` *> charsWhileQuotes <* `"`).string.lift
       .map(Literal(_, LiteralType.string))
 
   def literal[F[_]: LiftParser: Comonad]: P[Literal[F]] =