Mirror of https://github.com/fluencelabs/aqua.git (synced 2024-12-04 06:30:17 +00:00)
feat(api): Accept structured imports [LNG-288] (#989)
* Refactor sources
* Fix FuncCompiler compilation
* Normalize imports
* Add relative imports
* Remove Prelude
* Remove import
* Add a log
* Add more logs
* Use snapshot of fs2
* Remove prints
* Add comments
* Savepoint
* Rewrite imports resolution
* Fix relative import
* Add comment
* Added comments
* Fix comment
* Add comments
* Refactor
* Refactor
* Add tests
* Fix tests
* Update tests
* Add comment
* Lower number of tests
* Comment, rename
* Add comment
* Add emptiness check
This commit is contained in:
parent 00252fe8a7
commit f7bfa8317b
40 api/api-npm/index.d.ts (vendored)
@@ -20,11 +20,47 @@ export declare class CompilationResult {
    warnings: string[];
    generatedSources: GeneratedSource[];
}

/**
 * Imports configuration for the compiler.
 * Structure:
 * {
 *   "<compiled-path-prefix-1>": {
 *     "<import-path-prefix-1>": ["<import-path-1>", "<import-path-2>"],
 *     "<import-path-prefix-2>": "<import-path-3>",
 *     ...
 *   }
 *   ...
 * }
 * Import `import` written in file with path `path`
 * is resolved as follows:
 * 1. Try to resolve `import` as relative import from `path`
 * 2. If relative resolution failed:
 *    a. Find **the longest** <compiled-path-prefix>
 *       that is a prefix of `path` in the imports configuration
 *    b. In obtained map, find **the longest** <import-path-prefix>
 *       that is a prefix of `import`
 *    c. Replace prefix in `import` with <import-path>
 *    d. Try to resolve import with obtained path
 *       (try a few paths if array was provided)
 *
 * WARNING: <compiled-path-prefix> in 2.a is compared with
 * absolute normalized path of `path`, so <compiled-path-prefix>
 * should be absolute normalized path as well
 * NOTE: <import-path-prefix> could be empty string,
 * in which case it will match any import
 * NOTE: passing just an array of strings is a shorthand for
 * {
 *   "/": {
 *     "": <array>
 *   }
 * }
 */
type Imports = Record<string, Record<string, string[] | string>> | string[];

/** Common arguments for all compile functions */
type CommonArgs = {
    /** Paths to directories, which you want to import .aqua files from. Example: ["./path/to/dir"] */
    imports?: string[] | undefined;
    /** Imports */
    imports?: Imports | undefined;
    /** Constants to be passed to the compiler. Example: ["CONSTANT1=1", "CONSTANT2=2"] */
    constants?: string[] | undefined;
    /** Set log level for the compiler. Must be one of: Must be one of: all, trace, debug, info, warn, error, off. Default: info */
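On the Scala side, the same structured configuration is built with the Imports.fromMap helper added later in this diff (api/api/src/main/scala/aqua/api/Imports.scala). A minimal sketch; the project layout, the "/project" prefix and the "@lib/" import prefix are hypothetical and only illustrate the shape:

import aqua.api.Imports

// Files compiled under /project resolve "@lib/..." imports from node_modules,
// and any other import (empty prefix) from /project/aqua.
val imports = Imports.fromMap(
  Map(
    "/project" -> Map(
      "@lib/" -> List("/project/node_modules/@lib"),
      "" -> List("/project/aqua") // empty prefix matches any import
    )
  )
)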
@@ -24,6 +24,33 @@ function getConfig({
  );
}

function normalizeImports(imports) {
  if (imports === undefined || imports === null) {
    return {}; // No imports
  }

  if (Array.isArray(imports)) {
    return {
      "/": {
        "": imports,
      },
    };
  }

  // Transform each inner string into an array
  return Object.fromEntries(
    Object.entries(imports).map(([pathPrefix, info]) => [
      pathPrefix,
      Object.fromEntries(
        Object.entries(info).map(([importPrefix, locations]) => [
          importPrefix,
          Array.isArray(locations) ? locations : [locations],
        ]),
      ),
    ]),
  );
}
|
||||
async function compile(...args) {
|
||||
try {
|
||||
const res = await Aqua.compile(...args);
|
||||
@ -42,11 +69,19 @@ async function compile(...args) {
|
||||
}
|
||||
|
||||
export function compileFromString({ code, imports = [], ...commonArgs }) {
|
||||
return compile(new Input(code), imports, getConfig(commonArgs));
|
||||
return compile(
|
||||
new Input(code),
|
||||
normalizeImports(imports),
|
||||
getConfig(commonArgs),
|
||||
);
|
||||
}
|
||||
|
||||
export function compileFromPath({ filePath, imports = [], ...commonArgs }) {
|
||||
return compile(new Path(filePath), imports, getConfig(commonArgs));
|
||||
return compile(
|
||||
new Path(filePath),
|
||||
normalizeImports(imports),
|
||||
getConfig(commonArgs),
|
||||
);
|
||||
}
|
||||
|
||||
export function compileAquaCallFromString({
|
||||
@ -58,7 +93,7 @@ export function compileAquaCallFromString({
|
||||
}) {
|
||||
return compile(
|
||||
new Call(funcCall, data, new Input(code)),
|
||||
imports,
|
||||
normalizeImports(imports),
|
||||
getConfig(commonArgs),
|
||||
);
|
||||
}
|
||||
@ -72,7 +107,7 @@ export function compileAquaCallFromPath({
|
||||
}) {
|
||||
return compile(
|
||||
new Call(funcCall, data, new Input(filePath)),
|
||||
imports,
|
||||
normalizeImports(imports),
|
||||
getConfig(commonArgs),
|
||||
);
|
||||
}
|
||||
|
@ -1,84 +1,95 @@
|
||||
package api
|
||||
|
||||
import api.types.{AquaConfig, AquaFunction, CompilationResult, GeneratedSource, Input}
|
||||
import aqua.Rendering.given
|
||||
import aqua.raw.value.ValueRaw
|
||||
import aqua.api.{APICompilation, APIResult, AquaAPIConfig}
|
||||
import aqua.SpanParser
|
||||
import aqua.api.TargetType.*
|
||||
import aqua.api.{APICompilation, APIResult, AquaAPIConfig, Imports}
|
||||
import aqua.backend.air.AirBackend
|
||||
import aqua.backend.api.APIBackend
|
||||
import aqua.backend.js.JavaScriptBackend
|
||||
import aqua.backend.ts.TypeScriptBackend
|
||||
import aqua.backend.{AirFunction, Backend, Generated}
|
||||
import aqua.compiler.*
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
|
||||
import aqua.logging.{LogFormatter, LogLevels}
|
||||
import aqua.constants.Constants
|
||||
import aqua.definitions.FunctionDef
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
|
||||
import aqua.io.*
|
||||
import aqua.raw.ops.Call
|
||||
import aqua.run.{CliFunc, FuncCompiler}
|
||||
import aqua.js.{FunctionDefJs, ServiceDefJs, VarJson}
|
||||
import aqua.logging.{LogFormatter, LogLevels}
|
||||
import aqua.model.AquaContext
|
||||
import aqua.model.transform.{Transform, TransformConfig}
|
||||
import aqua.parser.lexer.{LiteralToken, Token}
|
||||
import aqua.parser.lift.FileSpan.F
|
||||
import aqua.parser.lift.{FileSpan, Span}
|
||||
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
|
||||
import aqua.{AquaIO, SpanParser}
|
||||
import aqua.model.transform.{Transform, TransformConfig}
|
||||
import aqua.backend.api.APIBackend
|
||||
import aqua.backend.js.JavaScriptBackend
|
||||
import aqua.backend.ts.TypeScriptBackend
|
||||
import aqua.definitions.FunctionDef
|
||||
import aqua.js.{FunctionDefJs, ServiceDefJs, VarJson}
|
||||
import aqua.model.AquaContext
|
||||
import aqua.raw.ops.Call
|
||||
import aqua.raw.ops.CallArrowRawTag
|
||||
import aqua.raw.value.ValueRaw
|
||||
import aqua.raw.value.{LiteralRaw, VarRaw}
|
||||
import aqua.res.AquaRes
|
||||
|
||||
import api.types.{AquaConfig, AquaFunction, CompilationResult, GeneratedSource, Input}
|
||||
import cats.Applicative
|
||||
import cats.data.Validated.{Invalid, Valid, invalidNec, validNec}
|
||||
import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
|
||||
import cats.data.Validated.{invalidNec, validNec, Invalid, Valid}
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.effect.IO
|
||||
import cats.effect.unsafe.implicits.global
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.either.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.show.*
|
||||
import cats.syntax.traverse.*
|
||||
import cats.syntax.either.*
|
||||
import fs2.io.file.{Files, Path}
|
||||
import scribe.Logging
|
||||
|
||||
import scala.concurrent.ExecutionContext.Implicits.global
|
||||
import scala.concurrent.Future
|
||||
import scala.scalajs.js.{|, undefined, Promise, UndefOr}
|
||||
import scala.scalajs.js
|
||||
import scala.scalajs.js.JSConverters.*
|
||||
import scala.scalajs.js.annotation.*
|
||||
import scala.scalajs.js.{Promise, UndefOr, undefined, |}
|
||||
import scribe.Logging
|
||||
|
||||
@JSExportTopLevel("Aqua")
|
||||
object AquaAPI extends App with Logging {
|
||||
|
||||
// See api-npm package for description of imports config
|
||||
type ImportsJS = js.Dictionary[
|
||||
js.Dictionary[js.Array[String]]
|
||||
]
|
||||
|
||||
/**
|
||||
* All-in-one function that support different inputs and backends
|
||||
* @param input can be a path to aqua file, string with a code or a function call
|
||||
* @param imports list of paths
|
||||
* @param imports imports configuration
|
||||
* @param aquaConfigJS compiler config
|
||||
* @return compiler results depends on input and config
|
||||
* @return compiler results depending on input and config
|
||||
*/
|
||||
@JSExport
|
||||
def compile(
|
||||
input: types.Input | types.Path | types.Call,
|
||||
imports: js.Array[String],
|
||||
imports: ImportsJS,
|
||||
aquaConfigJS: js.UndefOr[AquaConfig]
|
||||
): Promise[CompilationResult] = {
|
||||
aquaConfigJS.toOption
|
||||
.map(AquaConfig.fromJS)
|
||||
.getOrElse(validNec(AquaAPIConfig()))
|
||||
.traverse { config =>
|
||||
val importsList = imports.toList
|
||||
val apiImports = Imports.fromMap(
|
||||
imports.view
|
||||
.mapValues(
|
||||
_.toMap.view
|
||||
.mapValues(_.toList)
|
||||
.toMap
|
||||
)
|
||||
.toMap
|
||||
)
|
||||
|
||||
input match {
|
||||
case i: (types.Input | types.Path) =>
|
||||
compileAll(i, importsList, config)
|
||||
compileAll(i, apiImports, config)
|
||||
case c: types.Call =>
|
||||
compileCall(c, importsList, config)
|
||||
compileCall(c, apiImports, config)
|
||||
|
||||
}
|
||||
}
|
||||
@ -90,7 +101,7 @@ object AquaAPI extends App with Logging {
|
||||
// Compile all non-call inputs
|
||||
private def compileAll(
|
||||
input: types.Input | types.Path,
|
||||
imports: List[String],
|
||||
imports: Imports,
|
||||
config: AquaAPIConfig
|
||||
): IO[CompilationResult] = {
|
||||
val backend: Backend = config.targetType match {
|
||||
@ -138,7 +149,7 @@ object AquaAPI extends App with Logging {
|
||||
// Compile a function call
|
||||
private def compileCall(
|
||||
call: types.Call,
|
||||
imports: List[String],
|
||||
imports: Imports,
|
||||
config: AquaAPIConfig
|
||||
): IO[CompilationResult] = {
|
||||
val path = call.input match {
|
||||
|
@ -17,7 +17,7 @@ object Test extends IOApp.Simple {
|
||||
APICompilation
|
||||
.compilePath(
|
||||
"./aqua-src/antithesis.aqua",
|
||||
"./aqua" :: Nil,
|
||||
Imports.fromMap(Map("/" -> Map("" -> List("./aqua")))),
|
||||
AquaAPIConfig(targetType = TypeScriptType),
|
||||
TypeScriptBackend(false, "IFluenceClient$$")
|
||||
)
|
||||
|
@ -1,50 +1,42 @@
|
||||
package aqua.api
|
||||
|
||||
import aqua.Rendering.given
|
||||
import aqua.raw.value.ValueRaw
|
||||
import aqua.raw.ConstantRaw
|
||||
import aqua.api.AquaAPIConfig
|
||||
import aqua.backend.api.APIBackend
|
||||
import aqua.backend.{AirFunction, Backend, Generated}
|
||||
import aqua.compiler.*
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
|
||||
import aqua.logging.{LogFormatter, LogLevels}
|
||||
import aqua.constants.Constants
|
||||
import aqua.definitions.FunctionDef
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, AquaStringSources, FileModuleId}
|
||||
import aqua.io.*
|
||||
import aqua.raw.ops.Call
|
||||
import aqua.run.{CliFunc, FuncCompiler, RunPreparer}
|
||||
import aqua.logging.{LogFormatter, LogLevels}
|
||||
import aqua.model.AquaContext
|
||||
import aqua.model.transform.{Transform, TransformConfig}
|
||||
import aqua.parser.expr.AbilityExpr.p
|
||||
import aqua.parser.lexer.{LiteralToken, Token}
|
||||
import aqua.parser.lift.FileSpan.F
|
||||
import aqua.parser.lift.{FileSpan, Span}
|
||||
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
|
||||
import aqua.{AquaIO, SpanParser}
|
||||
import aqua.model.transform.{Transform, TransformConfig}
|
||||
import aqua.backend.api.APIBackend
|
||||
import aqua.definitions.FunctionDef
|
||||
import aqua.model.AquaContext
|
||||
import aqua.raw.ConstantRaw
|
||||
import aqua.raw.ops.Call
|
||||
import aqua.raw.value.ValueRaw
|
||||
import aqua.res.AquaRes
|
||||
import aqua.run.{CliFunc, FuncCompiler, RunPreparer}
|
||||
import aqua.{AquaIO, SpanParser}
|
||||
|
||||
import cats.Applicative
|
||||
import cats.~>
|
||||
import cats.data.{
|
||||
Chain,
|
||||
EitherT,
|
||||
NonEmptyChain,
|
||||
NonEmptyList,
|
||||
Validated,
|
||||
ValidatedNec,
|
||||
ValidatedNel,
|
||||
Writer
|
||||
}
|
||||
import cats.data.Validated.{invalid, invalidNec, validNec, Invalid, Valid}
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.data.*
|
||||
import cats.data.Validated.*
|
||||
import cats.effect.IO
|
||||
import cats.effect.unsafe.implicits.global
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.either.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.show.*
|
||||
import cats.syntax.traverse.*
|
||||
import cats.syntax.either.*
|
||||
import cats.~>
|
||||
import fs2.io.file.{Files, Path}
|
||||
import scribe.{Level, Logging}
|
||||
|
||||
@ -53,7 +45,7 @@ object APICompilation {
|
||||
def compileCall(
|
||||
functionStr: String,
|
||||
pathStr: String,
|
||||
imports: List[String],
|
||||
imports: Imports,
|
||||
aquaConfig: AquaAPIConfig,
|
||||
fillWithTypes: List[ValueRaw] => ValidatedNec[String, List[ValueRaw]]
|
||||
): IO[APIResult[(FunctionDef, String)]] = {
|
||||
@ -69,7 +61,7 @@ object APICompilation {
|
||||
|
||||
new FuncCompiler[IO](
|
||||
Some(RelativePath(Path(pathStr))),
|
||||
imports.map(Path.apply),
|
||||
imports.toIO,
|
||||
transformConfig
|
||||
).compile().map { contextV =>
|
||||
for {
|
||||
@ -95,14 +87,17 @@ object APICompilation {
|
||||
|
||||
def compilePath(
|
||||
pathStr: String,
|
||||
imports: List[String],
|
||||
imports: Imports,
|
||||
aquaConfig: AquaAPIConfig,
|
||||
backend: Backend
|
||||
): IO[APIResult[Chain[AquaCompiled[FileModuleId]]]] = {
|
||||
given AquaIO[IO] = new AquaFilesIO[IO]
|
||||
|
||||
val path = Path(pathStr)
|
||||
val sources = new AquaFileSources[IO](path, imports.map(Path.apply))
|
||||
val sources = new AquaFileSources[IO](
|
||||
path,
|
||||
imports.toIO
|
||||
)
|
||||
|
||||
compileRaw(
|
||||
aquaConfig,
|
||||
@ -113,7 +108,7 @@ object APICompilation {
|
||||
|
||||
def compileString(
|
||||
input: String,
|
||||
imports: List[String],
|
||||
imports: Imports,
|
||||
aquaConfig: AquaAPIConfig,
|
||||
backend: Backend
|
||||
): IO[APIResult[Chain[AquaCompiled[FileModuleId]]]] = {
|
||||
@ -121,12 +116,11 @@ object APICompilation {
|
||||
|
||||
val path = Path("")
|
||||
|
||||
val strSources: AquaFileSources[IO] =
|
||||
new AquaFileSources[IO](path, imports.map(Path.apply)) {
|
||||
override def sources: IO[ValidatedNec[AquaFileError, Chain[(FileModuleId, String)]]] = {
|
||||
IO.pure(Valid(Chain.one((FileModuleId(path), input))))
|
||||
}
|
||||
}
|
||||
val strSources: AquaStringSources[IO] =
|
||||
new AquaStringSources(
|
||||
Map(FileModuleId(path) -> input),
|
||||
imports.toIO
|
||||
)
|
||||
|
||||
compileRaw(
|
||||
aquaConfig,
|
||||
|
49 api/api/src/main/scala/aqua/api/Imports.scala (Normal file)
@@ -0,0 +1,49 @@
package aqua.api

import aqua.files.Imports as IOImports

import fs2.io.file.Path

/**
 * Imports configuration passed to the compiler
 * @param settings map of path prefixes to imports settings
 */
final case class Imports(
  settings: Map[Path, Imports.PathSettings]
) {

  def toIO: IOImports =
    IOImports(
      settings.view
        .mapValues(
          _.toIO
        )
        .toMap
    )
}

object Imports {

  /**
   * Imports settings for a single path prefix.
   * @param imports map of import prefixes to locations
   */
  final case class PathSettings(
    imports: Map[String, List[Path]]
  ) {

    def toIO: IOImports.PathSettings =
      IOImports.PathSettings(imports)
  }

  def fromMap(m: Map[String, Map[String, List[String]]]): Imports =
    Imports(
      m.map { case (pathPrefix, settings) =>
        Path(pathPrefix) -> PathSettings(
          settings.map { case (importPrefix, locations) =>
            importPrefix -> locations.map(Path.apply)
          }
        )
      }
    )
}
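A short sketch of how this API-level config flows into the IO layer. Both types and the Map("/" -> Map("" -> List("./aqua"))) value appear elsewhere in this diff (Test.scala); the val names here are only illustrative:

import aqua.api.Imports
import aqua.files.Imports as IOImports

val apiImports: Imports = Imports.fromMap(
  Map("/" -> Map("" -> List("./aqua")))
)

// toIO converts the string-keyed settings into fs2 Paths,
// which is the form AquaFileSources and FuncCompiler consume.
val ioImports: IOImports = apiImports.toIO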
@ -1,7 +1,7 @@
|
||||
package aqua.run
|
||||
|
||||
import aqua.compiler.{AquaCompilerConf, CompileResult, CompilerAPI}
|
||||
import aqua.files.{AquaFileSources, FileModuleId}
|
||||
import aqua.files.{AquaFileSources, FileModuleId, Imports}
|
||||
import aqua.io.{AquaFileError, AquaPath, PackagePath}
|
||||
import aqua.model.transform.TransformConfig
|
||||
import aqua.model.{AquaContext, FuncArrow}
|
||||
@ -21,7 +21,7 @@ import scribe.Logging
|
||||
|
||||
class FuncCompiler[F[_]: Files: AquaIO: Async](
|
||||
input: Option[AquaPath],
|
||||
imports: List[Path],
|
||||
imports: Imports,
|
||||
transformConfig: TransformConfig
|
||||
) extends Logging {
|
||||
|
||||
@ -29,7 +29,6 @@ class FuncCompiler[F[_]: Files: AquaIO: Async](
|
||||
|
||||
private def compileToContext(
|
||||
path: Path,
|
||||
imports: List[Path],
|
||||
config: AquaCompilerConf = AquaCompilerConf(transformConfig.constantsList)
|
||||
): F[Result[Chain[AquaContext]]] = {
|
||||
val sources = new AquaFileSources[F](path, imports)
|
||||
@ -43,12 +42,11 @@ class FuncCompiler[F[_]: Files: AquaIO: Async](
|
||||
private def compileBuiltins(): F[Result[Chain[AquaContext]]] =
|
||||
for {
|
||||
path <- PackagePath.builtin.getPath()
|
||||
context <- compileToContext(path, Nil)
|
||||
context <- compileToContext(path)
|
||||
} yield context
|
||||
|
||||
// Compile and get only one function
|
||||
def compile(
|
||||
preludeImports: List[Path] = Nil,
|
||||
withBuiltins: Boolean = false
|
||||
): F[Result[Chain[AquaContext]]] = {
|
||||
for {
|
||||
@ -59,7 +57,7 @@ class FuncCompiler[F[_]: Files: AquaIO: Async](
|
||||
compileResult <- input.traverse { ap =>
|
||||
// compile only context to wrap and call function later
|
||||
Clock[F].timed(
|
||||
ap.getPath().flatMap(p => compileToContext(p, preludeImports ++ imports))
|
||||
ap.getPath().flatMap(p => compileToContext(p))
|
||||
)
|
||||
}
|
||||
(compileTime, contextV) = compileResult.orEmpty
|
||||
|
15 build.sbt
@ -9,7 +9,8 @@ val monocleV = "3.1.0"
|
||||
val scalaTestV = "3.2.17"
|
||||
val scalaTestScalaCheckV = "3.2.17.0"
|
||||
val sourcecodeV = "0.3.0"
|
||||
val fs2V = "3.9.3"
|
||||
// Snapshot is used to get latest fixes
|
||||
val fs2V = "3.9.3-37-8badc91-SNAPSHOT"
|
||||
val catsEffectV = "3.6-1f95fd7"
|
||||
val declineV = "2.3.0"
|
||||
val circeVersion = "0.14.2"
|
||||
@ -38,7 +39,9 @@ val commons = Seq(
|
||||
"-Ykind-projector"
|
||||
// "-Xfatal-warnings"
|
||||
)
|
||||
}
|
||||
},
|
||||
// Needed to resolve snapshot versions
|
||||
resolvers ++= Resolver.sonatypeOssRepos("snapshots")
|
||||
)
|
||||
|
||||
commons
|
||||
@ -60,9 +63,13 @@ lazy val io = crossProject(JVMPlatform, JSPlatform)
|
||||
"co.fs2" %%% "fs2-io" % fs2V
|
||||
)
|
||||
)
|
||||
.dependsOn(compiler, parser)
|
||||
.dependsOn(compiler, parser, helpers)
|
||||
|
||||
lazy val ioJS = io.js.dependsOn(`js-imports`)
|
||||
lazy val ioJS = io.js
|
||||
.settings(
|
||||
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule))
|
||||
)
|
||||
.dependsOn(`js-imports`)
|
||||
|
||||
lazy val `language-server-api` = crossProject(JSPlatform, JVMPlatform)
|
||||
.withoutSuffixFor(JVMPlatform)
|
||||
|
@ -1,23 +1,20 @@
|
||||
package aqua
|
||||
|
||||
import aqua.io.AquaFileError
|
||||
import cats.data.{Chain, EitherT, ValidatedNec}
|
||||
|
||||
import cats.data.{Chain, EitherT, ValidatedNec}
|
||||
import fs2.io.file.Path
|
||||
|
||||
trait AquaIO[F[_]] {
|
||||
def readFile(file: Path): EitherT[F, AquaFileError, String]
|
||||
|
||||
def resolve(
|
||||
src: Path,
|
||||
imports: List[Path]
|
||||
): EitherT[F, AquaFileError, Path]
|
||||
def resolve(paths: List[Path]): EitherT[F, AquaFileError, Path]
|
||||
|
||||
def listAqua(folder: Path): F[ValidatedNec[AquaFileError, Chain[Path]]]
|
||||
def listAqua(path: Path): EitherT[F, AquaFileError, Chain[Path]]
|
||||
|
||||
def writeFile(file: Path, content: String): EitherT[F, AquaFileError, Unit]
|
||||
}
|
||||
|
||||
object AquaIO {
|
||||
def apply[F[_]](implicit aio: AquaIO[F]): AquaIO[F] = aio
|
||||
def apply[F[_]](using aio: AquaIO[F]): AquaIO[F] = aio
|
||||
}
|
||||
|
@ -2,161 +2,87 @@ package aqua.files
|
||||
|
||||
import aqua.AquaIO
|
||||
import aqua.compiler.{AquaCompiled, AquaSources}
|
||||
import aqua.io.FilesUnresolved
|
||||
import aqua.io.{AquaFileError, FileSystemError, ListAquaErrors}
|
||||
import aqua.syntax.eithert.*
|
||||
|
||||
import cats.data.EitherT
|
||||
import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
|
||||
import cats.implicits.catsSyntaxApplicativeId
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.either.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.foldable.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.monad.*
|
||||
import cats.syntax.traverse.*
|
||||
import cats.syntax.validated.*
|
||||
import cats.{Functor, Monad}
|
||||
import fs2.io.file.{Files, Path}
|
||||
import scala.util.Try
|
||||
import scribe.Logging
|
||||
|
||||
import scala.util.Try
|
||||
trait AquaFileImports[F[_]: Functor: AquaIO] extends AquaSources[F, AquaFileError, FileModuleId] {
|
||||
def imports: Imports
|
||||
|
||||
class AquaFileSources[F[_]: AquaIO: Monad: Files: Functor](
|
||||
sourcesPath: Path,
|
||||
importFrom: List[Path]
|
||||
) extends AquaSources[F, AquaFileError, FileModuleId] with Logging {
|
||||
private val filesIO = implicitly[AquaIO[F]]
|
||||
|
||||
override def sources: F[ValidatedNec[AquaFileError, Chain[(FileModuleId, String)]]] =
|
||||
filesIO.listAqua(sourcesPath).flatMap {
|
||||
case Validated.Valid(files) =>
|
||||
files
|
||||
.map(f =>
|
||||
filesIO
|
||||
.readFile(f)
|
||||
.value
|
||||
.map[ValidatedNec[AquaFileError, Chain[(FileModuleId, String)]]] {
|
||||
case Left(err) => Validated.invalidNec(err)
|
||||
case Right(content) => Validated.validNec(Chain.one(FileModuleId(f) -> content))
|
||||
}
|
||||
)
|
||||
.traverse(identity)
|
||||
.map(
|
||||
_.foldLeft[ValidatedNec[AquaFileError, Chain[(FileModuleId, String)]]](
|
||||
Validated.validNec(Chain.nil)
|
||||
)(_ combine _)
|
||||
)
|
||||
case Validated.Invalid(e) =>
|
||||
Validated
|
||||
.invalidNec[AquaFileError, Chain[(FileModuleId, String)]](ListAquaErrors(e))
|
||||
.pure[F]
|
||||
}
|
||||
|
||||
// Resolve an import that was written in a 'from' file
|
||||
// Try to find it in a list of given imports or near 'from' file
|
||||
override def resolveImport(
|
||||
from: FileModuleId,
|
||||
imp: String
|
||||
): F[ValidatedNec[AquaFileError, FileModuleId]] = {
|
||||
val validatedPath = Validated.fromEither(Try(Path(imp)).toEither.leftMap(FileSystemError.apply))
|
||||
validatedPath match {
|
||||
case Validated.Valid(importP) =>
|
||||
// if there is no `.aqua` extension, than add it
|
||||
filesIO
|
||||
.resolve(importP, importFrom.prependedAll(from.file.parent))
|
||||
.bimap(NonEmptyChain.one, FileModuleId(_))
|
||||
.value
|
||||
.map(Validated.fromEither)
|
||||
case Validated.Invalid(err) => Validated.invalidNec[AquaFileError, FileModuleId](err).pure[F]
|
||||
}
|
||||
|
||||
}
|
||||
imported: String
|
||||
): F[ValidatedNec[AquaFileError, FileModuleId]] =
|
||||
AquaIO[F]
|
||||
.resolve(
|
||||
imports.resolutions(
|
||||
// NOTE: It is important to use normalized absolute path here
|
||||
from.file.normalize.absolute,
|
||||
imported
|
||||
)
|
||||
)
|
||||
.leftMap {
|
||||
case e: FilesUnresolved =>
|
||||
e.toImportUnresolved(imported)
|
||||
case e => e
|
||||
}
|
||||
.map(FileModuleId.apply)
|
||||
.toValidatedNec
|
||||
|
||||
override def load(file: FileModuleId): F[ValidatedNec[AquaFileError, String]] =
|
||||
filesIO.readFile(file.file).leftMap(NonEmptyChain.one).value.map(Validated.fromEither)
|
||||
|
||||
// Get a directory of a file, or this file if it is a directory itself
|
||||
private def getDir(path: Path): F[Path] = {
|
||||
Files[F]
|
||||
.isDirectory(path)
|
||||
.map { res =>
|
||||
if (res) path else path.parent.getOrElse(path)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param srcFile aqua source
|
||||
* @param targetPath a main path where all output files will be written
|
||||
* @param suffix `.aqua` will be replaced with this suffix
|
||||
* @return
|
||||
*/
|
||||
def resolveTargetPath(
|
||||
srcFile: Path,
|
||||
targetPath: Path,
|
||||
suffix: String
|
||||
): F[Validated[Throwable, Path]] =
|
||||
Files[F].isDirectory(sourcesPath).flatMap {
|
||||
case false =>
|
||||
Validated.catchNonFatal {
|
||||
targetPath.absolute.normalize
|
||||
.resolve(srcFile.fileName.toString.stripSuffix(".aqua") + suffix)
|
||||
}.pure[F]
|
||||
case true =>
|
||||
getDir(sourcesPath).map { srcDir =>
|
||||
Validated.catchNonFatal {
|
||||
val srcFilePath = srcDir.absolute.normalize
|
||||
.relativize(srcFile.absolute.normalize)
|
||||
|
||||
// use `srcFilePath` as a suffix for target file path, so the directory structure is replicated
|
||||
val targetDir =
|
||||
targetPath.absolute.normalize
|
||||
.resolve(
|
||||
srcFilePath
|
||||
)
|
||||
|
||||
targetDir.parent
|
||||
.getOrElse(targetDir)
|
||||
.resolve(srcFile.fileName.toString.stripSuffix(".aqua") + suffix)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write content to a file and return a success message
|
||||
private def writeWithResult(
|
||||
target: Path,
|
||||
content: String,
|
||||
funcsCount: Int,
|
||||
servicesCount: Int
|
||||
) = {
|
||||
filesIO
|
||||
.writeFile(
|
||||
target,
|
||||
content
|
||||
)
|
||||
.as(s"Result $target: compilation OK ($funcsCount functions, $servicesCount services)")
|
||||
.value
|
||||
.map(Validated.fromEither)
|
||||
}
|
||||
|
||||
def write(
|
||||
targetPath: Path
|
||||
)(ac: AquaCompiled[FileModuleId]): F[Seq[Validated[AquaFileError, String]]] =
|
||||
if (ac.compiled.isEmpty)
|
||||
Seq(
|
||||
Validated.valid[AquaFileError, String](
|
||||
s"Source ${ac.sourceId.file}: compilation OK (nothing to emit)"
|
||||
)
|
||||
).pure[F]
|
||||
else
|
||||
ac.compiled.map { compiled =>
|
||||
resolveTargetPath(
|
||||
ac.sourceId.file,
|
||||
targetPath,
|
||||
compiled.suffix
|
||||
).flatMap { result =>
|
||||
result
|
||||
.leftMap(FileSystemError.apply)
|
||||
.map { target =>
|
||||
writeWithResult(target, compiled.content, ac.funcsCount, ac.servicesCount)
|
||||
}
|
||||
.traverse(identity)
|
||||
}
|
||||
}.traverse(identity)
|
||||
.map(_.map(_.andThen(identity)))
|
||||
AquaIO[F].readFile(file.file).toValidatedNec
|
||||
}
|
||||
|
||||
/**
|
||||
* Aqua sources that are read from file system.
|
||||
*/
|
||||
class AquaFileSources[F[_]: Monad: AquaIO](
|
||||
sourcesPath: Path,
|
||||
override val imports: Imports
|
||||
) extends AquaFileImports[F] with Logging {
|
||||
|
||||
override def sources: F[ValidatedNec[AquaFileError, Chain[(FileModuleId, String)]]] =
|
||||
(for {
|
||||
files <- AquaIO[F]
|
||||
.listAqua(sourcesPath)
|
||||
.transform(_.toEitherNec)
|
||||
contents <- EitherT.fromValidatedF(
|
||||
files
|
||||
.traverse(file =>
|
||||
AquaIO[F]
|
||||
.readFile(file)
|
||||
.map(content => FileModuleId(file) -> content)
|
||||
.toValidatedNec
|
||||
)
|
||||
.map(_.sequence)
|
||||
)
|
||||
} yield contents).toValidated
|
||||
}
|
||||
|
||||
/**
|
||||
* Aqua sources that are read from string map.
|
||||
*/
|
||||
class AquaStringSources[F[_]: Monad: AquaIO](
|
||||
sourcesMap: Map[FileModuleId, String],
|
||||
override val imports: Imports
|
||||
) extends AquaFileImports[F] {
|
||||
|
||||
override def sources: F[ValidatedNec[AquaFileError, Chain[(FileModuleId, String)]]] =
|
||||
Chain.fromSeq(sourcesMap.toSeq).validNec.pure[F]
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ package aqua.files
|
||||
|
||||
import aqua.AquaIO
|
||||
import aqua.io.*
|
||||
|
||||
import cats.data.*
|
||||
import cats.data.Validated.{Invalid, Valid}
|
||||
import cats.effect.kernel.Concurrent
|
||||
@ -10,12 +11,11 @@ import cats.syntax.applicativeError.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.either.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.foldable.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.traverse.*
|
||||
import cats.syntax.foldable.*
|
||||
import fs2.io.file.{Files, Path}
|
||||
import fs2.text
|
||||
|
||||
import scala.util.Try
|
||||
|
||||
class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
|
||||
@ -23,124 +23,61 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
|
||||
override def readFile(file: Path): EitherT[F, AquaFileError, String] =
|
||||
EitherT(
|
||||
Files[F]
|
||||
.readAll(file)
|
||||
.fold(Vector.empty[Byte])((acc, b) => acc :+ b)
|
||||
.readUtf8(file)
|
||||
.foldMonoid
|
||||
// TODO fix for comment on last line in air
|
||||
// TODO should be fixed by parser
|
||||
.map(_.appendedAll("\n\r".getBytes))
|
||||
.flatMap(fs2.Stream.emits)
|
||||
.through(text.utf8.decode)
|
||||
.map(_.appendedAll("\n\r"))
|
||||
.attempt
|
||||
.map(_.leftMap(FileSystemError.apply))
|
||||
.compile
|
||||
.last
|
||||
.map(
|
||||
_.fold((EmptyFileError(file): AquaFileError).asLeft[String])(
|
||||
_.left.map(FileSystemError.apply)
|
||||
)
|
||||
)
|
||||
.map(_.getOrElse(EmptyFileError(file).asLeft))
|
||||
)
|
||||
|
||||
/**
|
||||
* Find the first file that exists in the given list of paths
|
||||
* If there is no such file - error
|
||||
* Return first path that is a regular file
|
||||
*/
|
||||
private def findFirstF(
|
||||
in: List[Path],
|
||||
notFound: EitherT[F, AquaFileError, Path]
|
||||
override def resolve(
|
||||
paths: List[Path]
|
||||
): EitherT[F, AquaFileError, Path] =
|
||||
in.headOption.fold(notFound)(p =>
|
||||
EitherT(
|
||||
Concurrent[F].attempt(Files[F].isRegularFile(p))
|
||||
paths
|
||||
.collectFirstSomeM(p =>
|
||||
Concurrent[F]
|
||||
.attemptT(Files[F].isRegularFile(p))
|
||||
.recover(_ => false)
|
||||
.leftMap(FileSystemError.apply)
|
||||
.map(Option.when(_)(p))
|
||||
)
|
||||
.leftMap[AquaFileError](FileSystemError.apply)
|
||||
.recover({ case _ => false })
|
||||
.flatMap {
|
||||
case true =>
|
||||
EitherT(
|
||||
Concurrent[F].attempt(p.absolute.normalize.pure[F])
|
||||
).leftMap[AquaFileError](FileSystemError.apply)
|
||||
case false =>
|
||||
findFirstF(in.tail, notFound)
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* Checks if a file exists in the list of possible paths
|
||||
*/
|
||||
def resolve(
|
||||
src: Path,
|
||||
imports: List[Path]
|
||||
): EitherT[F, AquaFileError, Path] =
|
||||
findFirstF(
|
||||
imports
|
||||
.map(_.resolve(src)),
|
||||
EitherT.leftT(FileNotFound(src, imports))
|
||||
)
|
||||
|
||||
// Get all files for every path if the path in the list is a directory or this path otherwise
|
||||
private def gatherFiles(
|
||||
files: List[Path],
|
||||
listFunction: (f: Path) => F[ValidatedNec[AquaFileError, Chain[Path]]]
|
||||
): List[F[ValidatedNec[AquaFileError, Chain[Path]]]] = {
|
||||
files.map(f => gatherFile(f, listFunction))
|
||||
}
|
||||
|
||||
// Get all files if the path is a directory or this path otherwise
|
||||
private def gatherFile(
|
||||
f: Path,
|
||||
listFunction: (f: Path) => F[ValidatedNec[AquaFileError, Chain[Path]]]
|
||||
): F[ValidatedNec[AquaFileError, Chain[Path]]] = {
|
||||
Files[F].isDirectory(f).flatMap { isDir =>
|
||||
if (isDir)
|
||||
listFunction(f)
|
||||
else
|
||||
Files[F].isRegularFile(f).map { isFile =>
|
||||
if (isFile)
|
||||
Validated.validNec(Chain.one(f.absolute.normalize))
|
||||
else
|
||||
Validated.invalidNec(FileNotFound(f, Nil))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get all files if the path is a directory or this path otherwise
|
||||
override def listAqua(folder: Path): F[ValidatedNec[AquaFileError, Chain[Path]]] = {
|
||||
Files[F]
|
||||
.exists(folder)
|
||||
.flatMap { exists =>
|
||||
if (!exists) {
|
||||
Left(FileNotFound(folder, Nil): AquaFileError).pure[F]
|
||||
} else {
|
||||
Files[F].isDirectory(folder).flatMap { isDir =>
|
||||
if (isDir) {
|
||||
Files[F]
|
||||
.list(folder)
|
||||
.evalFilter(p =>
|
||||
if (p.extName == ".aqua") true.pure[F]
|
||||
else Files[F].isDirectory(p)
|
||||
)
|
||||
.compile
|
||||
.toList
|
||||
.map(Right(_))
|
||||
} else {
|
||||
Right(folder :: Nil).pure[F]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.map(Validated.fromEither)
|
||||
.map(_.leftMap(NonEmptyChain.one))
|
||||
.flatMap {
|
||||
case Valid(files) =>
|
||||
gatherFiles(files, listAqua).foldLeft(
|
||||
Validated.validNec[AquaFileError, Chain[Path]](Chain.nil).pure[F]
|
||||
) { case (acc, v) =>
|
||||
(acc, v).mapN(_ combine _)
|
||||
}
|
||||
case Invalid(errs) =>
|
||||
Validated.invalid[NonEmptyChain[AquaFileError], Chain[Path]](errs).pure[F]
|
||||
case None =>
|
||||
EitherT.leftT(
|
||||
FilesUnresolved(paths)
|
||||
)
|
||||
case Some(p) =>
|
||||
Try(
|
||||
p.absolute.normalize
|
||||
).toEither.leftMap(FileSystemError.apply).toEitherT
|
||||
}
|
||||
}
|
||||
|
||||
// Get all `.aqua` files inside if the path is a directory or
|
||||
// this path if it is an `.aqua` file otherwise
|
||||
override def listAqua(path: Path): EitherT[F, AquaFileError, Chain[Path]] =
|
||||
for {
|
||||
exists <- EitherT.liftF(Files[F].exists(path))
|
||||
_ <- EitherT.cond(exists, (), FileNotFound(path): AquaFileError)
|
||||
paths <- EitherT.liftF(
|
||||
Files[F]
|
||||
.walk(path)
|
||||
.evalFilter(p =>
|
||||
Files[F]
|
||||
.isRegularFile(p)
|
||||
.map(_ && p.extName == ".aqua")
|
||||
)
|
||||
.compile
|
||||
.toList
|
||||
)
|
||||
} yield Chain.fromSeq(paths)
|
||||
|
||||
private def deleteIfExists(file: Path): EitherT[F, AquaFileError, Boolean] =
|
||||
Files[F].deleteIfExists(file).attemptT.leftMap(FileSystemError.apply)
|
||||
@ -166,7 +103,3 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
|
||||
.leftMap(FileWriteError(file, _))
|
||||
|
||||
}
|
||||
|
||||
object AquaFilesIO {
|
||||
implicit def summon[F[_]: Files: Concurrent]: AquaIO[F] = new AquaFilesIO[F]
|
||||
}
|
||||
|
56 io/src/main/scala/aqua/files/Imports.scala (Normal file)
@@ -0,0 +1,56 @@
package aqua.files

import fs2.io.file.Path
import scala.util.Try

/**
 * Imports resolution configuration.
 */
final case class Imports(
  settings: Map[Path, Imports.PathSettings]
) {

  /**
   * Get all possible resolutions for a given import.
   *
   * @param from path of the file that imports
   * @param imported import string
   * @return list of possible resolutions
   */
  def resolutions(from: Path, imported: String): List[Path] =
    relative(from, imported).toList ::: gather(from, imported)

  // Return relative resolution if possible
  private def relative(from: Path, imported: String): Option[Path] =
    for {
      fromParent <- from.parent
      importedPath <- Try(Path(imported)).toOption
    } yield fromParent.resolve(importedPath)

  // Gather all possible resolutions from imports config
  private def gather(from: Path, imported: String): List[Path] =
    // First - find the longest matching prefix for path
    settings.filter { case (prefix, _) =>
      from.startsWith(prefix)
    }.maxByOption { case (prefix, _) =>
      prefix.toString.length
    }.flatMap { case (_, s) =>
      // Then - find the longest matching prefix for import
      s.imports.filter { case (prefix, _) =>
        imported.startsWith(prefix)
      }.maxByOption { case (prefix, _) =>
        prefix.length
      }
    }.map { case (prefix, paths) =>
      // Drop the prefix from import and append to the path
      val dropped = imported.drop(prefix.length)
      paths.map(_ / dropped)
    }.toList.flatten
}

object Imports {

  final case class PathSettings(
    imports: Map[String, List[Path]]
  )
}
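A worked sketch of the resolution order implemented above: the relative candidate comes first, then the rewrite through the longest matching path prefix and the longest matching import prefix. The /project layout and the "@lib/" prefix are hypothetical; Imports, PathSettings and resolutions are the definitions from this file:

import aqua.files.Imports
import fs2.io.file.Path

val imports = Imports(
  Map(
    Path("/project") -> Imports.PathSettings(
      Map(
        "@lib/" -> List(Path("/project/node_modules/@lib")),
        "" -> List(Path("/project/aqua"))
      )
    )
  )
)

// /project/src/main.aqua contains `import "@lib/math.aqua"`
val candidates = imports.resolutions(Path("/project/src/main.aqua"), "@lib/math.aqua")
// 1. relative candidate:  /project/src/@lib/math.aqua
// 2. longest path prefix is /project, longest import prefix is "@lib/",
//    so the rewritten candidate is /project/node_modules/@lib/math.aqua
// AquaFilesIO.resolve then picks the first candidate that exists as a regular file.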
@ -1,7 +1,6 @@
|
||||
package aqua.io
|
||||
|
||||
import cats.data.NonEmptyChain
|
||||
|
||||
import fs2.io.file.Path
|
||||
|
||||
sealed trait AquaFileError {
|
||||
@ -16,13 +15,28 @@ case class ListAquaErrors(errors: NonEmptyChain[AquaFileError]) extends AquaFile
|
||||
s"Cannot read '*.aqua' files:\n" + errors.map(_.showForConsole)
|
||||
}
|
||||
|
||||
case class FileNotFound(name: Path, imports: Seq[Path]) extends AquaFileError {
|
||||
case class FileNotFound(path: Path) extends AquaFileError {
|
||||
override def showForConsole: String = s"File not found: $path"
|
||||
}
|
||||
|
||||
// TODO: Refactor? This is more high-level error
|
||||
// not related to file system
|
||||
case class ImportUnresolved(name: String, resolutions: Seq[Path]) extends AquaFileError {
|
||||
|
||||
override def showForConsole: String =
|
||||
if (imports.nonEmpty)
|
||||
s"File '$name' not found, looking in ${imports.mkString(", ")}"
|
||||
if (resolutions.nonEmpty)
|
||||
s"Import '$name' could not be resolved, tried: ${resolutions.mkString(", ")}"
|
||||
else
|
||||
s"File '$name' not found"
|
||||
s"Import '$name' could not be resolved"
|
||||
}
|
||||
|
||||
case class FilesUnresolved(files: Seq[Path]) extends AquaFileError {
|
||||
|
||||
def toImportUnresolved(name: String): ImportUnresolved =
|
||||
ImportUnresolved(name, files)
|
||||
|
||||
override def showForConsole: String =
|
||||
s"Cannot resolve any of files: ${files.mkString(", ")}"
|
||||
}
|
||||
|
||||
case class EmptyFileError(path: Path) extends AquaFileError {
|
||||
|
@ -1,6 +1,7 @@
|
||||
package aqua.io
|
||||
|
||||
import aqua.PlatformPackagePath
|
||||
|
||||
import cats.effect.kernel.Async
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.flatMap.*
|
||||
|
@ -1,42 +0,0 @@
|
||||
package aqua.io
|
||||
|
||||
import aqua.PlatformPackagePath
|
||||
import cats.Monad
|
||||
import cats.syntax.applicative.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.functor.*
|
||||
import fs2.io.file.{Files, Path}
|
||||
import scribe.Logging
|
||||
|
||||
import scala.util.Try
|
||||
|
||||
/**
|
||||
* @param importPaths list of paths where imports will be searched
|
||||
*/
|
||||
case class Prelude(importPaths: List[Path])
|
||||
|
||||
// JS-specific functions
|
||||
object Prelude extends Logging {
|
||||
|
||||
lazy val runImports: List[Path] = Path("aqua/run-builtins") :: Nil
|
||||
|
||||
def init[F[_]: Files: Monad](withRunImports: Boolean = false): F[Prelude] = {
|
||||
// check if node_modules directory exists and add it in imports list
|
||||
val nodeModules = Path("node_modules")
|
||||
val nodeImportF: F[Option[Path]] = Files[F].exists(nodeModules).flatMap {
|
||||
case true =>
|
||||
Files[F].isDirectory(nodeModules).map(isDir => if (isDir) Some(nodeModules) else None)
|
||||
case false => None.pure[F]
|
||||
}
|
||||
|
||||
nodeImportF.map { nodeImport =>
|
||||
val imports =
|
||||
nodeImport.toList ++ PlatformPackagePath.getGlobalNodeModulePath ++ (if (withRunImports)
|
||||
runImports
|
||||
else Nil)
|
||||
|
||||
new Prelude(imports)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
165 io/src/test/scala/aqua/ImportsSpec.scala (Normal file)
@ -0,0 +1,165 @@
|
||||
package aqua.files
|
||||
|
||||
import fs2.io.file.Path
|
||||
import org.scalacheck.*
|
||||
import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
|
||||
import scala.math.sqrt
|
||||
|
||||
class ImportsSpec extends AnyFlatSpec with ScalaCheckPropertyChecks with Matchers {
|
||||
|
||||
implicit override val generatorDrivenConfig =
|
||||
// Tests here are lightweight, so we can afford to run more of them
|
||||
PropertyCheckConfiguration(minSuccessful = 500)
|
||||
|
||||
val shortAlphaNumStr = for {
|
||||
length <- Gen.choose(1, 10)
|
||||
chars <- Gen.listOfN(5, Gen.alphaNumChar)
|
||||
} yield chars.mkString
|
||||
|
||||
val fileNameWithExt = Gen
|
||||
.zip(
|
||||
shortAlphaNumStr,
|
||||
shortAlphaNumStr
|
||||
)
|
||||
.map((name, ext) => s"$name.$ext")
|
||||
|
||||
given Arbitrary[Path] = Arbitrary(
|
||||
Gen.sized(size =>
|
||||
for {
|
||||
segments <- Gen.listOfN(
|
||||
size / 5,
|
||||
Gen.oneOf(
|
||||
shortAlphaNumStr,
|
||||
Gen.oneOf(".", "..")
|
||||
)
|
||||
)
|
||||
prefix <- Gen.oneOf("", "/", "~/")
|
||||
suffix <- Gen.oneOf(
|
||||
Gen.oneOf("", "/"),
|
||||
fileNameWithExt
|
||||
)
|
||||
str = (prefix +: segments :+ suffix).mkString("/")
|
||||
} yield Path(str)
|
||||
)
|
||||
)
|
||||
|
||||
// Paths without "..", ".", "~" and absolute paths
|
||||
val simplePath: Gen[Path] = Gen.sized(size =>
|
||||
for {
|
||||
segments <- Gen.listOfN(
|
||||
size / 5,
|
||||
shortAlphaNumStr
|
||||
)
|
||||
suffix <- Gen.option(
|
||||
fileNameWithExt
|
||||
)
|
||||
path = segments.appendedAll(suffix).mkString("/")
|
||||
} yield Path(path)
|
||||
)
|
||||
|
||||
val simpleNonEmptyPath: Gen[Path] =
|
||||
for {
|
||||
prefix <- shortAlphaNumStr.map(Path.apply)
|
||||
suffix <- simplePath
|
||||
} yield prefix / suffix
|
||||
|
||||
given Arbitrary[Imports] = Arbitrary(
|
||||
Gen.sized { size =>
|
||||
val N = sqrt(size).toInt
|
||||
val pathResized = Gen.resize(N, Arbitrary.arbitrary[Path])
|
||||
Gen
|
||||
.mapOfN(
|
||||
N,
|
||||
Gen.zip(
|
||||
pathResized,
|
||||
Gen
|
||||
.mapOfN(
|
||||
N,
|
||||
Gen.zip(
|
||||
Gen.asciiPrintableStr,
|
||||
Gen.listOfN(N, pathResized)
|
||||
)
|
||||
)
|
||||
.map(Imports.PathSettings.apply)
|
||||
)
|
||||
)
|
||||
}.map(Imports.apply)
|
||||
)
|
||||
|
||||
val nonEmptyAsciiPrintableStr: Gen[String] =
|
||||
Gen.nonEmptyListOf(Gen.asciiPrintableChar).map(_.mkString)
|
||||
|
||||
"Imports" should "resolve relative import first" in {
|
||||
forAll(
|
||||
Arbitrary.arbitrary[Imports],
|
||||
Arbitrary.arbitrary[Path].filter(_.parent.isDefined),
|
||||
Arbitrary.arbitrary[Path]
|
||||
) { (imports, path, imported) =>
|
||||
val resolved = imports.resolutions(path, imported.toString)
|
||||
val parent = path.parent.get
|
||||
resolved.headOption should be(Some(parent.resolve(imported)))
|
||||
}
|
||||
}
|
||||
|
||||
it should "take the longest path prefix" in {
|
||||
forAll(
|
||||
Arbitrary.arbitrary[Imports],
|
||||
Arbitrary.arbitrary[Path],
|
||||
simpleNonEmptyPath,
|
||||
simpleNonEmptyPath,
|
||||
Gen.asciiPrintableStr
|
||||
) { (imports, prefix, middle, suffix, imported) =>
|
||||
val shortPrefix = prefix
|
||||
val longPrefix = prefix / middle
|
||||
val path = prefix / middle / suffix
|
||||
val shortLocation = Path("short/path")
|
||||
val longLocation = Path("long/path")
|
||||
val importsPrepared = imports.copy(
|
||||
settings = imports.settings
|
||||
.filterKeys(p => !p.startsWith(prefix))
|
||||
.toMap
|
||||
.updated(shortPrefix, Imports.PathSettings(Map(imported -> List(shortLocation))))
|
||||
.updated(longPrefix, Imports.PathSettings(Map(imported -> List(longLocation))))
|
||||
)
|
||||
val resolved = importsPrepared.resolutions(path, imported)
|
||||
resolved should not contain (shortLocation)
|
||||
resolved should contain(longLocation)
|
||||
}
|
||||
}
|
||||
|
||||
it should "rewrite the longest import prefix" in {
|
||||
forAll(
|
||||
Arbitrary.arbitrary[Imports],
|
||||
simpleNonEmptyPath,
|
||||
simplePath,
|
||||
nonEmptyAsciiPrintableStr,
|
||||
nonEmptyAsciiPrintableStr,
|
||||
nonEmptyAsciiPrintableStr
|
||||
) { (imports, pathPrefix, pathSuffix, prefix, middle, suffix) =>
|
||||
val path = pathPrefix / pathSuffix
|
||||
val shortPrefix = prefix
|
||||
val longPrefix = prefix + middle
|
||||
val imported = prefix + middle + suffix
|
||||
val shortLocation = Path("short/path")
|
||||
val longLocation = Path("long/path")
|
||||
val importsPrepared = imports.copy(
|
||||
settings = imports.settings
|
||||
.filterKeys(p => !p.startsWith(pathPrefix))
|
||||
.toMap
|
||||
.updated(
|
||||
pathPrefix,
|
||||
Imports.PathSettings(
|
||||
Map(
|
||||
shortPrefix -> List(shortLocation),
|
||||
longPrefix -> List(longLocation)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
val resolved = importsPrepared.resolutions(path, imported)
|
||||
resolved should contain(longLocation / suffix)
|
||||
}
|
||||
}
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
package aqua
|
||||
package aqua.files
|
||||
|
||||
import aqua.Rendering.given
|
||||
import aqua.compiler.AquaError
|
||||
@ -26,7 +26,6 @@ class RenderingSpec extends AnyFlatSpec with Matchers with Inside with Inspector
|
||||
val error = Parser.Error(8, NonEmptyList.one(InRange(36, ':', ':')))
|
||||
val fileSpan = FileSpan("file", Eval.now(LocationMap(script)), Span(8, 9))
|
||||
|
||||
|
||||
val result: AquaError[FileModuleId, AquaFileError, FileSpan.F] =
|
||||
AquaError.ParserError(LexerError((fileSpan, error)))
|
||||
|
||||
|
@ -2,15 +2,15 @@ package aqua.lsp
|
||||
|
||||
import aqua.compiler.*
|
||||
import aqua.compiler.AquaError.SourcesError
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId, Imports}
|
||||
import aqua.io.*
|
||||
import aqua.parser.lift.FileSpan
|
||||
import aqua.parser.lift.FileSpan.F
|
||||
import aqua.raw.ConstantRaw
|
||||
import aqua.{AquaIO, SpanParser}
|
||||
|
||||
import cats.data.Validated
|
||||
import cats.data.Validated.{Invalid, Valid}
|
||||
import cats.data.{NonEmptyChain, Validated}
|
||||
import cats.effect.IO
|
||||
import cats.effect.unsafe.implicits.global
|
||||
import cats.syntax.option.*
|
||||
@ -26,18 +26,25 @@ object AquaLSP extends Logging {
|
||||
|
||||
import ResultHelper.*
|
||||
|
||||
type ImportsJS = js.Dictionary[
|
||||
js.Dictionary[js.Array[String]]
|
||||
]
|
||||
|
||||
@JSExport
|
||||
def compile(
|
||||
pathStr: String,
|
||||
imports: scalajs.js.Array[String]
|
||||
): scalajs.js.Promise[CompilationResult] = {
|
||||
imports: ImportsJS
|
||||
): js.Promise[CompilationResult] = {
|
||||
logger.debug(s"Compiling '$pathStr' with imports: $imports")
|
||||
|
||||
given AquaIO[IO] = new AquaFilesIO[IO]
|
||||
|
||||
val path = Path(pathStr)
|
||||
val pathId = FileModuleId(path)
|
||||
val sources = new AquaFileSources[IO](path, imports.toList.map(Path.apply))
|
||||
val sources = new AquaFileSources[IO](
|
||||
path,
|
||||
importsToIO(imports)
|
||||
)
|
||||
val config = AquaCompilerConf(ConstantRaw.defaultConstants(None))
|
||||
|
||||
val proc = for {
|
||||
@ -66,6 +73,17 @@ object AquaLSP extends Logging {
|
||||
}
|
||||
|
||||
proc.unsafeToFuture().toJSPromise
|
||||
|
||||
}
|
||||
|
||||
private def importsToIO(
|
||||
imports: ImportsJS
|
||||
): Imports = Imports(
|
||||
imports.toMap.map { case (pathPrefix, settings) =>
|
||||
Path(pathPrefix) -> Imports.PathSettings(
|
||||
settings.toMap.map { case (importPrefix, locations) =>
|
||||
importPrefix -> locations.toList.map(Path.apply)
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
package aqua.lsp
|
||||
|
||||
import aqua.compiler.AquaCompilerConf
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
|
||||
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId, Imports}
|
||||
import aqua.io.AquaFileError
|
||||
import aqua.lsp.LSPCompiler
|
||||
import aqua.parser.lift.FileSpan
|
||||
@ -15,11 +15,20 @@ import scribe.Level
|
||||
|
||||
object Test extends IOApp.Simple {
|
||||
|
||||
implicit val aio: AquaIO[IO] = new AquaFilesIO[IO]
|
||||
given AquaIO[IO] = new AquaFilesIO[IO]
|
||||
|
||||
override def run: IO[Unit] = {
|
||||
|
||||
val sources = new AquaFileSources[IO](Path("./aqua-src/antithesis.aqua"), List(Path("./aqua")))
|
||||
val sources = new AquaFileSources[IO](
|
||||
Path("./aqua-src/antithesis.aqua"),
|
||||
Imports(
|
||||
Map(
|
||||
Path("/") -> Imports.PathSettings(
|
||||
Map("" -> List(Path("./aqua")))
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
val config = AquaCompilerConf(ConstantRaw.defaultConstants(None))
|
||||
|
||||
for {
|
||||
|
@@ -0,0 +1,17 @@
package aqua.syntax

import cats.Functor
import cats.data.{EitherT, Validated}
import cats.syntax.functor.*

object eithert {

  extension (e: EitherT.type) {

    /**
     * Converts a `F[Validated[A, B]]` into an `EitherT[F, A, B]`.
     */
    def fromValidatedF[F[_]: Functor, A, B](v: F[Validated[A, B]]): EitherT[F, A, B] =
      EitherT(v.map(_.toEither))
  }
}
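A minimal usage sketch of this helper; the Option/Validated values are illustrative only:

import aqua.syntax.eithert.*
import cats.data.{EitherT, Validated}

// Lift an effectful Validated into EitherT (here F = Option for simplicity)
val validated: Option[Validated[String, Int]] = Some(Validated.valid(42))
val either: EitherT[Option, String, Int] = EitherT.fromValidatedF(validated)
// either.value == Some(Right(42))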