Mirror of https://github.com/fluencelabs/aqua.git, synced 2024-12-04 22:50:18 +00:00

chore: Add extension to imports on preparing import tokens (#1139)

This commit is contained in:
parent e0513a6218
commit 7468f6fd18
@@ -1,13 +1,10 @@
aqua Job declares *

-use "declare.aqua"
+use "declare"

export timeout

-data Worker:
-    field: string
-
-func timeout() -> Worker:
+func timeout() -> AquaName.Worker:
    w <- AquaName.getWorker()
    a = w.host_id
    <- w
@@ -1,8 +1,8 @@
package aqua.files

import aqua.AquaIO
+import aqua.helpers.ext.Extension
import aqua.io.*

import cats.data.*
import cats.data.Validated.{Invalid, Valid}
import cats.effect.kernel.Concurrent
@@ -16,6 +16,7 @@ import cats.syntax.functor.*
import cats.syntax.traverse.*
import fs2.io.file.{Files, Path}
import fs2.text

import scala.util.Try

class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
@@ -72,7 +73,7 @@ class AquaFilesIO[F[_]: Files: Concurrent] extends AquaIO[F] {
      .evalFilter(p =>
        Files[F]
          .isRegularFile(p)
-          .map(_ && p.extName == ".aqua")
+          .map(_ && p.extName == Extension.aqua)
      )
      .compile
      .toList
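Illustrative sketch, not part of the commit: the filter above runs inside an fs2 stream of directory entries, and the only change is that the hard-coded ".aqua" literal is replaced by the shared Extension.aqua constant. A minimal standalone version of the same pattern, assuming fs2 and cats-effect are available:

import aqua.helpers.ext.Extension
import cats.effect.IO
import fs2.io.file.{Files, Path}

// List regular files under `dir` whose extension matches Extension.aqua (".aqua").
def listAquaSources(dir: Path): IO[List[Path]] =
  Files[IO]
    .walk(dir)
    .evalFilter(p => Files[IO].isRegularFile(p).map(_ && p.extName == Extension.aqua))
    .compile
    .toList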
@@ -4,6 +4,7 @@ import aqua.compiler.AquaError.{ParserError as AquaParserError, *}
import aqua.compiler.AquaWarning.CompileWarning
import aqua.compiler.{AquaError, AquaWarning}
import aqua.files.FileModuleId
+import aqua.helpers.ext.Extension
import aqua.io.AquaFileError
import aqua.lsp.AquaLSP.logger
import aqua.parser.lexer.LiteralToken
@@ -11,8 +12,8 @@ import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
import aqua.semantics.rules.locations.{DefinitionInfo, TokenLocation as TokenLoc}
import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST}

import cats.syntax.show.*

import scala.scalajs.js
import scala.scalajs.js.JSConverters.*
import scribe.Logging
@@ -107,12 +108,10 @@ object ResultHelper extends Logging {
      link.toList
    }.toJSArray

-  private def importsToTokenImport(imports: List[LiteralToken[FileSpan.F]], importPaths: Map[String, String]): js.Array[TokenImport] =
-    imports.flatMap { lt =>
-      val (span, str) = lt.valueToken
-      val unquoted = str.substring(1, str.length - 1)
-      val path = importPaths.getOrElse(unquoted, unquoted)
-      TokenLocation.fromSpan(span).map(l => TokenImport(l, path))
+  private def importsToTokenImport(paths: List[TokenImportPath[FileSpan.F]]): js.Array[TokenImport] =
+    paths.flatMap { path =>
+      val (span, _) = path.token.valueToken
+      TokenLocation.fromSpan(span).map(l => TokenImport(l, path.path))
    }.toJSArray

  def lspToCompilationResult(lsp: LspContext[FileSpan.F]): CompilationResult = {
@@ -125,7 +124,7 @@ object ResultHelper extends Logging {
      case errs =>
        logger.debug("Errors: " + errs.mkString("\n"))

-        val importTokens = importsToTokenImport(lsp.importTokens, lsp.importPaths)
+        val importTokens = importsToTokenImport(lsp.tokenPaths)

        CompilationResult(
          errors.toJSArray,
@@ -29,6 +29,8 @@ case class LspContext[S[_]](
  importPaths: Map[String, String] = Map.empty
) {
  lazy val allLocations: List[TokenLocation[S]] = variables.allLocations
+
+  lazy val tokenPaths: List[TokenImportPath[S]] = TokenImportPath.importPathsFromContext(this)
}

object LspContext {
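Usage sketch, not part of the commit (the aqua.lsp package for LspContext is an assumption here): consumers can now read resolved import paths directly off the context instead of re-deriving them from importTokens and importPaths.

import aqua.lsp.LspContext

// Hypothetical helper: resolved paths are already unquoted, extension-completed and looked up.
def importedFiles[S[_]](ctx: LspContext[S]): List[String] =
  ctx.tokenPaths.map(_.path)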
@@ -0,0 +1,21 @@
package aqua.lsp

import aqua.helpers.ext.Extension
import aqua.parser.lexer.LiteralToken
import aqua.parser.lift.FileSpan

// String literal from 'import' or 'use' with full path to imported file
case class TokenImportPath[S[_]](token: LiteralToken[S], path: String)

object TokenImportPath {
  def importPathsFromContext[S[_]](lspContext: LspContext[S]): List[TokenImportPath[S]] = {
    val importTokens = lspContext.importTokens
    val importPaths = lspContext.importPaths
    importTokens.map { lt =>
      val str = lt.value
      val unquoted = Extension.add(str.substring(1, str.length - 1))
      val path = importPaths.getOrElse(unquoted, unquoted)
      TokenImportPath(lt, path)
    }
  }
}
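To make the normalization concrete, here is an illustrative walk-through with hypothetical values, mirroring importPathsFromContext above: the quoted literal is unquoted, the '.aqua' extension is appended if missing, and the result is looked up in the importPaths map.

import aqua.helpers.ext.Extension

val importPaths = Map("second.aqua" -> "/project/imports/second.aqua") // hypothetical resolution map
val literal = "\"second\""                                             // token text as written in the source
val unquoted = Extension.add(literal.substring(1, literal.length - 1)) // "second.aqua"
val resolved = importPaths.getOrElse(unquoted, unquoted)               // "/project/imports/second.aqua"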
@@ -1,11 +1,13 @@
package aqua.lsp

import aqua.SpanParser
import aqua.compiler.FileIdString.given_FileId_String
import aqua.compiler.{AquaCompilerConf, AquaError, AquaSources}
+import aqua.lsp.Utils.*
import aqua.parser.Parser
import aqua.parser.lexer.Token
import aqua.parser.lift.Span
import aqua.parser.lift.Span.S
import aqua.parser.lift.{FileSpan, Span}
import aqua.raw.ConstantRaw
import aqua.semantics.rules.locations.{DefinitionInfo, TokenLocation, VariableInfo}
import aqua.semantics.{RulesViolated, SemanticError}
@@ -19,16 +21,6 @@ import org.scalatest.matchers.should.Matchers

class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {

-  private def getByPosition(code: String, str: String, position: Int): Option[(Int, Int)] = {
-    str.r.findAllMatchIn(code).toList.lift(position).map(r => (r.start, r.end))
-  }
-
-  extension [T](o: Option[T]) {
-
-    def tapNone(f: => Unit): Option[T] =
-      o.orElse { f; None }
-  }
-
  extension (c: LspContext[Span.S]) {

    def checkLocations(
@@ -84,11 +76,14 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
    ): Boolean = {

      getByPosition(code, checkName, position).exists { case (start, end) =>
-        val res = c.variables.variables.iterator.flatMap(_._2).exists { case VariableInfo(definition, _) =>
-          val span = definition.token.unit._1
-          definition.name == fullName.getOrElse(
-            checkName
-          ) && span.startIndex == start && span.endIndex == end && definition.`type` == `type`
+        val res =
+          c.variables.variables.values.flatten.exists { case VariableInfo(definition, _) =>
+            val (span, _) = definition.token.unit
+
+            definition.name == fullName.getOrElse(checkName) &&
+            span.startIndex == start &&
+            span.endIndex == end &&
+            definition.`type` == `type`
          }

        if (printFiltered)
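The rewritten check is behaviorally equivalent to the old one: flattening the map's values directly replaces iterating over key-value pairs. A tiny illustration, not from the commit:

val byName = Map("a" -> List(1, 2), "b" -> List(3))
val oldStyle = byName.iterator.flatMap(_._2).toList // List(1, 2, 3)
val newStyle = byName.values.flatten.toList         // List(1, 2, 3)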
@@ -0,0 +1,152 @@
package aqua.lsp

import aqua.SpanParser
import aqua.compiler.FileIdString.given
import aqua.compiler.{AquaCompilerConf, AquaError, AquaSources}
import aqua.lsp.Utils.*
import aqua.parser.lexer.Token
import aqua.parser.lift.Span.S
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{Ast, Parser, ParserError}
import aqua.raw.ConstantRaw
import aqua.semantics.rules.locations.{DefinitionInfo, TokenLocation, VariableInfo}
import aqua.semantics.{RulesViolated, SemanticError}
import aqua.types.*

import cats.data.*
import cats.parse.{LocationMap, Parser as P, Parser0}
import cats.{Comonad, Eval, Id, Monad, Monoid, Order, ~>}
import org.scalatest.Inside
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class FileLSPSpec extends AnyFlatSpec with Matchers with Inside {

  extension (c: LspContext[FileSpan.F]) {

    def checkImportLocation(
      name: String,
      position: Int,
      code: String,
      sourceFile: String,
      targetFile: String
    ): Boolean = {
      (for {
        pos <- getByPosition(code, name, position).tapNone(
          fail(s"Didn't find definition of '$name'")
        )
      } yield {
        c.tokenPaths.exists { tip =>
          val (fileSpan, str) = tip.token.valueToken
          fileSpan.span.startIndex == pos._1 &&
          fileSpan.span.endIndex == pos._2 &&
          str == name &&
          tip.path == targetFile &&
          fileSpan.name == sourceFile
        }
      }).getOrElse(false)
    }
  }

  def spanStringParser: String => String => ValidatedNec[ParserError[FileSpan.F], Ast[FileSpan.F]] =
    id =>
      source => {
        val nat = new (Span.S ~> FileSpan.F) {
          override def apply[A](span: Span.S[A]): FileSpan.F[A] = {
            (
              FileSpan(id, Eval.later(LocationMap(source)), span._1),
              span._2
            )
          }
        }
        Parser.natParser(Parser.spanParser, nat)(source)
      }

  private def aquaSourceFile(src: Map[String, String], imports: Map[String, String]) = {
    new AquaSources[Id, String, String] {

      override def sources: Id[ValidatedNec[String, Chain[(String, String)]]] =
        Validated.validNec(Chain.fromSeq(src.toSeq))

      override def resolveImport(
        from: String,
        imp: String
      ): Id[ValidatedNec[String, String]] =
        Validated.validNec(imp)

      override def load(file: String): Id[ValidatedNec[String, String]] =
        Validated.fromEither(
          (imports ++ src)
            .get(file)
            .toRight(NonEmptyChain.one(s"Cannot load imported file $file"))
        )
    }
  }

  def compileFileSpan(
    src: Map[String, String],
    imports: Map[String, String] = Map.empty
  ): ValidatedNec[AquaError[String, String, FileSpan.F], Map[String, LspContext[
    FileSpan.F
  ]]] = {
    LSPCompiler
      .compileToLsp[Id, String, String, FileSpan.F](
        aquaSourceFile(src, imports),
        spanStringParser,
        AquaCompilerConf(ConstantRaw.defaultConstants(None))
      )
  }

  it should "return right tokens from 'import' and 'use' paths" in {
    val main =
      """aqua Import declares *
        |
        |use "first.aqua" as Export
        |import secondF from "second"
        |
        |""".stripMargin
    val src = Map(
      "index.aqua" -> main
    )

    val firstImport =
      """aqua First declares firstF
        |
        |func firstF() -> string:
        | <- "firstStr"
        |
        |""".stripMargin

    val secondImport =
      """aqua Second declares secondF
        |
        |func secondF() -> string:
        | <- "secondStr"
        |
        |""".stripMargin

    val imports = Map(
      "first.aqua" ->
        firstImport,
      "second.aqua" -> secondImport
    )

    val res = compileFileSpan(src, imports).toOption.get.values.head

    res.errors shouldBe empty
    res.checkImportLocation(
      "\"first.aqua\"",
      0,
      main,
      "index.aqua",
      "first.aqua"
    ) shouldBe true
    res.checkImportLocation(
      "\"second\"",
      0,
      main,
      "index.aqua",
      "second.aqua"
    ) shouldBe true
  }
}
@@ -0,0 +1,16 @@
package aqua.lsp

object Utils {

  def getByPosition(code: String, str: String, position: Int): Option[(Int, Int)] = {
    str.r.findAllMatchIn(code).toList.lift(position).map(r => (r.start, r.end))
  }

  extension [T](o: Option[T]) {

    def tapNone(f: => Unit): Option[T] =
      o.orElse {
        f; None
      }
  }
}
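Example use of the extracted helpers, with illustrative values only:

import aqua.lsp.Utils.*

val code = "func foo() -> string:\n  <- foo()"
val second = getByPosition(code, "foo", 1)                               // Some((start, end)) of the second "foo"
val missing = getByPosition(code, "bar", 0).tapNone(println("no match")) // prints "no match", stays None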
@@ -1,5 +1,6 @@
package aqua.parser.head

+import aqua.helpers.ext.Extension
import aqua.parser.lexer.{LiteralToken, Token}
import cats.Comonad
import cats.~>
@@ -11,10 +12,7 @@ trait FilenameExpr[F[_]] extends HeaderExpr[F] {

  def fileValue: String = {
    val raw = filename.value.drop(1).dropRight(1)
-    if (raw.endsWith(".aqua"))
-      raw
-    else
-      raw + ".aqua"
+    Extension.add(raw)
  }

  override def mapK[K[_]: Comonad](fk: F ~> K): FilenameExpr[K]
@@ -0,0 +1,14 @@
package aqua.helpers.ext

object Extension {

  val aqua = ".aqua"

  // Add the '.aqua' extension if it is not already present
  def add(path: String): String = {
    if (path.endsWith(aqua))
      path
    else
      path + aqua
  }
}
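The helper's behavior, spelled out (this follows directly from the definition above):

import aqua.helpers.ext.Extension

val appended = Extension.add("lib")       // "lib.aqua" (extension appended)
val unchanged = Extension.add("lib.aqua") // "lib.aqua" (already present, left as is)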