Mirror of https://github.com/fluencelabs/aqua.git (synced 2024-12-04 22:50:18 +00:00)
feat(language-server): Support declares and exports in LSP [LNG-304, LNG-319] (#1070)
Co-authored-by: InversionSpaces <inversionspaces@vivaldi.net>
This commit is contained in:
parent d03211b492
commit f7194f0a54
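For orientation: the feature concerns Aqua module headers. Names listed in `declares` and `export` (including `... as ...` renames), as well as `import ... from` and `use ... as ...` entries, are now tracked by the language server and resolved to token locations. A condensed header built from the names used in the test added in this diff (an illustrative excerpt, not a complete file):

```aqua
aqua Import declares foo_wrapper, SOME_CONST

import foo, strFunc, num, absb as otherName from "export2.aqua"
use thirdFunc as thirdRenamed from "third.aqua" as Third

export foo_wrapper, SOME_CONST, EXPORTED as NEW_NAME
```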
@@ -1,19 +1,15 @@
package aqua.compiler

import aqua.compiler.AquaError.{ParserError as AquaParserError, *}
import aqua.linker.Linker.link
import aqua.linker.{AquaModule, Linker, Modules}
import aqua.compiler.AquaError.*
import aqua.linker.Linker
import aqua.parser.{Ast, ParserError}
import aqua.semantics.header.{HeaderHandler, Picker}
import aqua.semantics.{SemanticError, Semantics}

import cats.arrow.FunctionK
import cats.data.*
import cats.syntax.applicative.*
import cats.syntax.either.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.{Comonad, Monad, Monoid, Order, ~>}
import scribe.Logging
@@ -7,9 +7,9 @@ import aqua.parser.{Ast, ParserError}
import aqua.raw.RawContext
import aqua.semantics.RawSemantics
import aqua.semantics.header.{HeaderHandler, HeaderSem}
import aqua.semantics.rules.locations.{LocationsAlgebra, DummyLocationsInterpreter}

import cats.data.*
import cats.syntax.applicative.*
import cats.syntax.either.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
@@ -55,6 +55,9 @@ object CompilerAPI extends Logging {
.rawContextMonoid

val semantics = new RawSemantics[S]()

given LocationsAlgebra[S, State[RawContext, *]] =
DummyLocationsInterpreter()

new AquaCompiler[F, E, I, S, RawContext](
new HeaderHandler(),
@@ -4,9 +4,10 @@ import aqua.compiler.{AquaCompiler, AquaCompilerConf, AquaError, AquaSources}
import aqua.parser.{Ast, ParserError}
import aqua.raw.RawContext
import aqua.semantics.header.{HeaderHandler, HeaderSem}
import aqua.semantics.rules.locations.LocationsAlgebra

import cats.data.Validated.validNec
import cats.data.{Chain, Validated, ValidatedNec}
import cats.data.{State, Chain, Validated, ValidatedNec}
import cats.syntax.either.*
import cats.syntax.functor.*
import cats.syntax.monoid.*
@@ -47,6 +48,9 @@ object LSPCompiler {

val semantics = new LspSemantics[S]()

given LocationsAlgebra[S, State[LspContext[S], *]] =
LocationsInterpreter[S, LspContext[S]]()

new AquaCompiler[F, E, I, S, LspContext[S]](
new HeaderHandler(),
semantics
@@ -6,8 +6,11 @@ import aqua.semantics.header.Picker
import aqua.semantics.rules.locations.{TokenLocation, VariableInfo}
import aqua.semantics.{SemanticError, SemanticWarning}
import aqua.types.{AbilityType, ArrowType, Type}
import aqua.semantics.rules.locations.LocationsState

import cats.syntax.monoid.*
import cats.{Monoid, Semigroup}
import monocle.Lens

// Context with info that necessary for language server
case class LspContext[S[_]](
@@ -15,6 +18,7 @@ case class LspContext[S[_]](
abDefinitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]],
rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType],
constants: Map[String, Type] = Map.empty[String, Type],
// TODO: Can this field be refactored into LocationsState?
variables: List[VariableInfo[S]] = Nil,
importTokens: List[LiteralToken[S]] = Nil,
errors: List[SemanticError[S]] = Nil,
@@ -150,4 +154,17 @@ object LspContext {
)(using Semigroup[LspContext[S]]): LspContext[S] =
ctx.copy(raw = ctx.raw.pickDeclared)
}

/*
NOTE: This instance is used to generate LocationsAlgebra[S, State[LspContext[S], *]]
to reuse the code from the body semantics in the header semantics
*/
given [S[_]]: Lens[LspContext[S], LocationsState[S]] = {
val get: LspContext[S] => LocationsState[S] =
ctx => LocationsState(ctx.variables)
val replace: LocationsState[S] => LspContext[S] => LspContext[S] =
locs => ctx => ctx.copy(variables = locs.variables)

Lens(get)(replace)
}
}
@@ -29,18 +29,29 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
usePosition: Int,
defCode: String,
useCode: Option[String] = None,
fieldName: Option[String] = None
fieldOrSynonym: Option[String] = None
): Boolean = {
(for {
defPos <- getByPosition(defCode, name, defPosition)
usePos <- getByPosition(useCode.getOrElse(defCode), fieldName.getOrElse(name), usePosition)
usePos <- getByPosition(
useCode.getOrElse(defCode),
fieldOrSynonym.getOrElse(name),
usePosition
)
} yield {
val (defStart, defEnd) = defPos
val (useStart, useEnd) = usePos
c.allLocations.exists { case TokenLocation(useT, defT) =>
val defSpan = defT.unit._1
val useSpan = useT.unit._1
defSpan.startIndex == defStart && defSpan.endIndex == defEnd && useSpan.startIndex == useStart && useSpan.endIndex == useEnd
c.variables.exists { case VariableInfo(defI, occs) =>
val defSpan = defI.token.unit._1
if (defSpan.startIndex == defStart && defSpan.endIndex == defEnd) {
occs.exists { useT =>
val useSpan = useT.unit._1
useSpan.startIndex == useStart && useSpan.endIndex == useEnd
}
} else {
false
}
}
}).getOrElse(false)
}
@@ -115,15 +126,23 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
id => txt => Parser.parse(Parser.parserSchema)(txt),
AquaCompilerConf(ConstantRaw.defaultConstants(None))
)
.leftMap { ee =>
println(ee)
ee
}

it should "return right tokens" in {
val main =
"""aqua Import
"""aqua Import declares foo_wrapper, Ab, Str, useAbAndStruct, SOME_CONST
|
|import foo, strFunc, num from "export2.aqua"
|import foo, strFunc, num, absb as otherName from "export2.aqua"
|
|use thirdFunc as thirdRenamed from "third.aqua" as Third
|
|import "../gen/OneMore.aqua"
|
|export foo_wrapper, SOME_CONST, EXPORTED as NEW_NAME
|
|func foo_wrapper() -> string:
| fooResult <- foo()
| if 1 == 1:
@@ -146,13 +165,16 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
| strFunc(s.someField)
| num(Ab.someField)
|
|const SOME_CONST = 1
|const EXPORTED = 1
|
|""".stripMargin
val src = Map(
"index.aqua" -> main
)

val firstImport =
"""aqua Export declares strFunc, num, foo
"""aqua Export declares strFunc, num, foo, absb
|
|func absb() -> string:
| <- "ff"
@@ -176,11 +198,21 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
| consume(s: string)
|""".stripMargin

val thirdImport =
"""aqua Third declares thirdFunc
|
|func thirdFunc() -> string:
| <- "I am MyFooBar foo"
|
|""".stripMargin

val imports = Map(
"export2.aqua" ->
firstImport,
"../gen/OneMore.aqua" ->
secondImport
secondImport,
"third.aqua" ->
thirdImport
)

val res = compile(src, imports).toOption.get.values.head
@@ -193,6 +225,50 @@ class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside {
)
)

res.checkTokenLoc(
main,
"foo_wrapper",
2,
ArrowType(
NilType,
ProductType(ScalarType.string :: Nil)
)
) shouldBe true
res.checkTokenLoc(
main,
"SOME_CONST",
2,
LiteralType.unsigned
) shouldBe true

// exports
res.checkLocations("foo_wrapper", 2, 1, main) shouldBe true
res.checkLocations("SOME_CONST", 2, 1, main) shouldBe true
res.checkLocations("EXPORTED", 1, 0, main) shouldBe true
res.checkLocations("EXPORTED", 1, 0, main, None, Some("NEW_NAME")) shouldBe true

// declares
res.checkLocations("foo_wrapper", 2, 0, main) shouldBe true
res.checkLocations("SOME_CONST", 2, 0, main) shouldBe true

// imports
res.checkLocations("foo", 1, 1, firstImport, Some(main)) shouldBe true
res.checkLocations("strFunc", 1, 0, firstImport, Some(main)) shouldBe true
res.checkLocations("num", 1, 0, firstImport, Some(main)) shouldBe true
res.checkLocations("absb", 1, 0, firstImport, Some(main)) shouldBe true
res.checkLocations("absb", 1, 0, firstImport, Some(main), Some("otherName")) shouldBe true

// use
res.checkLocations("thirdFunc", 1, 0, thirdImport, Some(main)) shouldBe true
res.checkLocations(
"thirdFunc",
1,
0,
thirdImport,
Some(main),
Some("thirdRenamed")
) shouldBe true

// inside `foo_wrapper` func
res.checkTokenLoc(main, "fooResult", 0, ScalarType.string) shouldBe true
res.checkLocations("fooResult", 0, 1, main) shouldBe true
@@ -3,15 +3,12 @@ package aqua.linker
import aqua.errors.Errors.internalError

import cats.MonadError
import cats.data.{NonEmptyChain, Validated, ValidatedNec}
import cats.data.NonEmptyChain
import cats.instances.list.*
import cats.kernel.{Monoid, Semigroup}
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.syntax.validated.*
import scala.annotation.tailrec
import scribe.Logging

@@ -116,10 +113,10 @@ object Linker extends Logging {
} else
canHandle.traverse { mod =>
// Gather all imports for module
val imports = mod.imports.mapValues { imp =>
val imports = mod.imports.view.mapValues { imp =>
proc
.get(imp)
.getOrElse(
imp,
// Should not happen as we check it above
internalError(s"Module $imp not found in $proc")
)
@@ -155,6 +152,6 @@ object Linker extends Logging {
else
iter(modules.loaded.values.toList, Map.empty, cycle).map(
// Remove all modules that are not exported from result
_.filterKeys(modules.exports.contains).toMap
_.view.filterKeys(modules.exports.contains).toMap
)
}
@@ -23,8 +23,6 @@ class LinkerSpec extends AnyFlatSpec with Matchers {
imports = Map("mod2" -> "mod2"),
dependsOn = Map("mod2" -> "unresolved mod2 in mod1"),
body = imports => {
println(s"mod1: $imports")

imports
.get("mod2")
.toRight("mod2 not found in mod1")
@@ -18,7 +18,7 @@ case class UseExpr[F[_]](
override def mapK[K[_]: Comonad](fk: F ~> K): UseExpr[K] =
copy(filename.mapK(fk), asModule.map(_.mapK(fk)))

override def toString(): String =
override def toString: String =
s"use ${filename.value}${asModule.map(_.value).fold("")(" as " + _)}"
}
@@ -15,7 +15,6 @@ class ArrowTypeSem[S[_]](val expr: ArrowTypeExpr[S]) extends AnyVal {

def program[Alg[_]: Monad](implicit
T: TypesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg],
D: DefinitionsAlgebra[S, Alg]
): Prog[Alg, Raw] =
T.resolveArrowDef(expr.`type`).flatMap {
@@ -24,7 +24,6 @@ class ServiceSem[S[_]](val expr: ServiceExpr[S]) extends AnyVal {

private def define[Alg[_]: Monad](using
A: AbilitiesAlgebra[S, Alg],
N: NamesAlgebra[S, Alg],
T: TypesAlgebra[S, Alg],
V: ValuesAlgebra[S, Alg],
D: DefinitionsAlgebra[S, Alg]
@@ -0,0 +1,99 @@
package aqua.semantics.header

import aqua.parser.head.*
import aqua.parser.lexer.Token
import aqua.semantics.SemanticError
import aqua.semantics.header.HeaderHandler.*
import aqua.semantics.header.Picker.*
import aqua.semantics.rules.locations.LocationsAlgebra

import cats.data.*
import cats.data.Validated.*
import cats.instances.option.*
import cats.kernel.Semigroup
import cats.syntax.apply.*
import cats.syntax.bifunctor.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.option.*
import cats.syntax.semigroup.*
import cats.syntax.validated.*
import cats.{Comonad, Monoid}

class ExportSem[S[_]: Comonad, C](expr: ExportExpr[S])(using
acm: Monoid[C],
picker: Picker[C],
locations: LocationsAlgebra[S, State[C, *]]
) {

private def exportFuncChecks(
ctx: C,
token: Token[S],
name: String
): ValidatedNec[SemanticError[S], Unit] =
Validated.condNec(
!ctx.funcReturnAbilityOrArrow(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it returns an arrow or an ability"
)
) combine Validated.condNec(
!ctx.funcAcceptAbility(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it accepts an ability"
)
)

def headerSem: Res[S, C] = {
// Save exports, finally handle them
HeaderSem(
// Nothing there
picker.blank,
finSem
).validNec
}

private def finSem(ctx: C, initCtx: C): ValidatedNec[SemanticError[S], C] = {
val pubs = expr.pubs
.map(
_.bimap(
_.bimap(n => (n, n.value), n => (n, n.map(_.value))),
_.bimap(n => (n, n.value), n => (n, n.map(_.value)))
).merge
)

val tokens = pubs.toList.flatMap {
case ((token, name), (renameToken, _)) =>
renameToken.map(name -> _).toList :+ (name, token)
}

val ctxWithExportLocations = ctx.addOccurences(tokens)
val sumCtx = initCtx |+| ctxWithExportLocations

pubs.map { case ((token, name), (_, rename)) =>
sumCtx
.pick(name, rename, declared = false)
.as(Map(name -> rename))
.toValid(
error(
token,
s"Files has no $name declaration or import, " +
s"cannot export, available functions: ${sumCtx.funcNames.mkString(", ")}"
)
)
.ensure(
error(
token,
s"Can not export '$name' as it is an ability"
)
)(_ => !sumCtx.isAbility(name))
.toValidatedNec <* exportFuncChecks(sumCtx, token, name)
}
.prepend(validNec(sumCtx.exports))
.combineAll
.map(sumCtx.setExports)
}
}
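A side note on the validation style used in ExportSem above: `Validated.condNec(...) combine Validated.condNec(...)` accumulates every failed check instead of stopping at the first one. A small standalone illustration of that behaviour, with plain strings standing in for the project's `SemanticError` and only the cats library assumed:

```scala
import cats.data.{Validated, ValidatedNec}

// Both conditions are checked; when both fail, both messages end up in the result.
def exportChecks(returnsArrow: Boolean, acceptsAbility: Boolean): ValidatedNec[String, Unit] =
  Validated.condNec(!returnsArrow, (), "returns an arrow or an ability") combine
    Validated.condNec(!acceptsAbility, (), "accepts an ability")

@main def demo(): Unit =
  println(exportChecks(returnsArrow = false, acceptsAbility = false)) // Valid(())
  println(exportChecks(returnsArrow = true, acceptsAbility = true))   // Invalid with both messages accumulated
```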
@@ -2,68 +2,34 @@ package aqua.semantics.header

import aqua.parser.Ast
import aqua.parser.head.*
import aqua.parser.lexer.{Ability, Name, Token}
import aqua.parser.lexer.{Ability, Token}
import aqua.semantics.header.Picker.*
import aqua.semantics.{HeaderError, SemanticError}
import aqua.semantics.rules.locations.LocationsAlgebra

import cats.data.*
import cats.data.Validated.*
import cats.free.Cofree
import cats.instances.list.*
import cats.instances.option.*
import cats.kernel.Semigroup
import cats.syntax.apply.*
import cats.syntax.bifunctor.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.option.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.syntax.validated.*
import cats.{Comonad, Eval, Monoid}
import cats.{Comonad, Monoid}

class HeaderHandler[S[_]: Comonad, C](using
acm: Monoid[C],
headMonoid: Monoid[HeaderSem[S, C]],
picker: Picker[C]
picker: Picker[C],
// NOTE: This typeclass is here to reuse
// the code from the body semantics
locations: LocationsAlgebra[S, State[C, *]]
) {

type Res[S[_], C] = ValidatedNec[SemanticError[S], HeaderSem[S, C]]
type ResAC[S[_]] = ValidatedNec[SemanticError[S], C]
type ResT[S[_], T] = ValidatedNec[SemanticError[S], T]

// Helper: monoidal combine of all the childrens after parent res
private def combineAnd(children: Chain[Res[S, C]])(
parent: Res[S, C]
): Eval[Res[S, C]] =
Eval.later(parent |+| children.combineAll)

// Error generator with token pointer
private def error[T](
token: Token[S],
msg: String
): SemanticError[S] = HeaderError(token, msg)

private def exportFuncChecks(ctx: C, token: Token[S], name: String): ResT[S, Unit] =
Validated.condNec(
!ctx.funcReturnAbilityOrArrow(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it returns an arrow or an ability"
)
) combine Validated.condNec(
!ctx.funcAcceptAbility(name),
(),
error(
token,
s"The function '$name' cannot be exported, because it accepts an ability"
)
)
import HeaderHandler.*

def sem(imports: Map[String, C], header: Ast.Head[S]): Res[S, C] = {
// Resolve a filename from given imports or fail
def resolve(f: FilenameExpr[S]): ResAC[S] =
def resolve(f: FilenameExpr[S]): ResAC[S, C] =
imports
.get(f.fileValue)
.map(_.pickDeclared)
@@ -72,16 +38,21 @@ class HeaderHandler[S[_]: Comonad, C](using
)

// Get part of the declared context (for import/use ... from ... expressions)
def getFrom(f: FromExpr[S], ctx: C): ResAC[S] =
def getFrom(f: FromExpr[S], ctx: C): ResAC[S, C] =
ctx.pickHeader.validNec |+| f.imports
.map(
_.bimap(
_.bimap(n => (n, n.value), _.map(_.value)),
_.bimap(n => (n, n.value), _.map(_.value))
_.bimap(n => (n, n.value), n => (n, n.map(_.value))),
_.bimap(n => (n, n.value), n => (n, n.map(_.value)))
).merge match {
case ((token, name), rename) =>
case ((token, name), (renameToken, rename)) =>
ctx
.pick(name, rename, ctx.module.nonEmpty)
.map { ctx =>
val defName = rename.getOrElse(name)
val occs = renameToken.map(defName -> _).toList :+ (defName, token)
ctx.addOccurences(occs)
}
.toValidNec(
error(
token,
@@ -93,7 +64,7 @@ class HeaderHandler[S[_]: Comonad, C](using
.combineAll

// Convert an imported context into a module (ability)
def toModule(ctx: C, tkn: Token[S], rename: Option[Ability[S]]): ResAC[S] =
def toModule(ctx: C, tkn: Token[S], rename: Option[Ability[S]]): ResAC[S, C] =
rename
.map(_.value)
.orElse(ctx.module)
@@ -105,46 +76,8 @@ class HeaderHandler[S[_]: Comonad, C](using
)
)

val handleModule: ModuleExpr[S] => Res[S, C] = {
case ModuleExpr(word, name, declareAll, declareNames, declareCustom) =>
val shouldDeclare = declareNames.map(_.value).toSet ++ declareCustom.map(_.value)

lazy val sem = HeaderSem(
// Save module header info
acm.empty.setModule(
name.value,
shouldDeclare
),
(ctx, _) =>
// When file is handled, check that all the declarations exists
if (declareAll.nonEmpty)
ctx.setModule(name.value, declares = ctx.all).validNec
else
(
declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
).map { case (n, t) =>
ctx
.pick(n, None, ctx.module.nonEmpty)
.toValidNec(
error(
t,
s"`$n` is expected to be declared, but declaration is not found in the file"
)
)
.void
}.combineAll.as(
// TODO: why module name and declares is lost? where is it lost?
ctx.setModule(name.value, declares = shouldDeclare)
)
)

word.value.fold(
module = error(
word,
"Keyword `module` is deprecated, use `aqua` instead"
).invalidNec,
aqua = sem.validNec
)
val handleModule: ModuleExpr[S] => Res[S, C] = { me =>
ModuleSem(me).headerSem
}

// Handler for every header expression, will be combined later
@@ -181,44 +114,8 @@ class HeaderHandler[S[_]: Comonad, C](using
HeaderSem(fc, (c, _) => validNec(c))
}

case ExportExpr(pubs) =>
// Save exports, finally handle them
HeaderSem(
// Nothing there
picker.blank,
(ctx, initCtx) =>
val sumCtx = initCtx |+| ctx

pubs
.map(
_.bimap(
_.bimap(n => (n, n.value), _.map(_.value)),
_.bimap(n => (n, n.value), _.map(_.value))
).merge
)
.map { case ((token, name), rename) =>
sumCtx
.pick(name, rename, declared = false)
.as(Map(name -> rename))
.toValid(
error(
token,
s"File has no $name declaration or import, " +
s"cannot export, available functions: ${sumCtx.funcNames.mkString(", ")}"
)
)
.ensure(
error(
token,
s"Can not export '$name' as it is an ability"
)
)(_ => !sumCtx.isAbility(name))
.toValidatedNec <* exportFuncChecks(sumCtx, token, name)
}
.prepend(validNec(ctx.exports))
.combineAll
.map(ctx.setExports)
).validNec
case ee: ExportExpr[S] =>
ExportSem(ee).headerSem

case f: FilenameExpr[S] =>
resolve(f).map(fc => HeaderSem(fc, (c, _) => validNec(c)))
@@ -241,3 +138,15 @@ class HeaderHandler[S[_]: Comonad, C](using
module |+| other
}
}

object HeaderHandler {

type Res[S[_], C] = ValidatedNec[SemanticError[S], HeaderSem[S, C]]
type ResAC[S[_], C] = ValidatedNec[SemanticError[S], C]

// Error generator with token pointer
def error[S[_], T](
token: Token[S],
msg: String
): SemanticError[S] = HeaderError(token, msg)
}
@@ -0,0 +1,68 @@
package aqua.semantics.header

import aqua.parser.head.ModuleExpr
import aqua.semantics.header.HeaderHandler.{Res, error}
import aqua.semantics.header.Picker.*
import aqua.semantics.rules.locations.LocationsAlgebra

import cats.data.*
import cats.data.Validated.*
import cats.kernel.Semigroup
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.option.*
import cats.syntax.validated.*
import cats.{Comonad, Monoid}

class ModuleSem[S[_]: Comonad, C: Picker](expr: ModuleExpr[S])(using
acm: Monoid[C],
locations: LocationsAlgebra[S, State[C, *]]
) {

import expr.*

def headerSem: Res[S, C] = {
val shouldDeclare = declareNames.map(_.value).toSet ++ declareCustom.map(_.value)

lazy val sem = HeaderSem(
// Save module header info
acm.empty.setModule(
name.value,
shouldDeclare
),
(ctx, _) =>
// When file is handled, check that all the declarations exists
if (declareAll.nonEmpty)
ctx.setModule(name.value, declares = ctx.all).validNec
else
(
declareNames.fproductLeft(_.value) ::: declareCustom.fproductLeft(_.value)
).map { case (n, t) =>
ctx
.pick(n, None, ctx.module.nonEmpty)
.toValidNec(
error(
t,
s"`$n` is expected to be declared, but declaration is not found in the file"
)
)
.void
}.combineAll.as {
val tokens = declareNames.map(n => n.value -> n) ++ declareCustom.map(a => a.value -> a)
val ctxWithDeclaresLoc = ctx.addOccurences(tokens)
// TODO: why module name and declares is lost? where is it lost?
ctxWithDeclaresLoc.setModule(name.value, declares = shouldDeclare)
}
)

word.value.fold(
module = error(
word,
"Keyword `module` is deprecated, use `aqua` instead"
).invalidNec,
aqua = sem.validNec
)
}
}
semantics/src/main/scala/aqua/semantics/header/package.scala (new file, +20 lines)
@@ -0,0 +1,20 @@
package aqua.semantics

import cats.data.State

import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.parser.lexer.Token

package object header {

/*
NOTE: This extension glues locations algebra from the body semantics
with the context that is used in the header semantics
*/
extension [S[_], C](context: C)(using
locations: LocationsAlgebra[S, State[C, *]]
) {
def addOccurences(tokens: List[(String, Token[S])]): C =
locations.pointLocations(tokens).runS(context).value
}
}
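Together with the `given Lens[LspContext[S], LocationsState[S]]` added in LspContext.scala, this extension is what lets the header semantics record token occurrences on whatever context it runs against. A minimal, self-contained sketch of the same pattern, where `Ctx`, `Locations` and `StateLocations` are stand-ins invented for illustration (not the project's types, which are `LocationsAlgebra`, `LocationsState` and `LspContext`/`RawContext`); only the cats library is assumed:

```scala
import cats.data.State

// Hypothetical context that merely collects (name, position) occurrences.
final case class Ctx(occurrences: List[(String, Int)] = Nil)

// Hypothetical algebra: operations are expressed as programs in F.
trait Locations[F[_]]:
  def point(tokens: List[(String, Int)]): F[Unit]

// State-based interpreter: each operation becomes a State transition over Ctx.
object StateLocations extends Locations[[A] =>> State[Ctx, A]]:
  def point(tokens: List[(String, Int)]): State[Ctx, Unit] =
    State.modify(ctx => ctx.copy(occurrences = tokens ++ ctx.occurrences))

// The glue, mirroring addOccurences above: run the State program and keep the result.
extension (ctx: Ctx)
  def addOccurences(tokens: List[(String, Int)]): Ctx =
    StateLocations.point(tokens).runS(ctx).value

@main def demo(): Unit =
  println(Ctx().addOccurences(List("foo" -> 3))) // Ctx(List((foo,3)))
```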
@@ -4,12 +4,11 @@ import aqua.parser.lexer.{Name, NamedTypeToken, Token}
import aqua.raw.RawContext
import aqua.raw.value.ValueRaw
import aqua.semantics.Levenshtein
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra}
import aqua.semantics.rules.mangler.ManglerAlgebra
import aqua.semantics.rules.report.ReportAlgebra
import aqua.semantics.rules.{StackInterpreter, abilities}
import aqua.semantics.rules.{abilities, StackInterpreter}
import aqua.types.ArrowType

import cats.data.{NonEmptyMap, State}
import cats.syntax.applicative.*
import cats.syntax.apply.*
@@ -10,6 +10,8 @@ case class LocationsState[S[_]](
variables: List[VariableInfo[S]] = Nil
) extends Logging {

lazy val allLocations: List[TokenLocation[S]] = variables.flatMap(_.allLocations)

def addDefinitions(newDefinitions: List[DefinitionInfo[S]]): LocationsState[S] =
copy(variables = newDefinitions.map(d => VariableInfo(d)) ++ variables)

@@ -44,13 +46,12 @@ case class LocationsState[S[_]](

object LocationsState {

implicit def locationsStateMonoid[S[_]]: Monoid[LocationsState[S]] =
new Monoid[LocationsState[S]] {
override def empty: LocationsState[S] = LocationsState()
given [S[_]]: Monoid[LocationsState[S]] with {
override def empty: LocationsState[S] = LocationsState()

override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] =
LocationsState(
variables = x.variables ++ y.variables
)
}
override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] =
LocationsState(
variables = x.variables ++ y.variables
)
}
}
@@ -10,8 +10,9 @@ import aqua.raw.ops.RawTag
import aqua.raw.value.VarRaw
import aqua.semantics.header.{HeaderHandler, HeaderSem}
import aqua.types.{AbilityType, ArrowType, NilType, ProductType, ScalarType}
import aqua.semantics.rules.locations.{LocationsAlgebra, DummyLocationsInterpreter}

import cats.data.{Chain, NonEmptyList, NonEmptyMap, Validated}
import cats.data.{State, Chain, NonEmptyList, NonEmptyMap, Validated}
import cats.free.Cofree
import cats.syntax.applicative.*
import cats.{Eval, Id, Monoid}
@@ -23,6 +24,9 @@ class HeaderSpec extends AnyFlatSpec with Matchers with Inside {

given Monoid[RawContext] = RawContext.implicits(RawContext.blank).rawContextMonoid

given LocationsAlgebra[Id, State[RawContext, *]] =
DummyLocationsInterpreter[Id, RawContext]()

val handler = new HeaderHandler[Id, RawContext]()

def exportHeader(funcName: String): Ast.Head[Id] = {