refactor: fix: LNG-143 LNG-144 definitions and locations algebras (#705)

This commit is contained in:
Dima 2023-05-03 16:24:19 +03:00 committed by GitHub
parent fe889c9e00
commit 670e779ba0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
58 changed files with 655 additions and 469 deletions

View File

@@ -43,7 +43,7 @@ jobs:
run: sbt aqua-api/fullLinkJS
- name: JS LSP build
run: sbt language-server-api/fullLinkJS
run: sbt language-server-apiJS/fullLinkJS
- name: Upload aqua-cli artifact
uses: actions/upload-artifact@v3
@@ -61,7 +61,7 @@ jobs:
uses: actions/upload-artifact@v3
with:
name: aqua-lsp
path: language-server/language-server-api/target/scala-*/language-server-api-opt/main.js
path: language-server/language-server-api/.js/target/scala-*/language-server-api-opt/main.js
aqua-cli:
name: "Publish aqua-cli"

View File

@@ -58,7 +58,7 @@ jobs:
- name: JS LSP build
env:
SNAPSHOT: ${{ steps.version.outputs.id }}
run: sbt language-server-api/fastOptJS
run: sbt language-server-apiJS/fastOptJS
- name: JS API build
env:
@@ -81,7 +81,7 @@ jobs:
uses: actions/upload-artifact@v3
with:
name: aqua-lsp
path: language-server/language-server-api/target/scala-*/language-server-api-fastopt.js
path: language-server/language-server-api/.js/target/scala-*/language-server-api-fastopt.js
aqua-cli:
name: "Publish aqua-cli"

View File

@@ -1,10 +1,9 @@
func returnCall() -> string -> string:
closure = (s: string) -> string:
<- s
closure("123asdf")
<- closure
module Import3 declares *
func test() -> string:
a = returnCall()
b = a("arg")
<- b
export foo_bar
use "export.aqua"
func foo_bar() -> string, string:
z <- FooBars.foo()
<- z, FooBars.DECLARE_CONST2

View File

@@ -1,5 +1,7 @@
-- exports3.aqua
module Export declares *
module FooBars declares *
const DECLARE_CONST = "declare_const"
const DECLARE_CONST2 = "declare_const2"
func foo() -> string:
<- "I am MyFooBar foo"

View File

@@ -61,7 +61,11 @@ lazy val cli = crossProject(JSPlatform, JVMPlatform)
"-H:+DashboardCode",
"-H:+DashboardPointsTo",
"-H:+DashboardAll"
) ++ sys.env.get("COMPILE_STATIC").filter(_.trim.toLowerCase() == "true").map(_ => Seq("--static")).getOrElse(Seq.empty),
) ++ sys.env
.get("COMPILE_STATIC")
.filter(_.trim.toLowerCase() == "true")
.map(_ => Seq("--static"))
.getOrElse(Seq.empty),
libraryDependencies ++= Seq(
"com.monovore" %%% "decline" % declineV,
"com.monovore" %%% "decline-effect" % declineV
@ -73,7 +77,8 @@ lazy val cliJS = cli.js
.settings(
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.ESModule)),
scalaJSUseMainModuleInitializer := true
).dependsOn(`js-exports`, `js-imports`)
)
.dependsOn(`js-exports`, `js-imports`)
lazy val cliJVM = cli.jvm
.settings(
@@ -105,21 +110,26 @@ lazy val io = crossProject(JVMPlatform, JSPlatform)
lazy val ioJS = io.js.dependsOn(`js-imports`)
lazy val `language-server-api` = project
lazy val `language-server-api` = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("language-server/language-server-api"))
.enablePlugins(ScalaJSPlugin)
.settings(commons: _*)
.settings(
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)),
scalaJSUseMainModuleInitializer := true
)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-effect" % catsEffectV,
"co.fs2" %%% "fs2-io" % fs2V
)
)
.dependsOn(compiler.js, io.js)
.dependsOn(compiler, io)
lazy val `language-server-apiJS` = `language-server-api`.js
.settings(
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)),
scalaJSUseMainModuleInitializer := true
)
.enablePlugins(ScalaJSPlugin)
.dependsOn(`js-exports`, `js-imports`)
lazy val `js-exports` = project
.in(file("js/js-exports"))
@ -140,7 +150,7 @@ lazy val `aqua-api` = project
.settings(
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)),
scalaJSUseMainModuleInitializer := true,
Test / test := {}
Test / test := {}
)
.dependsOn(`js-exports`, `aqua-run`.js, `backend-api`.js)
@ -259,7 +269,8 @@ lazy val definitions = crossProject(JVMPlatform, JSPlatform)
"io.circe" %%% "circe-generic",
"io.circe" %%% "circe-parser"
).map(_ % circeVersion)
).dependsOn(res, types)
)
.dependsOn(res, types)
lazy val logging = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
@ -281,7 +292,8 @@ lazy val constants = crossProject(JVMPlatform, JSPlatform)
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-core" % catsV
)
).dependsOn(parser, raw)
)
.dependsOn(parser, raw)
lazy val `backend-air` = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)

View File

@@ -63,14 +63,16 @@ class AquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad, C: Monoid: Picker](
context =>
// Context with prepared imports
context.andThen { ctx =>
val imports = mod.imports.view
.mapValues(ctx(_))
.collect { case (fn, Some(fc)) => fn -> fc }
.toMap
val header = mod.body.head
// To manage imports, exports run HeaderHandler
headerHandler
.sem(
mod.imports.view
.mapValues(ctx(_))
.collect { case (fn, Some(fc)) => fn -> fc }
.toMap,
mod.body.head
imports,
header
)
.andThen { headerSem =>
// Analyze the body, with prepared initial context

View File

@ -7,11 +7,10 @@ import aqua.parser.lexer.{LiteralToken, Token}
import aqua.parser.lift.FileSpan.F
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
import aqua.semantics.lsp.TokenInfo
import aqua.semantics.{HeaderError, RulesViolated, WrongAST}
import aqua.{AquaIO, SpanParser}
import cats.data.Validated.{Invalid, Valid, invalidNec, validNec}
import cats.data.{NonEmptyChain, Validated}
import cats.data.Validated.{invalidNec, validNec, Invalid, Valid}
import cats.effect.IO
import cats.effect.unsafe.implicits.global
import fs2.io.file.{Files, Path}
@ -22,7 +21,7 @@ import scala.concurrent.Future
import scala.scalajs.js
import scala.scalajs.js.JSConverters.*
import scala.scalajs.js.annotation.*
import scala.scalajs.js.{undefined, UndefOr}
import scala.scalajs.js.{UndefOr, undefined}
@JSExportAll
case class CompilationResult(
@ -173,14 +172,12 @@ object AquaLSP extends App with Logging {
logger.debug("Compilation done.")
def locationsToJs(
locations: List[(Token[FileSpan.F], TokenInfo[FileSpan.F])]
locations: List[(Token[FileSpan.F], Token[FileSpan.F])]
): js.Array[TokenLink] = {
locations.flatMap { case (t, tInfo) =>
tInfo.definition match {
case None => Nil
case Some(d) =>
val fromOp = TokenLocation.fromSpan(t.unit._1)
val toOp = TokenLocation.fromSpan(d.unit._1)
locations.flatMap { case (from, to) =>
val fromOp = TokenLocation.fromSpan(from.unit._1)
val toOp = TokenLocation.fromSpan(to.unit._1)
val link = for {
from <- fromOp
@ -190,10 +187,9 @@ object AquaLSP extends App with Logging {
}
if (link.isEmpty)
logger.warn(s"Incorrect coordinates for token '${t.unit._1.name}'")
logger.warn(s"Incorrect coordinates for token '${from.unit._1.name}'")
link.toList
}
}.toJSArray
}

View File

@@ -0,0 +1,41 @@
package aqua.lsp
import aqua.compiler.AquaCompilerConf
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
import aqua.io.AquaFileError
import aqua.lsp.LSPCompiler
import aqua.parser.lift.FileSpan
import aqua.{AquaIO, SpanParser}
import cats.data.Validated
import cats.effect.{IO, IOApp, Sync}
import fs2.io.file.Path
import scribe.Level
object Test extends IOApp.Simple {
implicit val aio: AquaIO[IO] = new AquaFilesIO[IO]
override def run: IO[Unit] = {
val sources = new AquaFileSources[IO](Path("./aqua-src/antithesis.aqua"), List(Path("./aqua")))
val config = AquaCompilerConf()
for {
start <- IO(System.currentTimeMillis())
_ <- LSPCompiler
.compileToLsp[IO, AquaFileError, FileModuleId, FileSpan.F](
sources,
SpanParser.parser,
config
)
.map {
case Validated.Invalid(errs) =>
errs.map(System.err.println): Unit
case Validated.Valid(res) =>
res.map(println): Unit
}
_ <- IO.println("Compilation ends in: " + (System.currentTimeMillis() - start) + " ms")
} yield ()
}
}

View File

@@ -1,37 +1,84 @@
package aqua.lsp
import aqua.parser.lexer.Token
import aqua.semantics.lsp.{TokenInfo, TokenType}
import aqua.semantics.rules.{ReportError, StackInterpreter}
import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState}
import cats.data.State
import monocle.Lens
import monocle.macros.GenLens
import scribe.Logging
class LocationsInterpreter[S[_], X](implicit
lens: Lens[X, LocationsState[S]]
) extends LocationsAlgebra[S, State[X, *]] {
lens: Lens[X, LocationsState[S]],
error: ReportError[S, X]
) extends LocationsAlgebra[S, State[X, *]] with Logging {
type SX[A] = State[X, A]
private def getState = State.get.map(lens.get)
val stack = new StackInterpreter[S, X, LocationsState[S], LocationsState[S]](
GenLens[LocationsState[S]](_.stack)
)
import stack.{getState, mapStackHead, modify, report}
override def addToken(name: String, token: Token[S]): State[X, Unit] = modify { st =>
st.copy(tokens = st.tokens.updated(name, token))
}
private def combineFieldName(name: String, field: String): String = name + "." + field
override def addTokenWithFields(
name: String,
token: Token[S],
fields: List[(String, Token[S])]
): State[X, Unit] = modify { st =>
st.copy(tokens =
st.tokens ++ ((name, token) +: fields.map(kv => (combineFieldName(name, kv._1), kv._2))).toMap
)
}
def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] =
pointLocation(combineFieldName(typeName, fieldName), token)
def pointTokenWithFieldLocation(
typeName: String,
typeToken: Token[S],
fieldName: String,
token: Token[S]
): State[X, Unit] = {
for {
_ <- pointLocation(typeName, typeToken)
_ <- pointLocation(combineFieldName(typeName, fieldName), token)
} yield {}
}
override def pointLocation(name: String, token: Token[S]): State[X, Unit] = {
modify { st =>
val newLoc: Option[Token[S]] = st.stack.collectFirst {
case frame if frame.tokens.contains(name) => frame.tokens(name)
} orElse st.tokens.get(name)
st.copy(locations = st.locations ++ newLoc.map(token -> _).toList)
}
}
def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = {
modify { st =>
val newLocs = locations.flatMap { case (name, token) =>
(st.stack.collectFirst {
case frame if frame.tokens.contains(name) => frame.tokens(name)
} orElse st.tokens.get(name)).map(token -> _)
}
st.copy(locations = st.locations ++ newLocs)
}
}
private def modify(f: LocationsState[S] => LocationsState[S]): SX[Unit] =
State.modify(lens.modify(f))
override def addNameLocations(locs: List[(Token[S], TokenType[S])]): State[X, Unit] =
modify(st => st.copy(nameLocations = st.nameLocations ++ locs))
override def beginScope(): SX[Unit] =
stack.beginScope(LocationsState[S]())
override def addNameLocation(token: Token[S], tokenType: TokenType[S]): State[X, Unit] =
modify(st => st.copy(nameLocations = st.nameLocations :+ (token, tokenType)))
override def addTypeLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] =
modify(st => st.copy(typeLocations = st.nameLocations ++ locs))
override def addTypeLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] =
modify(st => st.copy(typeLocations = st.typeLocations :+ (token, tokenInfo)))
override def addServiceLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] =
modify(st => st.copy(serviceLocations = st.nameLocations ++ locs))
override def addServiceLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] =
modify(st => st.copy(serviceLocations = st.serviceLocations :+ (token, tokenInfo)))
override def endScope(): SX[Unit] = stack.endScope
}

View File

@@ -1,10 +1,9 @@
package aqua.lsp
import aqua.parser.lexer.{Ability, LiteralToken, Name, Token}
import aqua.parser.lexer.{Ability, LiteralToken, Name, NamedTypeToken, Token}
import aqua.raw.{RawContext, RawPart}
import aqua.types.ArrowType
import aqua.types.{ArrowType, Type}
import cats.{Monoid, Semigroup}
import aqua.semantics.lsp.{TokenArrowInfo, TokenType, TokenInfo}
import cats.syntax.monoid.*
import RawContext.semiRC
import aqua.semantics.header.{Picker, PickerOps}
@ -12,11 +11,11 @@ import aqua.semantics.header.{Picker, PickerOps}
// Context with info that necessary for language server
case class LspContext[S[_]](
raw: RawContext,
abDefinitions: Map[String, (Ability[S], List[(Name[S], ArrowType)])] =
Map.empty[String, (Ability[S], List[(Name[S], ArrowType)])],
rootArrows: Map[String, TokenArrowInfo[S]] = Map.empty[String, TokenArrowInfo[S]],
constants: Map[String, TokenType[S]] = Map.empty[String, TokenType[S]],
locations: List[(Token[S], TokenInfo[S])] = Nil,
abDefinitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]],
rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType],
constants: Map[String, Type] = Map.empty[String, Type],
tokens: Map[String, Token[S]] = Map.empty[String, Token[S]],
locations: List[(Token[S], Token[S])] = Nil,
importTokens: List[LiteralToken[S]] = Nil
)
@ -31,7 +30,8 @@ object LspContext {
abDefinitions = x.abDefinitions ++ y.abDefinitions,
rootArrows = x.rootArrows ++ y.rootArrows,
constants = x.constants ++ y.constants,
locations = x.locations ++ y.locations
locations = x.locations ++ y.locations,
tokens = x.tokens ++ y.tokens
)
trait Implicits[S[_]] {
@ -70,7 +70,11 @@ object LspContext {
override def declares(ctx: LspContext[S]): Set[String] = ops(ctx).declares
override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] =
ctx.copy(raw = ops(ctx).setAbility(name, ctxAb.raw))
val prefix = name + "."
ctx.copy(
raw = ops(ctx).setAbility(name, ctxAb.raw),
tokens = ctx.tokens ++ ctxAb.tokens.map(kv => (prefix + kv._1) -> kv._2)
)
override def setModule(
ctx: LspContext[S],
@ -91,6 +95,16 @@ object LspContext {
rename: Option[String],
declared: Boolean
): Option[LspContext[S]] =
// rename tokens from one context with prefix addition
val newTokens = rename.map { renameStr =>
ctx.tokens.map {
case (tokenName, token) if tokenName.startsWith(name) =>
tokenName.replaceFirst(name, renameStr) -> token
case kv => kv
}
}.getOrElse(ctx.tokens)
ops(ctx)
.pick(name, rename, declared)
.map(rc =>
@ -101,16 +115,15 @@ object LspContext {
rootArrows =
ctx.rootArrows.get(name).fold(Map.empty)(t => Map(rename.getOrElse(name) -> t)),
constants =
ctx.constants.get(name).fold(Map.empty)(t => Map(rename.getOrElse(name) -> t))
ctx.constants.get(name).fold(Map.empty)(t => Map(rename.getOrElse(name) -> t)),
tokens = newTokens
)
)
override def pickHeader(ctx: LspContext[S]): LspContext[S] =
ctx.copy(raw = ops(ctx).pickHeader)
override def pickHeader(ctx: LspContext[S]): LspContext[S] = ctx.copy(raw = ops(ctx).pickHeader)
override def pickDeclared(
ctx: LspContext[S]
)(implicit semi: Semigroup[LspContext[S]]): LspContext[S] =
ctx.copy(raw = ops(ctx).pickDeclared)
)(implicit semi: Semigroup[LspContext[S]]): LspContext[S] = ctx.copy(raw = ops(ctx).pickDeclared)
}
}

View File

@@ -2,9 +2,10 @@ package aqua.lsp
import aqua.parser.Ast
import aqua.parser.head.{ImportExpr, ImportFromExpr}
import aqua.parser.lexer.LiteralToken
import aqua.parser.lexer.{LiteralToken, Token}
import aqua.semantics.rules.ReportError
import aqua.semantics.rules.locations.LocationsState
import aqua.semantics.{CompilerState, SemanticError, Semantics}
import aqua.semantics.{CompilerState, RulesViolated, SemanticError, Semantics}
import cats.data.Validated.{Invalid, Valid}
import cats.syntax.applicative.*
import cats.syntax.apply.*
@ -21,9 +22,7 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
def getImportTokens(ast: Ast[S]): List[LiteralToken[S]] = {
ast.head.foldLeft[List[LiteralToken[S]]](Nil) { case (l, header) =>
header match {
case ImportExpr(fn) =>
println("import: " + fn)
l :+ fn
case ImportExpr(fn) => l :+ fn
case ImportFromExpr(_, fn) => l :+ fn
case _ => l
}
@ -36,6 +35,7 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
): ValidatedNec[SemanticError[S], LspContext[S]] = {
val rawState = CompilerState.init[S](init.raw)
val initState = rawState.copy(
names = rawState.names.copy(
rootArrows = rawState.names.rootArrows ++ init.rootArrows,
@ -43,6 +43,9 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
),
abilities = rawState.abilities.copy(
definitions = rawState.abilities.definitions ++ init.abDefinitions
),
locations = rawState.locations.copy(
tokens = rawState.locations.tokens ++ init.tokens
)
)
@ -51,6 +54,11 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
implicit val ls: Lens[CompilerState[S], LocationsState[S]] =
GenLens[CompilerState[S]](_.locations)
import monocle.syntax.all.*
implicit val re: ReportError[S, CompilerState[S]] =
(st: CompilerState[S], token: Token[S], hints: List[String]) =>
st.focus(_.errors).modify(_.append(RulesViolated(token, hints)))
implicit val locationsInterpreter: LocationsInterpreter[S, CompilerState[S]] =
new LocationsInterpreter[S, CompilerState[S]]()
@ -67,7 +75,8 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
constants = state.names.constants,
abDefinitions = state.abilities.definitions,
locations = state.locations.allLocations,
importTokens = importTokens
importTokens = importTokens,
tokens = state.locations.tokens
)
)
}(Invalid(_))

View File

@@ -8,7 +8,7 @@
"aqua-lsp-api.d.ts"
],
"scripts": {
"move:scalajs": "cp ../language-server-api/target/scala-3.1.3/language-server-api-opt/main.js ./aqua-lsp-api.js"
"move:scalajs": "cp ../language-server-api/.js/target/scala-3.2.2/language-server-api-opt/main.js ./aqua-lsp-api.js"
},
"repository": {
"type": "git",

View File

@ -2,7 +2,7 @@ package aqua.parser.expr
import aqua.parser.Expr
import aqua.parser.lexer.Token._
import aqua.parser.lexer.{CustomTypeToken, TypeToken}
import aqua.parser.lexer.{NamedTypeToken, TypeToken}
import aqua.parser.lift.LiftParser
import cats.Comonad
import cats.parse.Parser
@ -10,7 +10,7 @@ import cats.~>
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan}
case class AliasExpr[F[_]](name: CustomTypeToken[F], target: TypeToken[F])
case class AliasExpr[F[_]](name: NamedTypeToken[F], target: TypeToken[F])
extends Expr[F](AliasExpr, name) {
def mapK[K[_]: Comonad](fk: F ~> K): AliasExpr[K] = copy(name.mapK(fk), target.mapK(fk))
}
@ -18,7 +18,7 @@ case class AliasExpr[F[_]](name: CustomTypeToken[F], target: TypeToken[F])
object AliasExpr extends Expr.Leaf {
override val p: Parser[AliasExpr[Span.S]] =
((`alias` *> ` ` *> CustomTypeToken.ct <* ` : `) ~ TypeToken.`typedef`).map {
((`alias` *> ` ` *> NamedTypeToken.ct <* ` : `) ~ TypeToken.`typedef`).map {
case (name, target) =>
AliasExpr(name, target)
}

View File

@ -1,7 +1,7 @@
package aqua.parser.expr
import aqua.parser.Expr
import aqua.parser.lexer.CustomTypeToken
import aqua.parser.lexer.NamedTypeToken
import aqua.parser.lexer.Token.*
import aqua.parser.lift.LiftParser
import cats.Comonad
@ -10,7 +10,7 @@ import cats.~>
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
case class DataStructExpr[F[_]](name: CustomTypeToken[F]) extends Expr[F](DataStructExpr, name) {
case class DataStructExpr[F[_]](name: NamedTypeToken[F]) extends Expr[F](DataStructExpr, name) {
override def mapK[K[_]: Comonad](fk: F ~> K): DataStructExpr[K] = copy(name.mapK(fk))
}
@ -19,5 +19,5 @@ object DataStructExpr extends Expr.AndIndented {
override def validChildren: List[Expr.Lexem] = FieldTypeExpr :: Nil
override val p: Parser[DataStructExpr[Span.S]] =
`data` *> ` ` *> CustomTypeToken.ct.map(DataStructExpr(_))
`data` *> ` ` *> NamedTypeToken.ct.map(DataStructExpr(_))
}

View File

@ -2,7 +2,7 @@ package aqua.parser.expr
import aqua.parser.Expr
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, ValueToken}
import aqua.parser.lexer.{Ability, NamedTypeToken, ValueToken}
import aqua.parser.lift.LiftParser
import cats.Comonad
import cats.parse.Parser
@ -10,7 +10,7 @@ import cats.~>
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
case class ServiceExpr[F[_]](name: Ability[F], id: Option[ValueToken[F]])
case class ServiceExpr[F[_]](name: NamedTypeToken[F], id: Option[ValueToken[F]])
extends Expr[F](ServiceExpr, name) {
override def mapK[K[_]: Comonad](fk: F ~> K): ServiceExpr[K] =
@ -22,7 +22,7 @@ object ServiceExpr extends Expr.AndIndented {
override def validChildren: List[Expr.Lexem] = ArrowTypeExpr :: Nil
override val p: Parser[ServiceExpr[Span.S]] =
(`service` *> ` ` *> Ability.ab ~ ValueToken.`value`.between(`(`, `)`).backtrack.?).map {
(`service` *> ` ` *> NamedTypeToken.ct ~ ValueToken.`value`.between(`(`, `)`).backtrack.?).map {
case (name, id) =>
ServiceExpr(name, id)
}

View File

@ -3,14 +3,14 @@ package aqua.parser.expr.func
import aqua.parser.Expr
import aqua.parser.expr.func.AbilityIdExpr
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, ValueToken}
import aqua.parser.lexer.{Ability, NamedTypeToken, ValueToken}
import aqua.parser.lift.LiftParser
import cats.parse.Parser as P
import cats.{Comonad, ~>}
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
case class AbilityIdExpr[F[_]](ability: Ability[F], id: ValueToken[F])
case class AbilityIdExpr[F[_]](ability: NamedTypeToken[F], id: ValueToken[F])
extends Expr[F](AbilityIdExpr, ability) {
def mapK[K[_]: Comonad](fk: F ~> K): AbilityIdExpr[K] =
@ -21,7 +21,7 @@ case class AbilityIdExpr[F[_]](ability: Ability[F], id: ValueToken[F])
object AbilityIdExpr extends Expr.Leaf {
override val p: P[AbilityIdExpr[Span.S]] =
((Ability.dotted <* ` `) ~ ValueToken.`value`).map { case (ability, id) =>
((NamedTypeToken.dotted <* ` `) ~ ValueToken.`value`).map { case (ability, id) =>
AbilityIdExpr(ability, id)
}

View File

@ -11,23 +11,24 @@ import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
case class UseFromExpr[F[_]](
imports: NonEmptyList[FromExpr.NameOrAbAs[F]],
filename: LiteralToken[F],
asModule: Ability[F]
imports: NonEmptyList[FromExpr.NameOrAbAs[F]],
filename: LiteralToken[F],
asModule: Ability[F]
) extends FilenameExpr[F] with FromExpr[F] {
override def mapK[K[_]: Comonad](fk: F ~> K): UseFromExpr[K] =
copy(FromExpr.mapK(imports)(fk), filename.mapK(fk), asModule.mapK(fk))
override def toString(): String =
override def toString: String =
s"use ${FromExpr.show(imports)} from ${filename.value} as ${asModule.value}"
}
object UseFromExpr extends HeaderExpr.Leaf {
override val p: Parser[UseFromExpr[Span.S]] =
(`use` *> FromExpr.importFrom.surroundedBy(` `) ~ ValueToken.string ~ (` as ` *> Ability.ab)).map {
case ((imports, filename), asModule) =>
UseFromExpr(imports, filename, asModule)
(`use` *> FromExpr.importFrom.surroundedBy(
` `
) ~ ValueToken.string ~ (` as ` *> Ability.ab)).map { case ((imports, filename), asModule) =>
UseFromExpr(imports, filename, asModule)
}
}

View File

@@ -63,23 +63,23 @@ object OptionTypeToken {
}
case class CustomTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken[F] {
case class NamedTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken[F] {
override def as[T](v: T): F[T] = name.as(v)
override def mapK[K[_]: Comonad](fk: F ~> K): CustomTypeToken[K] = copy(fk(name))
override def mapK[K[_]: Comonad](fk: F ~> K): NamedTypeToken[K] = copy(fk(name))
def value: String = name.extract
override def toString: String = name.extract
}
object CustomTypeToken {
object NamedTypeToken {
val ct: P[CustomTypeToken[Span.S]] =
`Class`.lift.map(CustomTypeToken(_))
val ct: P[NamedTypeToken[Span.S]] =
`Class`.lift.map(NamedTypeToken(_))
def dotted: P[CustomTypeToken[Span.S]] =
`Class`.repSep(`.`).string.lift.map(CustomTypeToken(_))
def dotted: P[NamedTypeToken[Span.S]] =
`Class`.repSep(`.`).string.lift.map(NamedTypeToken(_))
}
case class BasicTypeToken[F[_]: Comonad](scalarType: F[ScalarType]) extends DataTypeToken[F] {
@ -153,7 +153,7 @@ object DataTypeToken {
P.oneOf(
P.defer(`topbottomdef`) :: P.defer(`arraytypedef`) :: P.defer(
OptionTypeToken.`optiontypedef`
) :: BasicTypeToken.`basictypedef` :: CustomTypeToken.dotted :: Nil
) :: BasicTypeToken.`basictypedef` :: NamedTypeToken.dotted :: Nil
)
def `datatypedef`: P[DataTypeToken[Span.S]] =
@ -162,7 +162,7 @@ object DataTypeToken {
StreamTypeToken.`streamtypedef`
) :: P.defer(
OptionTypeToken.`optiontypedef`
) :: BasicTypeToken.`basictypedef` :: CustomTypeToken.dotted :: Nil
) :: BasicTypeToken.`basictypedef` :: NamedTypeToken.dotted :: Nil
)
}

View File

@@ -72,7 +72,7 @@ object CollectionToken {
}
case class CallArrowToken[F[_]: Comonad](
ability: Option[Ability[F]],
ability: Option[NamedTypeToken[F]],
funcName: Name[F],
args: List[ValueToken[F]]
) extends ValueToken[F] {
@ -86,7 +86,7 @@ case class CallArrowToken[F[_]: Comonad](
object CallArrowToken {
val callArrow: P[CallArrowToken[Span.S]] =
((Ability.dotted <* `.`).?.with1 ~
((NamedTypeToken.dotted <* `.`).?.with1 ~
(Name.p
~ comma0(ValueToken.`value`.surroundedBy(`/s*`))
.between(` `.?.with1 *> `(` <* `/s*`, `/s*` *> `)`))
@ -98,7 +98,7 @@ object CallArrowToken {
}
case class StructValueToken[F[_]: Comonad](
typeName: CustomTypeToken[F],
typeName: NamedTypeToken[F],
fields: NonEmptyMap[String, ValueToken[F]]
) extends ValueToken[F] {
@ -116,7 +116,7 @@ object StructValueToken {
"Missing braces '()' after the struct type"
)
.map { case (dn, args) =>
StructValueToken(CustomTypeToken(dn), NonEmptyMap.of(args.head, args.tail: _*))
StructValueToken(NamedTypeToken(dn), NonEmptyMap.of(args.head, args.tail: _*))
}
}

View File

@ -52,7 +52,7 @@ object AquaSpec {
implicit def toBool(n: Boolean): LiteralToken[Id] = LiteralToken[Id](n.toString, bool)
implicit def toStr(n: String): LiteralToken[Id] = LiteralToken[Id]("\"" + n + "\"", string)
implicit def toCustomType(str: String): CustomTypeToken[Id] = CustomTypeToken[Id](str)
implicit def toNamedType(str: String): NamedTypeToken[Id] = NamedTypeToken[Id](str)
def toArrayType(str: String): ArrayTypeToken[Id] = ArrayTypeToken[Id]((), str)
implicit def toArrowType(
@ -67,8 +67,8 @@ object AquaSpec {
): ArrowTypeToken[Id] =
ArrowTypeToken[Id]((), args.map(ab => Some(Name[Id](ab._1)) -> ab._2), res)
implicit def toCustomArg(str: String, customType: String): Arg[Id] =
Arg[Id](str, toCustomType(customType))
implicit def toNamedArg(str: String, customType: String): Arg[Id] =
Arg[Id](str, toNamedType(customType))
implicit def toArg(str: String, typeToken: TypeToken[Id]): Arg[Id] = Arg[Id](str, typeToken)

View File

@ -13,19 +13,19 @@ class AbilityIdExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
"abilities" should "be parsed" in {
parseAbId("Ab a") should be(
AbilityIdExpr[Id](toAb("Ab"), toVar("a"))
AbilityIdExpr[Id](toNamedType("Ab"), toVar("a"))
)
parseAbId("Ab \"a\"") should be(
AbilityIdExpr[Id](toAb("Ab"), LiteralToken[Id]("\"a\"", LiteralType.string))
AbilityIdExpr[Id](toNamedType("Ab"), LiteralToken[Id]("\"a\"", LiteralType.string))
)
parseAbId("Ab 1") should be(
AbilityIdExpr[Id](toAb("Ab"), LiteralToken[Id]("1", LiteralType.number))
AbilityIdExpr[Id](toNamedType("Ab"), LiteralToken[Id]("1", LiteralType.number))
)
parseAbId("Ab a.id") should be(
AbilityIdExpr[Id](toAb("Ab"), toVarLambda("a", List("id")))
AbilityIdExpr[Id](toNamedType("Ab"), toVarLambda("a", List("id")))
)
}

View File

@ -24,7 +24,7 @@ class ArrowTypeExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
parseArrow("onIn(a: Custom, b: Custom2)") should be(
ArrowTypeExpr[Id](
"onIn",
toNamedArrow(List("a" -> toCustomType("Custom"), "b" -> toCustomType("Custom2")), Nil)
toNamedArrow(List("a" -> toNamedType("Custom"), "b" -> toNamedType("Custom2")), Nil)
)
)

View File

@ -17,7 +17,7 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec {
parseExpr("Ab.func(arg)") should be(
CallArrowExpr[Id](
Nil,
CallArrowToken(Some(toAb("Ab")), Name[Id]("func"), List(VarToken[Id](toName("arg"))))
CallArrowToken(Some(toNamedType("Ab")), Name[Id]("func"), List(VarToken[Id](toName("arg"))))
)
)
@ -54,7 +54,7 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec {
CallArrowExpr[Id](
Nil,
CallArrowToken(
Some(toAb("Ab")),
Some(toNamedType("Ab")),
Name[Id]("func"),
List(
toVarLambda("arg", List("doSomething", "and", "doSomethingElse")),

View File

@ -3,7 +3,7 @@ package aqua.parser
import aqua.AquaSpec
import aqua.parser.expr.{FuncExpr, RootExpr}
import aqua.parser.expr.func.{ArrowExpr, CallArrowExpr, ClosureExpr, ReturnExpr}
import aqua.parser.lexer.{Ability, CallArrowToken, Token, VarToken}
import aqua.parser.lexer.{Ability, CallArrowToken, NamedTypeToken, Token, VarToken}
import aqua.types.ScalarType.string
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
@ -53,11 +53,11 @@ class ClosureExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
qTree.d() shouldBe ArrowExpr(toNamedArrow(("s", scToBt(string)) :: Nil, scToBt(string) :: Nil))
qTree.d() shouldBe CallArrowExpr(
Nil,
CallArrowToken(Some(Ability[Id]("LocalSrv")), toName("inside"), Nil)
CallArrowToken(Some(NamedTypeToken[Id]("LocalSrv")), toName("inside"), Nil)
)
qTree.d() shouldBe CallArrowExpr(
toName("p2Id") :: Nil,
CallArrowToken(Some(Ability[Id]("Peer")), toName("identify"), Nil)
CallArrowToken(Some(NamedTypeToken[Id]("Peer")), toName("identify"), Nil)
)
qTree.d() shouldBe ReturnExpr(NonEmptyList(VarToken[Id](toName("p2Id")), Nil))
qTree.d() shouldBe CallArrowExpr(

View File

@ -16,7 +16,7 @@ class FieldTypeExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
)
parseFieldType("some: Custom") should be(
FieldTypeExpr[Id]("some", toCustomType("Custom"))
FieldTypeExpr[Id]("some", toNamedType("Custom"))
)
parseFieldType("some: []Custom") should be(

View File

@ -41,7 +41,7 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
ArrowTypeToken[Id]((), List(None -> BasicTypeToken[Id](u8)), List(BasicTypeToken[Id](bool)))
arrowExpr("(peer: PeerId, other: u8 -> bool)") should be(
ArrowExpr[Id](
toNamedArrow(("peer" -> toCustomType("PeerId")) :: ("other" -> arrowToken) :: Nil, Nil)
toNamedArrow(("peer" -> toNamedType("PeerId")) :: ("other" -> arrowToken) :: Nil, Nil)
)
)
@ -53,7 +53,7 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
)
arrowExpr("(peer: PeerId, other: u32, u64 -> bool)") should be(
ArrowExpr[Id](
toNamedArrow(("peer" -> toCustomType("PeerId")) :: ("other" -> arrowToken2) :: Nil, Nil)
toNamedArrow(("peer" -> toNamedType("PeerId")) :: ("other" -> arrowToken2) :: Nil, Nil)
)
)
@ -61,7 +61,7 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
arrowExpr("(peer: PeerId, ret: u32 -> ()) -> string, u32") should be(
ArrowExpr[Id](
toNamedArrow(
("peer" -> toCustomType("PeerId")) :: ("ret" -> arrowToken3) :: Nil,
("peer" -> toNamedType("PeerId")) :: ("ret" -> arrowToken3) :: Nil,
BasicTypeToken[Id](string) :: BasicTypeToken[Id](u32) :: Nil
)
)
@ -107,9 +107,9 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
).toList
ifBody.head.head.mapK(spanToId) should be(
CallArrowExpr(List(toName("x")), CallArrowToken(Some(toAb("Ab")), "func", Nil))
CallArrowExpr(List(toName("x")), CallArrowToken(Some(toNamedType("Ab")), "func", Nil))
)
ifBody(1).head.mapK(spanToId) should be(AbilityIdExpr(toAb("Peer"), toStr("some id")))
ifBody(1).head.mapK(spanToId) should be(AbilityIdExpr(toNamedType("Peer"), toStr("some id")))
ifBody(2).head.mapK(spanToId) should be(
CallArrowExpr(Nil, CallArrowToken(None, "call", List(toBool(true))))
)
@ -172,14 +172,14 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
qTree.d() shouldBe RootExpr(Token.lift[Id, Unit](()))
// Local service
qTree.d() shouldBe ServiceExpr(toAb("Local"), Some(toStr("local")))
qTree.d() shouldBe ServiceExpr(toNamedType("Local"), Some(toStr("local")))
qTree.d() shouldBe ArrowTypeExpr("gt", toArrowType(Nil, Some(scToBt(bool))))
qTree.d() shouldBe FuncExpr(
"tryGen"
)
qTree.d() shouldBe ArrowExpr(toArrowType(Nil, Some(scToBt(bool))))
qTree.d() shouldBe OnExpr(toStr("deeper"), List(toStr("deep")))
qTree.d() shouldBe CallArrowExpr(List("v"), CallArrowToken(Some(toAb("Local")), "gt", Nil))
qTree.d() shouldBe CallArrowExpr(List("v"), CallArrowToken(Some(toNamedType("Local")), "gt", Nil))
qTree.d() shouldBe ReturnExpr(NonEmptyList.one(toVar("v")))
// genC function
qTree.d() shouldBe FuncExpr(
@ -188,12 +188,12 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
// List("two": VarLambda[Id])
)
qTree.d() shouldBe ArrowExpr(toNamedArrow(("val" -> string) :: Nil, boolSc :: Nil))
qTree.d() shouldBe CallArrowExpr(List("one"), CallArrowToken(Some(toAb("Local")), "gt", List()))
qTree.d() shouldBe CallArrowExpr(List("one"), CallArrowToken(Some(toNamedType("Local")), "gt", List()))
qTree.d() shouldBe OnExpr(toStr("smth"), List(toStr("else")))
qTree.d() shouldBe CallArrowExpr(List("two"), CallArrowToken(None, "tryGen", List()))
qTree.d() shouldBe CallArrowExpr(
List("three"),
CallArrowToken(Some(toAb("Local")), "gt", List())
CallArrowToken(Some(toNamedType("Local")), "gt", List())
)
qTree.d() shouldBe ReturnExpr(NonEmptyList.one(toVar("two")))
}

View File

@ -40,7 +40,7 @@ class IfExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
parseIf("if Op.identity(\"str\") == \"a\"") should be(
IfExpr[Id](
CallArrowToken[Id](Some(toAb("Op")), toName("identity"), toStr("str") :: Nil),
CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toStr("str") :: Nil),
EqOp[Id](true),
toStr("a")
)
@ -48,9 +48,9 @@ class IfExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
parseIf("if Op.identity(\"str\") != Op.identity(\"str\")") should be(
IfExpr[Id](
CallArrowToken[Id](Some(toAb("Op")), toName("identity"), toStr("str") :: Nil),
CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toStr("str") :: Nil),
EqOp[Id](false),
CallArrowToken[Id](Some(toAb("Op")), toName("identity"), toStr("str") :: Nil)
CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toStr("str") :: Nil)
)
)
@ -59,7 +59,7 @@ class IfExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
InfixToken[Id](toNumber(2), toNumber(3), Sub),
EqOp[Id](false),
InfixToken[Id](
CallArrowToken[Id](Some(toAb("Op")), toName("identity"), toNumber(4) :: Nil),
CallArrowToken[Id](Some(toNamedType("Op")), toName("identity"), toNumber(4) :: Nil),
toNumber(5),
Add
)

View File

@ -12,15 +12,15 @@ class ServiceExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
"on" should "be parsed" in {
parseService("service Local(\"local\")") should be(
ServiceExpr[Id](toAb("Local"), Some(toStr("local")))
ServiceExpr[Id](toNamedType("Local"), Some(toStr("local")))
)
parseService("service Local(1)") should be(
ServiceExpr[Id](toAb("Local"), Some(toNumber(1)))
ServiceExpr[Id](toNamedType("Local"), Some(toNumber(1)))
)
parseService("service LocalBr") should be(
ServiceExpr[Id](toAb("LocalBr"), None)
ServiceExpr[Id](toNamedType("LocalBr"), None)
)
}
}

View File

@ -6,7 +6,7 @@ import aqua.parser.expr.ConstantExpr
import aqua.parser.expr.func.AssignmentExpr
import aqua.parser.lexer.Token
import aqua.parser.lexer.CollectionToken.Mode.ArrayMode
import aqua.parser.lexer.{Ability, CallArrowToken, CollectionToken, CustomTypeToken, LiteralToken, Name, StructValueToken, ValueToken, VarToken}
import aqua.parser.lexer.{Ability, CallArrowToken, CollectionToken, NamedTypeToken, LiteralToken, Name, StructValueToken, ValueToken, VarToken}
import aqua.types.LiteralType
import cats.Id
import org.scalatest.flatspec.AnyFlatSpec
@ -28,14 +28,14 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
str
) should be(
StructValueToken(
CustomTypeToken[Id]("Obj"),
NamedTypeToken[Id]("Obj"),
NonEmptyMap.of(
"f1" -> one,
"f2" -> a,
"f3" -> CollectionToken[Id](ArrayMode, List(one, two, three)),
"f4" -> CollectionToken[Id](ArrayMode, List(b, c)),
"f5" -> StructValueToken(
CustomTypeToken[Id]("NestedObj"),
NamedTypeToken[Id]("NestedObj"),
NonEmptyMap.of(
"i1" -> two,
"i2" -> b,
@ -44,7 +44,7 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
)
),
"f6" -> CallArrowToken(None, Name[Id]("funcCall"), List(one)),
"f7" -> CallArrowToken(Option(Ability[Id]("Serv")), Name[Id]("call"), List(two))
"f7" -> CallArrowToken(Option(NamedTypeToken[Id]("Serv")), Name[Id]("call"), List(two))
)
)
)

View File

@ -31,39 +31,39 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
ArrowTypeToken.returnDef().parseAll(str).value.map(_.mapK(spanToId))
typedef("(A -> ())") should be(
ArrowTypeToken[Id]((), List((None, CustomTypeToken[Id]("A"))), Nil)
ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), Nil)
)
typedef("(A -> B)") should be(
ArrowTypeToken[Id]((), List((None, CustomTypeToken[Id]("A"))), List(CustomTypeToken[Id]("B")))
ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), List(NamedTypeToken[Id]("B")))
)
returndef("(A -> B), (C -> D)") should be(
List(
ArrowTypeToken[Id](
(),
(None, CustomTypeToken[Id]("A")) :: Nil,
List(CustomTypeToken[Id]("B"))
(None, NamedTypeToken[Id]("A")) :: Nil,
List(NamedTypeToken[Id]("B"))
),
ArrowTypeToken[Id](
(),
(None, CustomTypeToken[Id]("C")) :: Nil,
List(CustomTypeToken[Id]("D"))
(None, NamedTypeToken[Id]("C")) :: Nil,
List(NamedTypeToken[Id]("D"))
)
)
)
returndef("A, (B, C -> D, E), F -> G, H") should be(
List(
CustomTypeToken[Id]("A"),
NamedTypeToken[Id]("A"),
ArrowTypeToken[Id](
(),
(None, CustomTypeToken[Id]("B")) :: (None, CustomTypeToken[Id]("C")) :: Nil,
List(CustomTypeToken[Id]("D"), CustomTypeToken[Id]("E"))
(None, NamedTypeToken[Id]("B")) :: (None, NamedTypeToken[Id]("C")) :: Nil,
List(NamedTypeToken[Id]("D"), NamedTypeToken[Id]("E"))
),
ArrowTypeToken[Id](
(),
(None, CustomTypeToken[Id]("F")) :: Nil,
List(CustomTypeToken[Id]("G"), CustomTypeToken[Id]("H"))
(None, NamedTypeToken[Id]("F")) :: Nil,
List(NamedTypeToken[Id]("G"), NamedTypeToken[Id]("H"))
)
)
)
@ -79,25 +79,25 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
.mapK(spanToId)
arrowdef("-> B") should be(
ArrowTypeToken[Id]((), Nil, List(CustomTypeToken[Id]("B")))
ArrowTypeToken[Id]((), Nil, List(NamedTypeToken[Id]("B")))
)
arrowdef("A -> B") should be(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("A")) :: Nil,
List(CustomTypeToken[Id]("B"))
(None -> NamedTypeToken[Id]("A")) :: Nil,
List(NamedTypeToken[Id]("B"))
)
)
arrowdef("A -> B -> C") should be(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("A")) :: Nil,
(None -> NamedTypeToken[Id]("A")) :: Nil,
List(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("B")) :: Nil,
List(CustomTypeToken[Id]("C"))
(None -> NamedTypeToken[Id]("B")) :: Nil,
List(NamedTypeToken[Id]("C"))
)
)
)
@ -106,12 +106,12 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
arrowdef("A -> B, C -> D") should be(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("A")) :: Nil,
(None -> NamedTypeToken[Id]("A")) :: Nil,
List(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("B")) :: (None -> CustomTypeToken[Id]("C")) :: Nil,
List(CustomTypeToken[Id]("D"))
(None -> NamedTypeToken[Id]("B")) :: (None -> NamedTypeToken[Id]("C")) :: Nil,
List(NamedTypeToken[Id]("D"))
)
)
)
@ -120,17 +120,17 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
arrowdef("A -> (B -> F), (C -> D, E)") should be(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("A")) :: Nil,
(None -> NamedTypeToken[Id]("A")) :: Nil,
List(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("B")) :: Nil,
CustomTypeToken[Id]("F") :: Nil
(None -> NamedTypeToken[Id]("B")) :: Nil,
NamedTypeToken[Id]("F") :: Nil
),
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("C")) :: Nil,
CustomTypeToken[Id]("D") :: CustomTypeToken[Id]("E") :: Nil
(None -> NamedTypeToken[Id]("C")) :: Nil,
NamedTypeToken[Id]("D") :: NamedTypeToken[Id]("E") :: Nil
)
)
)
@ -139,8 +139,8 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
arrowWithNames("(a: A) -> B") should be(
ArrowTypeToken[Id](
(),
(Some(Name[Id]("a")) -> CustomTypeToken[Id]("A")) :: Nil,
List(CustomTypeToken[Id]("B"))
(Some(Name[Id]("a")) -> NamedTypeToken[Id]("A")) :: Nil,
List(NamedTypeToken[Id]("B"))
)
)
@ -148,7 +148,7 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
ArrowTypeToken[Id](
(),
(None -> strToBt(u32)) :: Nil,
List(CustomTypeToken[Id]("Boo"))
List(NamedTypeToken[Id]("Boo"))
)
)
TypeToken.`typedef`.parseAll("u32 -> ()").value.mapK(spanToId) should be(
@ -157,17 +157,17 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
arrowdef("A, u32 -> B") should be(
ArrowTypeToken[Id](
(),
(None -> CustomTypeToken[Id]("A")) :: (None -> strToBt(u32)) :: Nil,
List(CustomTypeToken[Id]("B"))
(None -> NamedTypeToken[Id]("A")) :: (None -> strToBt(u32)) :: Nil,
List(NamedTypeToken[Id]("B"))
)
)
arrowdef("[]Absolutely, u32 -> B, C") should be(
ArrowTypeToken[Id](
(),
(Option.empty[Name[Id]] -> ArrayTypeToken[Id]((), CustomTypeToken[Id]("Absolutely"))) ::
(Option.empty[Name[Id]] -> ArrayTypeToken[Id]((), NamedTypeToken[Id]("Absolutely"))) ::
(Option.empty[Name[Id]] -> strToBt(u32)) :: Nil,
CustomTypeToken[Id]("B") ::
CustomTypeToken[Id]("C") :: Nil
NamedTypeToken[Id]("B") ::
NamedTypeToken[Id]("C") :: Nil
)
)
@ -177,7 +177,7 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
def typedef(str: String) = TypeToken.`typedef`.parseAll(str).value.mapK(spanToId)
typedef("[]Something") should be(
ArrayTypeToken[Id]((), CustomTypeToken[Id]("Something"))
ArrayTypeToken[Id]((), NamedTypeToken[Id]("Something"))
)
typedef("[]u32") should be(
ArrayTypeToken[Id]((), strToBt(u32))

View File

@ -3,8 +3,8 @@ package aqua.semantics
import aqua.parser.lexer.Token
import aqua.raw.Raw
import aqua.raw.RawContext
import aqua.semantics.lsp.{TokenInfo, TokenType}
import aqua.semantics.rules.abilities.AbilitiesState
import aqua.semantics.rules.definitions.DefinitionsState
import aqua.semantics.rules.locations.LocationsState
import aqua.semantics.rules.names.NamesState
import aqua.semantics.rules.types.TypesState
@ -18,9 +18,9 @@ case class CompilerState[S[_]](
names: NamesState[S] = NamesState[S](),
abilities: AbilitiesState[S] = AbilitiesState[S](),
types: TypesState[S] = TypesState[S](),
definitions: DefinitionsState[S] = DefinitionsState[S](),
locations: LocationsState[S] = LocationsState[S]()
) {
}
)
object CompilerState {
type St[S[_]] = State[CompilerState[S], Raw]
@ -43,7 +43,8 @@ object CompilerState {
a.errors ++ b.errors,
a.names |+| b.names,
a.abilities |+| b.abilities,
a.types |+| b.types
a.types |+| b.types,
locations = a.locations |+| b.locations
)
)
am <- x

View File

@ -8,6 +8,8 @@ import aqua.semantics.expr.*
import aqua.semantics.expr.func.*
import aqua.semantics.rules.ValuesAlgebra
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import cats.Monad
@ -20,7 +22,9 @@ object ExprSem {
A: AbilitiesAlgebra[S, G],
N: NamesAlgebra[S, G],
T: TypesAlgebra[S, G],
V: ValuesAlgebra[S, G]
V: ValuesAlgebra[S, G],
D: DefinitionsAlgebra[S, G],
L: LocationsAlgebra[S, G]
): Prog[G, Raw] =
expr match {
case expr: AbilityIdExpr[S] => new AbilityIdSem(expr).program[G]

View File

@ -7,8 +7,12 @@ import aqua.raw.ops.{FuncOp, SeqGroupTag}
import aqua.raw.{Raw, RawContext, RawPart}
import aqua.semantics.header.Picker
import aqua.semantics.header.Picker.*
import aqua.semantics.lsp.{TokenDef, TokenInfo, TokenType}
import aqua.semantics.rules.abilities.{AbilitiesAlgebra, AbilitiesInterpreter, AbilitiesState}
import aqua.semantics.rules.definitions.{
DefinitionsAlgebra,
DefinitionsInterpreter,
DefinitionsState
}
import aqua.semantics.rules.locations.{DummyLocationsInterpreter, LocationsAlgebra, LocationsState}
import aqua.semantics.rules.names.{NamesAlgebra, NamesInterpreter, NamesState}
import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter, TypesState}
@ -29,7 +33,7 @@ import cats.syntax.semigroup.*
import cats.{Eval, Monad, Semigroup}
import monocle.Lens
import monocle.macros.GenLens
import scribe.{Logging, log}
import scribe.{log, Logging}
import cats.free.Cofree
trait Semantics[S[_], C] {
@ -69,7 +73,9 @@ object Semantics extends Logging {
private def folder[S[_], G[_]: Monad](implicit
A: AbilitiesAlgebra[S, G],
N: NamesAlgebra[S, G],
T: TypesAlgebra[S, G]
T: TypesAlgebra[S, G],
D: DefinitionsAlgebra[S, G],
L: LocationsAlgebra[S, G]
): (Expr[S], Chain[G[Raw]]) => Eval[G[Raw]] = { case (expr, inners) =>
Eval later ExprSem
.getProg[S, G](expr)
@ -93,7 +99,9 @@ object Semantics extends Logging {
type Interpreter[S[_], A] = State[CompilerState[S], A]
def transpile[S[_]](ast: Ast[S])(implicit locations: LocationsAlgebra[S, Interpreter[S, *]]): Interpreter[S, Raw] = {
def transpile[S[_]](
ast: Ast[S]
)(implicit locations: LocationsAlgebra[S, Interpreter[S, *]]): Interpreter[S, Raw] = {
import monocle.syntax.all.*
implicit val re: ReportError[S, CompilerState[S]] =
@ -107,16 +115,23 @@ object Semantics extends Logging {
implicit val ts: Lens[CompilerState[S], TypesState[S]] = GenLens[CompilerState[S]](_.types)
implicit val ds: Lens[CompilerState[S], DefinitionsState[S]] =
GenLens[CompilerState[S]](_.definitions)
implicit val typesInterpreter: TypesInterpreter[S, CompilerState[S]] =
new TypesInterpreter[S, CompilerState[S]]
implicit val abilitiesInterpreter: AbilitiesInterpreter[S, CompilerState[S]] =
new AbilitiesInterpreter[S, CompilerState[S]]
implicit val namesInterpreter: NamesInterpreter[S, CompilerState[S]] =
new NamesInterpreter[S, CompilerState[S]]
implicit val definitionsInterpreter: DefinitionsInterpreter[S, CompilerState[S]] =
new DefinitionsInterpreter[S, CompilerState[S]]
ast.cata(folder[S, Interpreter[S, *]]).value
}
private def astToState[S[_]](ast: Ast[S])(implicit locations: LocationsAlgebra[S, Interpreter[S, *]]): Interpreter[S, Raw] =
private def astToState[S[_]](ast: Ast[S])(implicit
locations: LocationsAlgebra[S, Interpreter[S, *]]
): Interpreter[S, Raw] =
transpile[S](ast)
// If there are any errors, they're inside CompilerState[S]
@ -124,7 +139,9 @@ object Semantics extends Logging {
ast: Ast[S],
initState: CompilerState[S],
init: RawContext
)(implicit locations: LocationsAlgebra[S, Interpreter[S, *]]): Eval[(CompilerState[S], RawContext)] =
)(implicit
locations: LocationsAlgebra[S, Interpreter[S, *]]
): Eval[(CompilerState[S], RawContext)] =
astToState[S](ast)
.run(initState)
.map {

View File

@ -4,6 +4,7 @@ import aqua.parser.expr.ArrowTypeExpr
import aqua.raw.{Raw, TypeRaw}
import aqua.semantics.Prog
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import cats.syntax.functor.*
import cats.syntax.applicative.*
@ -14,10 +15,11 @@ class ArrowTypeSem[S[_]](val expr: ArrowTypeExpr[S]) extends AnyVal {
def program[Alg[_]: Monad](implicit
T: TypesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg]
A: AbilitiesAlgebra[S, Alg],
D: DefinitionsAlgebra[S, Alg]
): Prog[Alg, Raw] =
T.resolveArrowDef(expr.`type`).flatMap {
case Some(t) => A.defineArrow(expr.name, t) as (TypeRaw(expr.name.value, t): Raw)
case Some(t) => D.defineArrow(expr.name, t) as (TypeRaw(expr.name.value, t): Raw)
case None => Raw.error("Arrow type unresolved").pure[Alg]
}

View File

@ -3,6 +3,7 @@ package aqua.semantics.expr
import aqua.parser.expr.DataStructExpr
import aqua.raw.{Raw, TypeRaw}
import aqua.semantics.Prog
import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.StructType
@ -14,16 +15,21 @@ import cats.Monad
class DataStructSem[S[_]](val expr: DataStructExpr[S]) extends AnyVal {
def program[Alg[_]: Monad](implicit
N: NamesAlgebra[S, Alg],
D: DefinitionsAlgebra[S, Alg],
T: TypesAlgebra[S, Alg]
): Prog[Alg, Raw] =
Prog.after((_: Raw) =>
T.purgeFields(expr.name).flatMap {
D.purgeDefs(expr.name).flatMap {
case Some(fields) =>
T.defineDataType(expr.name, fields) as (TypeRaw(
expr.name.value,
StructType(expr.name.value, fields)
): Raw)
T.defineDataType(expr.name, fields).map {
case Some(st@StructType(_, _)) =>
TypeRaw(
expr.name.value,
st
): Raw
case None =>
Raw.error("Data struct types unresolved")
}
case None => Raw.error("Data struct types unresolved").pure[Alg]
}
)

View File

@ -3,6 +3,7 @@ package aqua.semantics.expr
import aqua.parser.expr.FieldTypeExpr
import aqua.raw.{Raw, TypeRaw}
import aqua.semantics.Prog
import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import cats.syntax.functor.*
import cats.syntax.applicative.*
@ -11,9 +12,9 @@ import cats.Monad
class FieldTypeSem[S[_]](val expr: FieldTypeExpr[S]) extends AnyVal {
def program[Alg[_]: Monad](implicit T: TypesAlgebra[S, Alg]): Prog[Alg, Raw] =
def program[Alg[_]: Monad](implicit T: TypesAlgebra[S, Alg], D: DefinitionsAlgebra[S, Alg]): Prog[Alg, Raw] =
T.resolveType(expr.`type`).flatMap {
case Some(t) => T.defineField(expr.name, t) as (TypeRaw(expr.name.value, t): Raw)
case Some(t) => D.defineDef(expr.name, t) as (TypeRaw(expr.name.value, t): Raw)
case None => Raw.error("Field type unresolved").pure[Alg]
}

View File

@ -1,7 +1,7 @@
package aqua.semantics.expr
import aqua.parser.expr.ScopeExpr
import aqua.parser.lexer.{CustomTypeToken, Name}
import aqua.parser.lexer.{NamedTypeToken, Name}
import aqua.raw.{Raw, ServiceRaw}
import aqua.semantics.Prog
import aqua.semantics.rules.ValuesAlgebra
@ -25,8 +25,7 @@ class ScopeSem[S[_]](val expr: ScopeExpr[S]) extends AnyVal {
T: TypesAlgebra[S, Alg],
V: ValuesAlgebra[S, Alg]
): Prog[Alg, Raw] =
Prog.around(
A.beginScope(expr.name),
(_: Unit, _: Raw) =>
Prog.after(
_ =>
Raw.error("Undefined").pure[Alg])
}

View File

@ -5,6 +5,7 @@ import aqua.raw.{Raw, ServiceRaw}
import aqua.semantics.Prog
import aqua.semantics.rules.ValuesAlgebra
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import cats.syntax.apply.*
@ -19,12 +20,12 @@ class ServiceSem[S[_]](val expr: ServiceExpr[S]) extends AnyVal {
A: AbilitiesAlgebra[S, Alg],
N: NamesAlgebra[S, Alg],
T: TypesAlgebra[S, Alg],
V: ValuesAlgebra[S, Alg]
V: ValuesAlgebra[S, Alg],
D: DefinitionsAlgebra[S, Alg]
): Prog[Alg, Raw] =
Prog.around(
A.beginScope(expr.name),
(_: Unit, body: Raw) =>
(A.purgeArrows(expr.name) <* A.endScope()).flatMap {
Prog.after(
_ =>
D.purgeArrows(expr.name).flatMap {
case Some(nel) =>
val arrows = nel.map(kv => kv._1.value -> (kv._1, kv._2)).toNem
for {

View File

@ -17,6 +17,7 @@ import aqua.raw.value.{
import aqua.semantics.Prog
import aqua.semantics.rules.ValuesAlgebra
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.{ArrayType, ArrowType, CanonStreamType, ProductType, StreamType, Type}
@ -36,10 +37,11 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal {
def before[Alg[_]: Monad](implicit
T: TypesAlgebra[S, Alg],
N: NamesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg]
A: AbilitiesAlgebra[S, Alg],
L: LocationsAlgebra[S, Alg]
): Alg[ArrowType] =
// Begin scope -- for mangling
A.beginScope(arrowTypeExpr) *> N.beginScope(arrowTypeExpr) *> T
A.beginScope(arrowTypeExpr) *> L.beginScope() *> N.beginScope(arrowTypeExpr) *> T
.beginArrowScope(
arrowTypeExpr
)
@ -81,7 +83,8 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal {
def after[Alg[_]: Monad](funcArrow: ArrowType, bodyGen: Raw)(implicit
T: TypesAlgebra[S, Alg],
N: NamesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg]
A: AbilitiesAlgebra[S, Alg],
L: LocationsAlgebra[S, Alg]
): Alg[Raw] =
A.endScope() *> (
N.streamsDefinedWithinScope(),
@ -161,12 +164,13 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal {
case bodyModel =>
bodyModel
}
} <* N.endScope()
} <* N.endScope() <* L.endScope()
def program[Alg[_]: Monad](implicit
T: TypesAlgebra[S, Alg],
N: NamesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg]
A: AbilitiesAlgebra[S, Alg],
L: LocationsAlgebra[S, Alg]
): Prog[Alg, Raw] =
Prog.around(
before[Alg],

View File

@ -6,6 +6,7 @@ import aqua.raw.value.ValueRaw
import aqua.raw.Raw
import aqua.semantics.Prog
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import cats.Monad
import cats.syntax.applicative.*
@ -16,16 +17,17 @@ class CatchSem[S[_]](val expr: CatchExpr[S]) extends AnyVal {
def program[Alg[_]: Monad](implicit
N: NamesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg]
A: AbilitiesAlgebra[S, Alg],
L: LocationsAlgebra[S, Alg]
): Prog[Alg, Raw] =
Prog
.around(
N.beginScope(expr.name) >>
N.beginScope(expr.name) >> L.beginScope() >>
N.define(expr.name, ValueRaw.LastError.baseType),
(_: Boolean, g: Raw) =>
g match {
case FuncOp(op) =>
N.endScope() as (XorTag
N.endScope() >> L.endScope() as (XorTag
.wrap(
SeqTag.wrap(
AssignmentTag(ValueRaw.LastError, expr.name.value).leaf,
@ -34,7 +36,7 @@ class CatchSem[S[_]](val expr: CatchExpr[S]) extends AnyVal {
)
.toFuncOp: Raw)
case _ =>
N.endScope() as g
N.endScope() >> L.endScope() as g
}
)
.abilitiesScope[S](expr.token)

View File

@ -2,7 +2,6 @@ package aqua.semantics.header
import aqua.raw.{RawContext, RawPart}
import aqua.semantics.CompilerState
import aqua.semantics.lsp.{TokenArrowInfo, TokenTypeInfo}
import aqua.semantics.rules.abilities.AbilitiesState
import aqua.semantics.rules.names.NamesState
import aqua.semantics.rules.types.TypesState

View File

@ -1,21 +0,0 @@
package aqua.semantics.lsp
import aqua.parser.lexer.Token
import aqua.types.{ArrowType, Type}
// Token description with its definition, type, etc
// ADT describing what the LSP knows about a token: at minimum where it was
// defined, and optionally its resolved type.
sealed trait TokenInfo[F[_]] {
// Location of the token's definition, if one was resolved (None for unresolved names).
def definition: Option[Token[F]]
}
// A token for which only the definition site is known (no type information).
case class TokenDef[F[_]](definition: Option[Token[F]]) extends TokenInfo[F]
// A token that additionally carries a resolved type.
sealed trait TokenType[F[_]] extends TokenInfo[F] {
def definition: Option[Token[F]]
// The resolved type of the token.
def tokenType: Type
}
// A value token: definition site plus its (non-arrow) type.
case class TokenTypeInfo[F[_]](definition: Option[Token[F]], tokenType: Type) extends TokenType[F]
// An arrow (function) token: definition site plus its arrow type.
case class TokenArrowInfo[F[_]](definition: Option[Token[F]], tokenType: ArrowType)
extends TokenType[F]

View File

@ -1,6 +1,6 @@
package aqua.semantics.rules.abilities
import aqua.parser.lexer.{Ability, Name, Token, ValueToken}
import aqua.parser.lexer.{Ability, NamedTypeToken, Name, Token, ValueToken}
import aqua.raw.value.ValueRaw
import aqua.types.ArrowType
import cats.InjectK
@ -8,21 +8,17 @@ import cats.data.{NonEmptyList, NonEmptyMap}
trait AbilitiesAlgebra[S[_], Alg[_]] {
def defineArrow(arrow: Name[S], `type`: ArrowType): Alg[Boolean]
def purgeArrows(token: Token[S]): Alg[Option[NonEmptyList[(Name[S], ArrowType)]]]
def defineService(
name: Ability[S],
name: NamedTypeToken[S],
arrows: NonEmptyMap[String, (Name[S], ArrowType)],
defaultId: Option[ValueRaw]
): Alg[Boolean]
def getArrow(name: Ability[S], arrow: Name[S]): Alg[Option[ArrowType]]
def getArrow(name: NamedTypeToken[S], arrow: Name[S]): Alg[Option[ArrowType]]
def setServiceId(name: Ability[S], id: ValueToken[S], vm: ValueRaw): Alg[Boolean]
def setServiceId(name: NamedTypeToken[S], id: ValueToken[S], vm: ValueRaw): Alg[Boolean]
def getServiceId(name: Ability[S]): Alg[Either[Boolean, ValueRaw]]
def getServiceId(name: NamedTypeToken[S]): Alg[Either[Boolean, ValueRaw]]
def beginScope(token: Token[S]): Alg[Unit]

View File

@ -1,13 +1,13 @@
package aqua.semantics.rules.abilities
import aqua.parser.lexer.{Ability, Name, Token, ValueToken}
import aqua.parser.lexer.{Ability, Name, NamedTypeToken, Token, ValueToken}
import aqua.raw.ServiceRaw
import aqua.raw.RawContext
import aqua.raw.value.ValueRaw
import aqua.semantics.lsp.{TokenArrowInfo, TokenDef, TokenTypeInfo}
import aqua.semantics.Levenshtein
import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.{abilities, ReportError, StackInterpreter}
import aqua.semantics.rules.{ReportError, StackInterpreter, abilities}
import aqua.types.ArrowType
import cats.data.{NonEmptyList, NonEmptyMap, State}
import cats.syntax.functor.*
@ -29,31 +29,8 @@ class AbilitiesInterpreter[S[_], X](implicit
import stackInt.{getState, mapStackHead, mapStackHeadE, modify, report, setState}
override def defineArrow(arrow: Name[S], `type`: ArrowType): SX[Boolean] =
mapStackHeadE(
report(arrow, "No abilities definition scope is found").as(false)
)(h =>
h.arrows.get(arrow.value) match {
case Some(_) =>
Left((arrow, "Arrow with this name was already defined above", false))
case None =>
Right(
h.copy(arrows = h.arrows.updated(arrow.value, arrow -> `type`)) -> true
)
}
)
override def purgeArrows(token: Token[S]): SX[Option[NonEmptyList[(Name[S], ArrowType)]]] =
getState.map(_.purgeArrows).flatMap {
case Some((arrs, nextState)) =>
setState(nextState).as(Option[NonEmptyList[(Name[S], ArrowType)]](arrs))
case _ =>
report(token, "Cannot purge arrows, no arrows provided")
.as(Option.empty[NonEmptyList[(Name[S], ArrowType)]])
}
override def defineService(
name: Ability[S],
name: NamedTypeToken[S],
arrows: NonEmptyMap[String, (Name[S], ArrowType)],
defaultId: Option[ValueRaw]
): SX[Boolean] =
@ -70,31 +47,19 @@ class AbilitiesInterpreter[S[_], X](implicit
services = s.services
.updated(name.value, ServiceRaw(name.value, arrows.map(_._2), defaultId)),
definitions =
s.definitions.updated(name.value, (name, arrows.toSortedMap.values.toList))
s.definitions.updated(name.value, name)
)
).as(true)
).flatMap { _ =>
locations.addTokenWithFields(name.value, name, arrows.toNel.toList.map(t => t._1 -> t._2._1))
}.as(true)
}
// adds location from token to its definition
def addServiceArrowLocation(name: Ability[S], arrow: Name[S]): SX[Unit] = {
getState.flatMap { st =>
st.definitions.get(name.value) match {
case Some((ab, arrows)) =>
locations.addServiceLocations(
(name, TokenDef(Some(ab))) :: (
arrow,
TokenDef(
arrows.find(_._1.value == arrow.value).map(_._1)
)
) :: Nil
)
case None =>
State.pure(())
}
}
private def addServiceArrowLocation(name: NamedTypeToken[S], arrow: Name[S]): SX[Unit] = {
locations.pointTokenWithFieldLocation(name.value, name, arrow.value, arrow)
}
override def getArrow(name: Ability[S], arrow: Name[S]): SX[Option[ArrowType]] =
override def getArrow(name: NamedTypeToken[S], arrow: Name[S]): SX[Option[ArrowType]] =
getService(name.value).map(_.map(_.arrows)).flatMap {
case Some(arrows) =>
arrows(arrow.value)
@ -132,7 +97,7 @@ class AbilitiesInterpreter[S[_], X](implicit
}
}
override def setServiceId(name: Ability[S], id: ValueToken[S], vm: ValueRaw): SX[Boolean] =
override def setServiceId(name: NamedTypeToken[S], id: ValueToken[S], vm: ValueRaw): SX[Boolean] =
getService(name.value).flatMap {
case Some(_) =>
mapStackHead(
@ -144,7 +109,7 @@ class AbilitiesInterpreter[S[_], X](implicit
report(name, "Service with this name is not registered, can't set its ID").as(false)
}
override def getServiceId(name: Ability[S]): SX[Either[Boolean, ValueRaw]] =
override def getServiceId(name: NamedTypeToken[S]): SX[Either[Boolean, ValueRaw]] =
getService(name.value).flatMap {
case Some(_) =>
getState.flatMap(st =>

View File

@ -2,8 +2,7 @@ package aqua.semantics.rules.abilities
import aqua.raw.{RawContext, ServiceRaw}
import aqua.raw.value.ValueRaw
import aqua.parser.lexer.{Ability, Name, Token, ValueToken}
import aqua.semantics.lsp.TokenInfo
import aqua.parser.lexer.{Ability, NamedTypeToken, Name, Token, ValueToken}
import aqua.types.ArrowType
import cats.Monoid
import cats.data.NonEmptyList
@ -14,9 +13,8 @@ case class AbilitiesState[S[_]](
abilities: Map[String, RawContext] = Map.empty,
rootServiceIds: Map[String, (ValueToken[S], ValueRaw)] =
Map.empty[String, (ValueToken[S], ValueRaw)],
definitions: Map[String, (Ability[S], List[(Name[S], ArrowType)])] =
Map.empty[String, (Ability[S], List[(Name[S], ArrowType)])],
locations: List[(Token[S], TokenInfo[S])] = Nil
definitions: Map[String, NamedTypeToken[S]] =
Map.empty[String, NamedTypeToken[S]]
) {
def purgeArrows: Option[(NonEmptyList[(Name[S], ArrowType)], AbilitiesState[S])] =
@ -48,8 +46,7 @@ object AbilitiesState {
x.services ++ y.services,
x.abilities ++ y.abilities,
x.rootServiceIds ++ y.rootServiceIds,
x.definitions ++ y.definitions,
x.locations ++ y.locations
x.definitions ++ y.definitions
)
}

View File

@ -0,0 +1,15 @@
package aqua.semantics.rules.definitions
import aqua.parser.lexer.{NamedTypeToken, Name, Token}
import aqua.types.{ArrowType, Type}
import cats.data.{NonEmptyList, NonEmptyMap}
// Collect and purge arrows/values from structures, services, etc.
// Definitions are accumulated while the body of a struct/service is traversed,
// then drained ("purged") in one shot when the enclosing declaration closes.
trait DefinitionsAlgebra[S[_], Alg[_]] {
// Register a field definition with its type; false if the name is already taken.
def defineDef(name: Name[S], `type`: Type): Alg[Boolean]
// Drain all collected definitions as the fields of the named type; None if empty.
def purgeDefs(token: NamedTypeToken[S]): Alg[Option[NonEmptyMap[String, Type]]]
// Register an arrow definition with its type; false if the name is already taken.
def defineArrow(arrow: Name[S], `type`: ArrowType): Alg[Boolean]
// Drain all collected arrow definitions; None if no arrows were collected.
def purgeArrows(token: Token[S]): Alg[Option[NonEmptyList[(Name[S], ArrowType)]]]
}

View File

@ -0,0 +1,89 @@
package aqua.semantics.rules.definitions
import aqua.parser.lexer.{Name, NamedTypeToken, Token}
import aqua.semantics.rules.{ReportError, StackInterpreter}
import aqua.semantics.rules.abilities.AbilitiesState
import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState}
import aqua.semantics.rules.types.TypesState
import aqua.types.{ArrowType, Type}
import cats.data.{NonEmptyList, NonEmptyMap, State}
import monocle.Lens
import monocle.macros.GenLens
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import scala.collection.immutable.SortedMap
// State-monad interpreter for DefinitionsAlgebra.
//
// Accumulates named definitions (struct fields, service arrows) in
// DefinitionsState while a declaration body is traversed, and drains them when
// the declaration closes. Purging also reports the collected tokens to the
// LocationsAlgebra so the language server can resolve go-to-definition.
class DefinitionsInterpreter[S[_], X](implicit
  lens: Lens[X, DefinitionsState[S]],
  error: ReportError[S, X],
  locations: LocationsAlgebra[S, State[X, *]]
) extends DefinitionsAlgebra[S, State[X, *]] {
  type SX[A] = State[X, A]

  private def getState = State.get.map(lens.get)

  private def modify(f: DefinitionsState[S] => DefinitionsState[S]): SX[Unit] =
    State.modify(lens.modify(f))

  // Record a semantic error attached to the given token.
  def report(t: Token[S], hint: String): SX[Unit] =
    State.modify(error(_, t, hint :: Nil))

  // Shared implementation for defineDef/defineArrow: register `name` with its
  // type unless it was already defined (then report, using `defName` in the
  // message, and return false).
  def define(name: Name[S], `type`: Type, defName: String): SX[Boolean] =
    getState.map(_.definitions.get(name.value)).flatMap {
      case None =>
        modify(st => st.copy(definitions = st.definitions.updated(name.value, name -> `type`)))
          .as(true)
      case Some(_) =>
        report(name, s"Cannot define $defName `${name.value}`, it was already defined above")
          .as(false)
    }

  override def defineDef(name: Name[S], `type`: Type): SX[Boolean] =
    define(name, `type`, "field")

  override def defineArrow(arrow: Name[S], `type`: ArrowType): SX[Boolean] =
    define(arrow, `type`, "arrow")

  // Drain all collected definitions as the fields of the named type `token`,
  // registering the type together with its field tokens in LocationsAlgebra.
  // Returns None (and reports) if nothing was collected.
  override def purgeDefs(
    token: NamedTypeToken[S]
  ): SX[Option[NonEmptyMap[String, Type]]] =
    getState.map(_.definitions).flatMap { defs =>
      NonEmptyMap.fromMap(SortedMap.from(defs.view.mapValues(_._2))) match {
        case Some(fs) =>
          // Field name -> field name token, for go-to-definition on fields.
          val fields = defs.view.mapValues(_._1).toList
          locations
            .addTokenWithFields(token.value, token, fields)
            .flatMap { _ =>
              modify(st => st.copy(definitions = Map.empty)).as(Option(fs))
            }
        case None => report(token, "Cannot define a data type without fields").as(None)
      }
    }

  // Drain only the arrow-typed definitions (service methods). Returns None
  // (and reports at `token`) if no arrows were collected.
  override def purgeArrows(token: Token[S]): SX[Option[NonEmptyList[(Name[S], ArrowType)]]] =
    getState.map(_.definitions).flatMap { definitions =>
      val arrows = NonEmptyList.fromList(
        definitions.values.collect { case (n, at @ ArrowType(_, _)) => (n, at) }.toList
      )
      arrows match {
        case Some(arrs) =>
          modify(st => st.copy(definitions = Map.empty)).as(Option(arrs))
        case None =>
          report(token, "Cannot purge arrows, no arrows provided")
            .as(Option.empty[NonEmptyList[(Name[S], ArrowType)]])
      }
    }
}

View File

@ -0,0 +1,8 @@
package aqua.semantics.rules.definitions
import aqua.parser.lexer.{Name, Token}
import aqua.types.Type
// Mutable-by-copy accumulator for in-progress definitions (struct fields or
// service arrows), keyed by definition name; drained by
// DefinitionsInterpreter.purgeDefs / purgeArrows.
case class DefinitionsState[S[_]](
definitions: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)]
)

View File

@ -0,0 +1,34 @@
package aqua.semantics.rules.locations
import aqua.parser.lexer.Token
import aqua.semantics.rules.StackInterpreter
import aqua.semantics.rules.types.TypesState
import monocle.Lens
import monocle.macros.GenLens
import cats.data.{NonEmptyList, NonEmptyMap, State}
// No-op interpreter of LocationsAlgebra: every operation succeeds without
// reading or modifying the state. Used where location collection (LSP
// tooling) is not needed, e.g. plain compilation.
class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] {

  // Single shared no-op effect reused by every method.
  private val noop: State[X, Unit] = State.pure(())

  override def addToken(name: String, token: Token[S]): State[X, Unit] = noop

  override def addTokenWithFields(
    name: String,
    token: Token[S],
    fields: List[(String, Token[S])]
  ): State[X, Unit] = noop

  override def pointFieldLocation(
    typeName: String,
    fieldName: String,
    token: Token[S]
  ): State[X, Unit] = noop

  override def pointTokenWithFieldLocation(
    typeName: String,
    typeToken: Token[S],
    fieldName: String,
    token: Token[S]
  ): State[X, Unit] = noop

  override def pointLocation(name: String, token: Token[S]): State[X, Unit] = noop

  override def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = noop

  override def beginScope(): State[X, Unit] = noop

  override def endScope(): State[X, Unit] = noop
}

View File

@ -1,14 +1,16 @@
package aqua.semantics.rules.locations
import aqua.parser.lexer.Token
import aqua.semantics.lsp.{TokenInfo, TokenType}
// Algebra of source-location bookkeeping operations: recording definition
// sites of named entities and linking usage tokens back to them (backs
// IDE/LSP go-to-definition support).
// NOTE(review): this view appears to mix pre- and post-refactor method
// sets from the diff — confirm the exact member list against the compiled trait.
trait LocationsAlgebra[S[_], Alg[_]] {
def addNameLocations(locs: List[(Token[S], TokenType[S])]): Alg[Unit]
def addNameLocation(token: Token[S], tokenType: TokenType[S]): Alg[Unit]
// Record the definition site of `name`.
def addToken(name: String, token: Token[S]): Alg[Unit]
// Record a named type's definition together with its fields' name tokens.
def addTokenWithFields(name: String, token: Token[S], fields: List[(String, Token[S])]): Alg[Unit]
def addTypeLocations(locs: List[(Token[S], TokenInfo[S])]): Alg[Unit]
def addTypeLocation(token: Token[S], tokenInfo: TokenInfo[S]): Alg[Unit]
// Link a usage token to a field of `typeName` registered via addTokenWithFields.
def pointTokenWithFieldLocation(typeName: String, typeToken: Token[S], fieldName: String, token: Token[S]): Alg[Unit]
def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): Alg[Unit]
// Link a usage token (or a batch of them) to the definition registered under `name`.
def pointLocation(name: String, token: Token[S]): Alg[Unit]
def pointLocations(locations: List[(String, Token[S])]): Alg[Unit]
def addServiceLocations(locs: List[(Token[S], TokenInfo[S])]): Alg[Unit]
def addServiceLocation(token: Token[S], tokenInfo: TokenInfo[S]): Alg[Unit]
// Scope management for nested location contexts.
def beginScope(): Alg[Unit]
def endScope(): Alg[Unit]
}

View File

@ -1,24 +0,0 @@
package aqua.semantics.rules.locations
import aqua.parser.lexer.Token
import aqua.semantics.lsp.{TokenInfo, TokenType}
import aqua.semantics.rules.StackInterpreter
import aqua.semantics.rules.types.TypesState
import monocle.Lens
import monocle.macros.GenLens
import cats.data.{NonEmptyList, NonEmptyMap, State}
// Pre-refactor no-op interpreter of LocationsAlgebra (this file is removed
// by the commit): every operation leaves the state unchanged.
class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] {
override def addNameLocations(locs: List[(Token[S], TokenType[S])]): State[X, Unit] = State.pure(())
override def addNameLocation(token: Token[S], tokenType: TokenType[S]): State[X, Unit] = State.pure(())
override def addTypeLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] = State.pure(())
override def addTypeLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] = State.pure(())
override def addServiceLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] = State.pure(())
override def addServiceLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] = State.pure(())
}

View File

@ -1,14 +1,26 @@
package aqua.semantics.rules.locations
import aqua.parser.lexer.Token
import aqua.semantics.lsp.{TokenInfo, TokenType}
import aqua.semantics.rules.types.TypesState
import cats.kernel.Monoid
case class LocationsState[S[_]](
nameLocations: List[(Token[S], TokenType[S])] = Nil,
typeLocations: List[(Token[S], TokenInfo[S])] = Nil,
serviceLocations: List[(Token[S], TokenInfo[S])] = Nil
tokens: Map[String, Token[S]] = Map.empty[String, Token[S]],
locations: List[(Token[S], Token[S])] = Nil,
stack: List[LocationsState[S]] = Nil
) {
lazy val allLocations: List[(Token[S], TokenInfo[S])] =
nameLocations ++ typeLocations ++ serviceLocations
lazy val allLocations: List[(Token[S], Token[S])] = locations
}
object LocationsState {
implicit def locationsStateMonoid[S[_]]: Monoid[LocationsState[S]] = new Monoid[LocationsState[S]] {
override def empty: LocationsState[S] = LocationsState()
override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] =
LocationsState(
tokens = x.tokens ++ y.tokens
)
}
}

View File

@ -23,9 +23,9 @@ trait NamesAlgebra[S[_], Alg[_]] {
def defineArrow(name: Name[S], gen: ArrowType, isRoot: Boolean): Alg[Boolean]
def beginScope(token: Token[S]): Alg[Unit]
def streamsDefinedWithinScope(): Alg[Map[String, StreamType]]
def beginScope(token: Token[S]): Alg[Unit]
def endScope(): Alg[Unit]
}

View File

@ -1,7 +1,6 @@
package aqua.semantics.rules.names
import aqua.parser.lexer.{Name, Token}
import aqua.semantics.lsp.{TokenArrowInfo, TokenType, TokenTypeInfo}
import aqua.semantics.Levenshtein
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.{ReportError, StackInterpreter}
@ -27,7 +26,7 @@ class NamesInterpreter[S[_], X](implicit
type SX[A] = State[X, A]
def readName(name: String): SX[Option[TokenType[S]]] =
private def readName(name: String): SX[Option[Type]] =
getState.map { st =>
st.constants.get(name) orElse st.stack.collectFirst {
case frame if frame.names.contains(name) => frame.names(name)
@ -36,7 +35,7 @@ class NamesInterpreter[S[_], X](implicit
}
override def read(name: Name[S], mustBeDefined: Boolean = true): SX[Option[Type]] =
OptionT(constantInfo(name))
OptionT(constantDefined(name))
.orElseF(readName(name.value))
.value
.flatTap {
@ -52,27 +51,23 @@ class NamesInterpreter[S[_], X](implicit
)
)
)
case Some(tokenInfo) =>
locations.addNameLocation(name, tokenInfo)
case Some(_) =>
locations.pointLocation(name.value, name)
case _ => State.pure(())
}
.map(_.map(_.tokenType))
def constantInfo(name: Name[S]): SX[Option[TokenType[S]]] =
getState.map(_.constants.get(name.value))
override def constantDefined(name: Name[S]): SX[Option[Type]] =
constantInfo(name).map(_.map(_.tokenType))
getState.map(_.constants.get(name.value))
def readArrow(name: Name[S]): SX[Option[ArrowType]] =
readArrowHelper(name.value).flatMap {
case Some(g) =>
locations.addNameLocation(name, g).map(_ => Option(g.tokenType))
case Some(at) =>
locations.pointLocation(name.value, name).map(_ => Option(at))
case None =>
// check if we have arrow in variable
readName(name.value).flatMap {
case Some(tt @ TokenTypeInfo(_, at @ ArrowType(_, _))) =>
locations.addNameLocation(name, tt).map(_ => Option(at))
case Some(at @ ArrowType(_, _)) =>
locations.pointLocation(name.value, name).map(_ => Option(at))
case _ =>
getState.flatMap(st =>
report(
@ -88,7 +83,7 @@ class NamesInterpreter[S[_], X](implicit
}
}
def readArrowHelper(name: String): SX[Option[TokenArrowInfo[S]]] =
private def readArrowHelper(name: String): SX[Option[ArrowType]] =
getState.map { st =>
st.stack
.flatMap(_.arrows.get(name))
@ -106,7 +101,7 @@ class NamesInterpreter[S[_], X](implicit
mapStackHead(
report(name, "Cannot define a variable in the root scope")
.as(false)
)(fr => fr.addName(name, `type`) -> true)
)(fr => fr.addName(name, `type`) -> true).flatTap(_ => locations.addToken(name.value, name))
}
override def derive(name: Name[S], `type`: Type, derivedFrom: Set[String]): State[X, Boolean] =
@ -114,7 +109,7 @@ class NamesInterpreter[S[_], X](implicit
case true =>
mapStackHead(State.pure(true))(_.derived(name, derivedFrom) -> true)
case false => State.pure(false)
}
}.flatTap(_ => locations.addToken(name.value, name))
override def getDerivedFrom(fromNames: List[Set[String]]): State[X, List[Set[String]]] =
mapStackHead(State.pure(Nil))(fr =>
@ -128,10 +123,10 @@ class NamesInterpreter[S[_], X](implicit
case None =>
modify(st =>
st.copy(
constants = st.constants.updated(name.value, TokenTypeInfo(Some(name), `type`))
constants = st.constants.updated(name.value, `type`)
)
).as(true)
}
}.flatTap(_ => locations.addToken(name.value, name))
override def defineArrow(name: Name[S], gen: ArrowType, isRoot: Boolean): SX[Boolean] =
readName(name.value).flatMap {
@ -146,7 +141,7 @@ class NamesInterpreter[S[_], X](implicit
if (isRoot)
modify(st =>
st.copy(
rootArrows = st.rootArrows.updated(name.value, TokenArrowInfo(Some(name), gen)),
rootArrows = st.rootArrows.updated(name.value, gen),
definitions = st.definitions.updated(name.value, name)
)
)
@ -155,18 +150,17 @@ class NamesInterpreter[S[_], X](implicit
report(name, "Cannot define a variable in the root scope")
.as(false)
)(fr => fr.addArrow(name, gen) -> true)
}.flatTap(_ => locations.addToken(name.value, name))
override def streamsDefinedWithinScope(): SX[Map[String, StreamType]] =
stackInt.mapStackHead(State.pure(Map.empty[String, StreamType])) { frame =>
frame -> frame.names.collect { case (n, st @ StreamType(_)) =>
n -> st
}
}
override def beginScope(token: Token[S]): SX[Unit] =
stackInt.beginScope(NamesState.Frame(token))
override def streamsDefinedWithinScope(): SX[Map[String, StreamType]] =
stackInt.mapStackHead(State.pure(Map.empty[String, StreamType])) { frame =>
frame -> frame.names.collect { case (n, TokenTypeInfo(_, st @ StreamType(_))) =>
n -> st
}
}
override def endScope(): SX[Unit] = stackInt.endScope
}

View File

@ -2,17 +2,15 @@ package aqua.semantics.rules.names
import aqua.parser.lexer.{Name, Token}
import aqua.raw.RawContext
import aqua.semantics.lsp.{TokenArrowInfo, TokenType, TokenTypeInfo}
import aqua.types.{ArrowType, Type}
import cats.kernel.Monoid
import cats.syntax.functor.*
case class NamesState[S[_]](
stack: List[NamesState.Frame[S]] = Nil,
rootArrows: Map[String, TokenArrowInfo[S]] = Map.empty[String, TokenArrowInfo[S]],
constants: Map[String, TokenType[S]] = Map.empty[String, TokenType[S]],
definitions: Map[String, Name[S]] = Map.empty[String, Name[S]],
locations: List[(Token[S], TokenType[S])] = Nil
rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType],
constants: Map[String, Type] = Map.empty[String, Type],
definitions: Map[String, Name[S]] = Map.empty[String, Name[S]]
) {
def allNames: LazyList[String] =
@ -30,21 +28,21 @@ object NamesState {
case class Frame[S[_]](
token: Token[S],
names: Map[String, TokenType[S]] = Map.empty[String, TokenType[S]],
names: Map[String, Type] = Map.empty[String, Type],
derivedFrom: Map[String, Set[String]] = Map.empty,
arrows: Map[String, TokenArrowInfo[S]] = Map.empty[String, TokenArrowInfo[S]]
arrows: Map[String, ArrowType] = Map.empty[String, ArrowType]
) {
def addName(n: Name[S], t: Type): NamesState.Frame[S] =
copy[S](names = names.updated(n.value, TokenTypeInfo(Some(n), t)))
copy[S](names = names.updated(n.value, t))
def derived(n: Name[S], from: Set[String]): NamesState.Frame[S] =
copy[S](derivedFrom =
derivedFrom + (n.value -> from.flatMap(f => derivedFrom.get(f).fold(Set(f))(_ + f)))
)
def addArrow(n: Name[S], g: ArrowType): NamesState.Frame[S] =
copy[S](arrows = arrows.updated(n.value, TokenArrowInfo(Some(n), g)))
def addArrow(n: Name[S], at: ArrowType): NamesState.Frame[S] =
copy[S](arrows = arrows.updated(n.value, at))
}
implicit def namesStateMonoid[S[_]]: Monoid[NamesState[S]] = new Monoid[NamesState[S]] {
@ -62,8 +60,8 @@ object NamesState {
def init[S[_]](context: RawContext): NamesState[S] =
NamesState(
rootArrows = context.allFuncs.map { case (s, fc) =>
(s, TokenArrowInfo[S](None, fc.arrow.`type`))
(s, fc.arrow.`type`)
},
constants = context.allValues.map { case (s, vm) => (s, TokenTypeInfo[S](None, vm.`type`)) }
constants = context.allValues.map { case (s, vm) => (s, vm.`type`) }
)
}

View File

@ -12,19 +12,20 @@ trait TypesAlgebra[S[_], Alg[_]] {
def resolveArrowDef(arrowDef: ArrowTypeToken[S]): Alg[Option[ArrowType]]
def defineField(name: Name[S], `type`: Type): Alg[Boolean]
def purgeFields(token: CustomTypeToken[S]): Alg[Option[NonEmptyMap[String, Type]]]
def defineDataType(
name: CustomTypeToken[S],
name: NamedTypeToken[S],
fields: NonEmptyMap[String, Type]
): Alg[Boolean]
): Alg[Option[StructType]]
def defineAlias(name: CustomTypeToken[S], target: Type): Alg[Boolean]
def defineAlias(name: NamedTypeToken[S], target: Type): Alg[Boolean]
def resolveIndex(rootT: Type, op: IntoIndex[S], idx: ValueRaw): Alg[Option[PropertyRaw]]
def resolveCopy(rootT: Type, op: IntoCopy[S], fields: NonEmptyMap[String, ValueRaw]): Alg[Option[PropertyRaw]]
def resolveCopy(
rootT: Type,
op: IntoCopy[S],
fields: NonEmptyMap[String, ValueRaw]
): Alg[Option[PropertyRaw]]
def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[PropertyRaw]]
def ensureValuesComparable(token: Token[S], left: Type, right: Type): Alg[Boolean]

View File

@ -2,7 +2,6 @@ package aqua.semantics.rules.types
import aqua.parser.lexer.*
import aqua.raw.value.{FunctorRaw, IntoCopyRaw, IntoFieldRaw, IntoIndexRaw, PropertyRaw, ValueRaw}
import aqua.semantics.lsp.{TokenDef, TokenTypeInfo}
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.{ReportError, StackInterpreter}
import aqua.types.{
@ -45,8 +44,8 @@ class TypesInterpreter[S[_], X](implicit
type ST[A] = State[X, A]
val resolver: (TypesState[S], CustomTypeToken[S]) => Option[
(Type, List[(Token[S], CustomTypeToken[S])])
val resolver: (TypesState[S], NamedTypeToken[S]) => Option[
(Type, List[(Token[S], NamedTypeToken[S])])
] = { (state, ctt) =>
state.strict.get(ctt.value).map(t => (t, state.definitions.get(ctt.value).toList.map(ctt -> _)))
}
@ -55,11 +54,10 @@ class TypesInterpreter[S[_], X](implicit
getState.map(st => TypesStateHelper.resolveTypeToken(token, st, resolver)).flatMap {
case Some(t) =>
val (tt, tokens) = t
locations
.addTypeLocations(tokens.map { case (t, td) =>
(t, TokenDef(Some(td)))
})
.map(_ => Some(tt))
val tokensLocs = tokens.map { case (t, n) =>
n.value -> t
}
locations.pointLocations(tokensLocs).map(_ => Some(tt))
case None => report(token, s"Unresolved type").as(None)
}
@ -67,11 +65,10 @@ class TypesInterpreter[S[_], X](implicit
getState.map(st => TypesStateHelper.resolveArrowDef(arrowDef, st, resolver)).flatMap {
case Valid(t) =>
val (tt, tokens) = t
locations
.addTypeLocations(tokens.map { case (t, td) =>
(t, TokenDef(Some(td)))
})
.map(_ => Some(tt))
val tokensLocs = tokens.map { case (t, n) =>
n.value -> t
}
locations.pointLocations(tokensLocs).map(_ => Some(tt))
case Invalid(errs) =>
errs
.foldLeft[ST[Option[ArrowType]]](State.pure(None)) { case (n, (tkn, hint)) =>
@ -79,53 +76,26 @@ class TypesInterpreter[S[_], X](implicit
}
}
override def defineField(name: Name[S], `type`: Type): State[X, Boolean] =
getState.map(_.fields.get(name.value)).flatMap {
case None =>
modify(st => st.copy(fields = st.fields.updated(name.value, name -> `type`)))
.as(true)
case Some(_) =>
report(name, s"Cannot define field `${name.value}`, it was already defined above")
.as(false)
}
override def purgeFields(
token: CustomTypeToken[S]
): State[X, Option[NonEmptyMap[String, Type]]] = {
getState.map(_.fields).flatMap { fields =>
NonEmptyMap.fromMap(SortedMap.from(fields.view.mapValues(_._2))) match {
case Some(fs) =>
modify { st =>
val tokens = st.fieldsToken
val updated = tokens ++ fields.toList.map { case (n, (tt, t)) =>
(token.value + "." + n, TokenTypeInfo(Some(tt), t))
}
st.copy(fields = Map.empty, fieldsToken = updated)
}.map(_ => Some(fs))
case None => report(token, "Cannot define a data type without fields").as(None)
}
}
}
override def defineDataType(
name: CustomTypeToken[S],
name: NamedTypeToken[S],
fields: NonEmptyMap[String, Type]
): State[X, Boolean] =
): State[X, Option[StructType]] =
getState.map(_.definitions.get(name.value)).flatMap {
case Some(n) if n == name => State.pure(false)
case Some(n) if n == name => State.pure(None)
case Some(_) =>
report(name, s"Type `${name.value}` was already defined").as(false)
report(name, s"Type `${name.value}` was already defined").as(None)
case None =>
val structType = StructType(name.value, fields)
modify { st =>
st.copy(
strict = st.strict.updated(name.value, StructType(name.value, fields)),
strict = st.strict.updated(name.value, structType),
definitions = st.definitions.updated(name.value, name)
)
}
.as(true)
.as(Option(structType))
}
override def defineAlias(name: CustomTypeToken[S], target: Type): State[X, Boolean] =
override def defineAlias(name: NamedTypeToken[S], target: Type): State[X, Boolean] =
getState.map(_.definitions.get(name.value)).flatMap {
case Some(n) if n == name => State.pure(false)
case Some(_) => report(name, s"Type `${name.value}` was already defined").as(false)
@ -135,7 +105,7 @@ class TypesInterpreter[S[_], X](implicit
strict = st.strict.updated(name.value, target),
definitions = st.definitions.updated(name.value, name)
)
).as(true)
).flatMap(_ => locations.addToken(name.value, name)).as(true)
}
override def resolveField(rootT: Type, op: IntoField[S]): State[X, Option[PropertyRaw]] = {
@ -147,12 +117,7 @@ class TypesInterpreter[S[_], X](implicit
s"Field `${op.value}` not found in type `$name`, available: ${fields.toNel.toList.map(_._1).mkString(", ")}"
).as(None)
) { t =>
getState.flatMap { st =>
(st.fieldsToken.get(name + "." + op.value) match {
case Some(td) => locations.addTypeLocation(op, td).map(_ => st)
case None => State.pure(st)
}).as(Some(IntoFieldRaw(op.value, t)))
}
locations.pointFieldLocation(name, op.value, op).as(Some(IntoFieldRaw(op.value, t)))
}
case t =>
t.properties

View File

@ -5,7 +5,7 @@ import aqua.parser.lexer.{
ArrayTypeToken,
ArrowTypeToken,
BasicTypeToken,
CustomTypeToken,
NamedTypeToken,
IntoField,
IntoIndex,
Name,
@ -32,13 +32,11 @@ import cats.data.Validated.{Invalid, Valid}
import cats.data.{Chain, NonEmptyChain, ValidatedNec}
import cats.kernel.Monoid
import aqua.raw.RawContext
import aqua.semantics.lsp.{TokenInfo, TokenType, TokenTypeInfo}
case class TypesState[S[_]](
fields: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)],
strict: Map[String, Type] = Map.empty[String, Type],
definitions: Map[String, CustomTypeToken[S]] = Map.empty[String, CustomTypeToken[S]],
fieldsToken: Map[String, TokenTypeInfo[S]] = Map.empty[String, TokenTypeInfo[S]],
definitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]],
stack: List[TypesState.Frame[S]] = Nil
) {
def isDefined(t: String): Boolean = strict.contains(t)
@ -53,9 +51,9 @@ object TypesStateHelper {
state: TypesState[S],
resolver: (
TypesState[S],
CustomTypeToken[S]
) => Option[(Type, List[(Token[S], CustomTypeToken[S])])]
): Option[(Type, List[(Token[S], CustomTypeToken[S])])] =
NamedTypeToken[S]
) => Option[(Type, List[(Token[S], NamedTypeToken[S])])]
): Option[(Type, List[(Token[S], NamedTypeToken[S])])] =
tt match {
case TopBottomToken(_, isTop) =>
Option((if (isTop) TopType else BottomType, Nil))
@ -71,9 +69,8 @@ object TypesStateHelper {
resolveTypeToken(dtt, state, resolver).collect { case (it: DataType, t) =>
(OptionType(it), t)
}
case ctt: CustomTypeToken[S] =>
case ctt: NamedTypeToken[S] =>
resolver(state, ctt)
// strict.get(ctt.value).map(t => (t, definitions.get(ctt.value).toList.map(ctt -> _)))
case btt: BasicTypeToken[S] => Some((btt.value, Nil))
case ArrowTypeToken(_, args, res) =>
val strictArgs =
@ -97,16 +94,16 @@ object TypesStateHelper {
state: TypesState[S],
resolver: (
TypesState[S],
CustomTypeToken[S]
) => Option[(Type, List[(Token[S], CustomTypeToken[S])])]
): ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])] = {
NamedTypeToken[S]
) => Option[(Type, List[(Token[S], NamedTypeToken[S])])]
): ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], NamedTypeToken[S])])] = {
val resType = arrowTypeToken.res.map(resolveTypeToken(_, state, resolver))
NonEmptyChain
.fromChain(Chain.fromSeq(arrowTypeToken.res.zip(resType).collect { case (dt, None) =>
dt -> "Cannot resolve the result type"
}))
.fold[ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])]] {
.fold[ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], NamedTypeToken[S])])]] {
val (errs, argTypes) = arrowTypeToken.args.map { (argName, tt) =>
resolveTypeToken(tt, state, resolver)
.toRight(tt -> s"Type unresolved")
@ -115,12 +112,12 @@ object TypesStateHelper {
.foldLeft[
(
Chain[(Token[S], String)],
Chain[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))]
Chain[(Option[String], (Type, List[(Token[S], NamedTypeToken[S])]))]
)
](
(
Chain.empty,
Chain.empty[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))]
Chain.empty[(Option[String], (Type, List[(Token[S], NamedTypeToken[S])]))]
)
) {
case ((errs, argTypes), Right(at)) => (errs, argTypes.append(at))
@ -131,7 +128,7 @@ object TypesStateHelper {
.fromChain(errs)
.fold[ValidatedNec[
(Token[S], String),
(ArrowType, List[(Token[S], CustomTypeToken[S])])
(ArrowType, List[(Token[S], NamedTypeToken[S])])
]](
Valid {
val (labels, types) = argTypes.toList.unzip