diff --git a/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala b/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala
index bcd287b8..34294ba8 100644
--- a/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala
+++ b/compiler/src/main/scala/aqua/compiler/CompilerAPI.scala
@@ -8,9 +8,8 @@ import aqua.parser.{Ast, ParserError}
 import aqua.raw.RawPart.Parts
 import aqua.raw.{RawContext, RawPart}
 import aqua.res.AquaRes
-import aqua.semantics.{CompilerState, LspSemantics, RawSemantics, Semantics}
+import aqua.semantics.{CompilerState, RawSemantics, Semantics}
 import aqua.semantics.header.{HeaderHandler, HeaderSem}
-import aqua.semantics.lsp.LspContext
 import cats.data.*
 import cats.data.Validated.{validNec, Invalid, Valid, invalid}
 import cats.parse.Parser0
@@ -66,41 +65,6 @@ object CompilerAPI extends Logging {
       ._1
   }
 
-  private def getLspAquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad](
-    config: AquaCompilerConf
-  ): AquaCompiler[F, E, I, S, LspContext[S]] = {
-    implicit val rc: Monoid[LspContext[S]] = LspContext
-      .implicits(
-        LspContext
-          .blank[S]
-          .copy(raw =
-            RawContext.blank.copy(parts =
-              Chain.fromSeq(config.constantsList).map(const => RawContext.blank -> const)
-            )
-          )
-      )
-      .lspContextMonoid
-
-    implicit val headerSemMonoid: Monoid[HeaderSem[S, LspContext[S]]] =
-      new Monoid[HeaderSem[S, LspContext[S]]] {
-        override def empty: HeaderSem[S, LspContext[S]] = HeaderSem(rc.empty, (c, _) => validNec(c))
-
-        override def combine(
-          a: HeaderSem[S, LspContext[S]],
-          b: HeaderSem[S, LspContext[S]]
-        ): HeaderSem[S, LspContext[S]] = {
-          HeaderSem(
-            a.initCtx |+| b.initCtx,
-            (c, i) => a.finInitCtx(c, i).andThen(b.finInitCtx(_, i))
-          )
-        }
-      }
-
-    val semantics = new LspSemantics[S]()
-
-    new AquaCompiler[F, E, I, S, LspContext[S]](new HeaderHandler[S, LspContext[S]](), semantics)
-  }
-
   private def getAquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad](
     config: AquaCompilerConf
   ): AquaCompiler[F, E, I, S, RawContext] = {
@@ -204,28 +168,6 @@ object CompilerAPI extends Logging {
       Validated.invalid[NonEmptyChain[AquaError[I, E, S]], Chain[T]](errs).pure[F]
   }
 
-  def compileToLsp[F[_]: Monad, E, I: Order, S[_]: Comonad](
-    sources: AquaSources[F, E, I],
-    parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
-    config: AquaCompilerConf
-  ): F[Validated[NonEmptyChain[AquaError[I, E, S]], Map[I, Validated[NonEmptyChain[
-    AquaError[I, E, S]
-  ], Map[I, LspContext[S]]]]]] = {
-
-    val compiler = getLspAquaCompiler[F, E, I, S](config)
-    compiler
-      .compileRaw(sources, parser)
-      .map { v =>
-        v.map { innerMap =>
-          innerMap.view.mapValues { vCtx =>
-            vCtx.map {
-              _.toSortedMap.toMap
-            }
-          }.toMap
-        }
-      }
-  }
-
   def compileToContext[F[_]: Monad, E, I: Order, S[_]: Comonad](
     sources: AquaSources[F, E, I],
     parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
     config: AquaCompilerConf
diff --git a/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala b/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala
index 4170e7e0..e776af45 100644
--- a/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala
+++ b/compiler/src/test/scala/aqua/compiler/AquaCompilerSpec.scala
@@ -31,7 +31,6 @@ import aqua.res.{
   SeqRes,
   XorRes
 }
-import aqua.semantics.lsp.LspContext
 import aqua.types.{ArrayType, CanonStreamType, LiteralType, ScalarType, StreamType, Type}
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
@@ -68,14 +67,6 @@ class AquaCompilerSpec extends AnyFlatSpec with Matchers {
       AquaCompilerConf()
     )
 
-  private def compileToLsp(src: Map[String, String], imports: Map[String, String]) =
-    CompilerAPI
-      .compileToLsp[Id, String, String, Span.S](
-        aquaSource(src, imports),
-        id => txt => Parser.parse(Parser.parserSchema)(txt),
-        AquaCompilerConf()
-      )
-
   "aqua compiler" should "compile a simple snipped to the right context" in {
     val res = compileToContext(
diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/AquaLSP.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/AquaLSP.scala
index 3b0d3834..28656b15 100644
--- a/language-server/language-server-api/src/main/scala/aqua/lsp/AquaLSP.scala
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/AquaLSP.scala
@@ -7,8 +7,8 @@ import aqua.parser.lexer.{LiteralToken, Token}
 import aqua.parser.lift.FileSpan.F
 import aqua.parser.lift.{FileSpan, Span}
 import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
-import aqua.semantics.lsp.{LspContext, TokenInfo}
-import aqua.semantics.{CompilerState, HeaderError, RulesViolated, WrongAST}
+import aqua.semantics.lsp.TokenInfo
+import aqua.semantics.{HeaderError, RulesViolated, WrongAST}
 import aqua.{AquaIO, SpanParser}
 import cats.data.{NonEmptyChain, Validated}
 import cats.data.Validated.{invalidNec, validNec, Invalid, Valid}
@@ -142,7 +142,7 @@ object AquaLSP extends App with Logging {
 
     val proc = for {
-      res <- CompilerAPI
+      res <- LSPCompiler
         .compileToLsp[IO, AquaFileError, FileModuleId, FileSpan.F](
           sources,
           SpanParser.parser,
diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala
new file mode 100644
index 00000000..8e0474e5
--- /dev/null
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala
@@ -0,0 +1,75 @@
+package aqua.lsp
+
+import aqua.compiler.{AquaCompiler, AquaCompilerConf, AquaError, AquaSources}
+import aqua.parser.{Ast, ParserError}
+import aqua.raw.RawContext
+import aqua.semantics.header.{HeaderHandler, HeaderSem}
+import cats.data.Validated.validNec
+import cats.syntax.semigroup.*
+import cats.syntax.applicative.*
+import cats.syntax.flatMap.*
+import cats.syntax.functor.*
+import cats.syntax.monoid.*
+import cats.syntax.traverse.*
+import cats.{Comonad, Monad, Monoid, Order}
+import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
+
+object LSPCompiler {
+
+  private def getLspAquaCompiler[F[_]: Monad, E, I: Order, S[_]: Comonad](
+    config: AquaCompilerConf
+  ): AquaCompiler[F, E, I, S, LspContext[S]] = {
+    implicit val rc: Monoid[LspContext[S]] = LspContext
+      .implicits(
+        LspContext
+          .blank[S]
+          .copy(raw =
+            RawContext.blank.copy(parts =
+              Chain.fromSeq(config.constantsList).map(const => RawContext.blank -> const)
+            )
+          )
+      )
+      .lspContextMonoid
+
+    implicit val headerSemMonoid: Monoid[HeaderSem[S, LspContext[S]]] =
+      new Monoid[HeaderSem[S, LspContext[S]]] {
+        override def empty: HeaderSem[S, LspContext[S]] = HeaderSem(rc.empty, (c, _) => validNec(c))
+
+        override def combine(
+          a: HeaderSem[S, LspContext[S]],
+          b: HeaderSem[S, LspContext[S]]
+        ): HeaderSem[S, LspContext[S]] = {
+          HeaderSem(
+            a.initCtx |+| b.initCtx,
+            (c, i) => a.finInitCtx(c, i).andThen(b.finInitCtx(_, i))
+          )
+        }
+      }
+
+    val semantics = new LspSemantics[S]()
+
+    new AquaCompiler[F, E, I, S, LspContext[S]](new HeaderHandler[S, LspContext[S]](), semantics)
+  }
+
+  def compileToLsp[F[_]: Monad, E, I: Order, S[_]: Comonad](
+    sources: AquaSources[F, E, I],
+    parser: I => String => ValidatedNec[ParserError[S], Ast[S]],
+    config: AquaCompilerConf
+  ): F[Validated[NonEmptyChain[AquaError[I, E, S]], Map[I, Validated[NonEmptyChain[
+    AquaError[I, E, S]
+  ], Map[I, LspContext[S]]]]]] = {
+
+    val compiler = getLspAquaCompiler[F, E, I, S](config)
+    compiler
+      .compileRaw(sources, parser)
+      .map { v =>
+        v.map { innerMap =>
+          innerMap.view.mapValues { vCtx =>
+            vCtx.map {
+              _.toSortedMap.toMap
+            }
+          }.toMap
+        }
+      }
+  }
+}
diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala
new file mode 100644
index 00000000..24197b8d
--- /dev/null
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala
@@ -0,0 +1,37 @@
+package aqua.lsp
+
+import aqua.parser.lexer.Token
+import aqua.semantics.lsp.{TokenInfo, TokenType}
+import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState}
+import cats.data.State
+import monocle.Lens
+
+class LocationsInterpreter[S[_], X](implicit
+  lens: Lens[X, LocationsState[S]]
+) extends LocationsAlgebra[S, State[X, *]] {
+
+  type SX[A] = State[X, A]
+
+  private def getState = State.get.map(lens.get)
+
+  private def modify(f: LocationsState[S] => LocationsState[S]): SX[Unit] =
+    State.modify(lens.modify(f))
+
+  override def addNameLocations(locs: List[(Token[S], TokenType[S])]): State[X, Unit] =
+    modify(st => st.copy(nameLocations = st.nameLocations ++ locs))
+
+  override def addNameLocation(token: Token[S], tokenType: TokenType[S]): State[X, Unit] =
+    modify(st => st.copy(nameLocations = st.nameLocations :+ (token, tokenType)))
+
+  override def addTypeLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] =
+    modify(st => st.copy(typeLocations = st.typeLocations ++ locs))
+
+  override def addTypeLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] =
+    modify(st => st.copy(typeLocations = st.typeLocations :+ (token, tokenInfo)))
+
+  override def addServiceLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] =
+    modify(st => st.copy(serviceLocations = st.serviceLocations ++ locs))
+
+  override def addServiceLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] =
+    modify(st => st.copy(serviceLocations = st.serviceLocations :+ (token, tokenInfo)))
+}
diff --git a/semantics/src/main/scala/aqua/semantics/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala
similarity index 98%
rename from semantics/src/main/scala/aqua/semantics/lsp/LspContext.scala
rename to language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala
index 6ed3615f..85553a80 100644
--- a/semantics/src/main/scala/aqua/semantics/lsp/LspContext.scala
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala
@@ -1,9 +1,10 @@
-package aqua.semantics.lsp
+package aqua.lsp
 
 import aqua.parser.lexer.{Ability, LiteralToken, Name, Token}
 import aqua.raw.{RawContext, RawPart}
 import aqua.types.ArrowType
 import cats.{Monoid, Semigroup}
+import aqua.semantics.lsp.{TokenArrowInfo, TokenType, TokenInfo}
 import cats.syntax.monoid.*
 import RawContext.semiRC
 import aqua.semantics.header.{Picker, PickerOps}
diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala
new file mode 100644
index 00000000..4c64f603
--- /dev/null
+++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala
@@ -0,0 +1,78 @@
+package aqua.lsp
+
+import aqua.parser.Ast
+import aqua.parser.head.{ImportExpr, ImportFromExpr}
+import aqua.parser.lexer.LiteralToken
+import aqua.semantics.rules.locations.LocationsState
+import aqua.semantics.{CompilerState, SemanticError, Semantics}
+import cats.data.Validated.{Invalid, Valid}
+import cats.syntax.applicative.*
+import cats.syntax.apply.*
+import cats.syntax.flatMap.*
+import cats.syntax.functor.*
+import cats.syntax.foldable.*
+import cats.syntax.reducible.*
+import cats.data.{NonEmptyChain, ValidatedNec}
+import monocle.Lens
+import monocle.macros.GenLens
+
+class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
+
+  def getImportTokens(ast: Ast[S]): List[LiteralToken[S]] = {
+    ast.head.foldLeft[List[LiteralToken[S]]](Nil) { case (l, header) =>
+      header match {
+        case ImportExpr(fn) =>
+          println("import: " + fn)
+          l :+ fn
+        case ImportFromExpr(_, fn) => l :+ fn
+        case _ => l
+      }
+    }
+  }
+
+  def process(
+    ast: Ast[S],
+    init: LspContext[S]
+  ): ValidatedNec[SemanticError[S], LspContext[S]] = {
+
+    val rawState = CompilerState.init[S](init.raw)
+    val initState = rawState.copy(
+      names = rawState.names.copy(
+        rootArrows = rawState.names.rootArrows ++ init.rootArrows,
+        constants = rawState.names.constants ++ init.constants
+      ),
+      abilities = rawState.abilities.copy(
+        definitions = rawState.abilities.definitions ++ init.abDefinitions
+      )
+    )
+
+    val importTokens = getImportTokens(ast)
+
+    implicit val ls: Lens[CompilerState[S], LocationsState[S]] =
+      GenLens[CompilerState[S]](_.locations)
+
+    implicit val locationsInterpreter: LocationsInterpreter[S, CompilerState[S]] =
+      new LocationsInterpreter[S, CompilerState[S]]()
+
+    Semantics
+      .interpret(ast, initState, init.raw)
+      .map { case (state, ctx) =>
+        NonEmptyChain
+          .fromChain(state.errors)
+          .fold[ValidatedNec[SemanticError[S], LspContext[S]]] {
+            Valid(
+              LspContext(
+                raw = ctx,
+                rootArrows = state.names.rootArrows,
+                constants = state.names.constants,
+                abDefinitions = state.abilities.definitions,
+                locations = state.locations.allLocations,
+                importTokens = importTokens
+              )
+            )
+          }(Invalid(_))
+      }
+      // TODO: return as Eval
+      .value
+  }
+}
diff --git a/model/raw/src/main/scala/aqua/raw/ScopeRaw.scala b/model/raw/src/main/scala/aqua/raw/ScopeRaw.scala
new file mode 100644
index 00000000..01128e22
--- /dev/null
+++ b/model/raw/src/main/scala/aqua/raw/ScopeRaw.scala
@@ -0,0 +1,19 @@
+package aqua.raw
+
+import aqua.raw.arrow.FuncRaw
+import aqua.types.{ArrowType, StructType, Type}
+import cats.data.NonEmptyMap
+import aqua.raw.value.ValueRaw
+
+import scala.collection.immutable.SortedMap
+
+case class ScopeRaw(
+  name: String,
+  fieldsAndArrows: NonEmptyMap[String, Type]
+) extends RawPart {
+  lazy val rawPartType: StructType = StructType(name, fieldsAndArrows)
+
+
+  override def rename(s: String): RawPart = copy(name = s)
+
+}
diff --git a/parser/src/main/scala/aqua/parser/expr/ScopeExpr.scala b/parser/src/main/scala/aqua/parser/expr/ScopeExpr.scala
new file mode 100644
index 00000000..070d6390
--- /dev/null
+++ b/parser/src/main/scala/aqua/parser/expr/ScopeExpr.scala
@@ -0,0 +1,25 @@
+package aqua.parser.expr
+
+import aqua.parser.Expr
+import aqua.parser.lexer.Token.*
+import aqua.parser.lexer.{Ability, Name, ValueToken}
+import aqua.parser.lift.LiftParser
+import cats.Comonad
+import cats.parse.Parser
+import cats.~>
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+
+case class ScopeExpr[F[_]](name: Ability[F]) extends Expr[F](ScopeExpr, name) {
+
+  override def mapK[K[_]: Comonad](fk: F ~> K): ScopeExpr[K] =
+    copy(name.mapK(fk))
+}
+
+object ScopeExpr extends Expr.AndIndented {
+
+  override def validChildren: List[Expr.Lexem] = FieldTypeExpr :: ArrowTypeExpr :: Nil
+
+  override val p: Parser[ScopeExpr[Span.S]] =
+    (`scope` *> ` ` *> Ability.ab).map(ScopeExpr(_))
+}
diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala
index e58bd4cf..7476b501 100644
--- a/parser/src/main/scala/aqua/parser/lexer/Token.scala
+++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala
@@ -49,6 +49,7 @@ object Token {
   val ` as ` : P[Unit] = `as`.surroundedBy(` `)
   val `alias`: P[Unit] = P.string("alias")
   val `service`: P[Unit] = P.string("service")
+  val `scope`: P[Unit] = P.string("scope")
   val `func`: P[Unit] = P.string("func")
   val `on`: P[Unit] = P.string("on")
   val `via`: P[Unit] = P.string("via")
diff --git a/semantics/src/main/scala/aqua/semantics/CompilerState.scala b/semantics/src/main/scala/aqua/semantics/CompilerState.scala
index 18d58276..90e17e36 100644
--- a/semantics/src/main/scala/aqua/semantics/CompilerState.scala
+++ b/semantics/src/main/scala/aqua/semantics/CompilerState.scala
@@ -5,6 +5,7 @@ import aqua.raw.Raw
 import aqua.raw.RawContext
 import aqua.semantics.lsp.{TokenInfo, TokenType}
 import aqua.semantics.rules.abilities.AbilitiesState
+import aqua.semantics.rules.locations.LocationsState
 import aqua.semantics.rules.names.NamesState
 import aqua.semantics.rules.types.TypesState
 import cats.Semigroup
@@ -16,9 +17,9 @@ case class CompilerState[S[_]](
   errors: Chain[SemanticError[S]] = Chain.empty[SemanticError[S]],
   names: NamesState[S] = NamesState[S](),
   abilities: AbilitiesState[S] = AbilitiesState[S](),
-  types: TypesState[S] = TypesState[S]()
+  types: TypesState[S] = TypesState[S](),
+  locations: LocationsState[S] = LocationsState[S]()
 ) {
-  lazy val locations: List[(Token[S], TokenInfo[S])] = names.locations ++ abilities.locations ++ types.locations
 }
 
 object CompilerState {
diff --git a/semantics/src/main/scala/aqua/semantics/ExprSem.scala b/semantics/src/main/scala/aqua/semantics/ExprSem.scala
index 9d43136a..34cc5da6 100644
--- a/semantics/src/main/scala/aqua/semantics/ExprSem.scala
+++ b/semantics/src/main/scala/aqua/semantics/ExprSem.scala
@@ -47,6 +47,7 @@ object ExprSem {
       case expr: JoinExpr[S] => new JoinSem(expr).program[G]
       case expr: ReturnExpr[S] => new ReturnSem(expr).program[G]
       case expr: ServiceExpr[S] => new ServiceSem(expr).program[G]
+      case expr: ScopeExpr[S] => new ScopeSem(expr).program[G]
      case expr: RootExpr[S] => new RootSem(expr).program[G]
     }
 
diff --git a/semantics/src/main/scala/aqua/semantics/Semantics.scala b/semantics/src/main/scala/aqua/semantics/Semantics.scala
index 2bf2b285..ccfd8a0e 100644
--- a/semantics/src/main/scala/aqua/semantics/Semantics.scala
+++ b/semantics/src/main/scala/aqua/semantics/Semantics.scala
@@ -7,8 +7,9 @@ import aqua.raw.ops.{FuncOp, SeqGroupTag}
 import aqua.raw.{Raw, RawContext, RawPart}
 import aqua.semantics.header.Picker
 import aqua.semantics.header.Picker.*
-import aqua.semantics.lsp.{LspContext, TokenDef, TokenInfo, TokenType}
+import aqua.semantics.lsp.{TokenDef, TokenInfo, TokenType}
 import aqua.semantics.rules.abilities.{AbilitiesAlgebra, AbilitiesInterpreter, AbilitiesState}
+import aqua.semantics.rules.locations.{DummyLocationsInterpreter, LocationsAlgebra, LocationsState}
 import aqua.semantics.rules.names.{NamesAlgebra, NamesInterpreter, NamesState}
 import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter, TypesState}
 import aqua.semantics.rules.{ReportError, ValuesAlgebra}
@@ -31,7 +32,7 @@ import monocle.macros.GenLens
 import scribe.{Logging, log}
 import cats.free.Cofree
 
-sealed trait Semantics[S[_], C] {
+trait Semantics[S[_], C] {
 
   def process(
     ast: Ast[S],
@@ -44,7 +45,11 @@ class RawSemantics[S[_]](implicit p: Picker[RawContext]) extends Semantics[S, Ra
   def process(
     ast: Ast[S],
     init: RawContext
-  ): ValidatedNec[SemanticError[S], RawContext] =
+  ): ValidatedNec[SemanticError[S], RawContext] = {
+
+    implicit val locationsInterpreter: DummyLocationsInterpreter[S, CompilerState[S]] =
+      new DummyLocationsInterpreter[S, CompilerState[S]]()
+
     Semantics
       .interpret(ast, CompilerState.init(init), init)
       .map { case (state, ctx) =>
@@ -56,61 +61,6 @@ class RawSemantics[S[_]](implicit p: Picker[RawContext]) extends Semantics[S, Ra
       }
       // TODO: return as Eval
      .value
-}
-
-class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
-
-  def getImportTokens(ast: Ast[S]): List[LiteralToken[S]] = {
-    ast.head.foldLeft[List[LiteralToken[S]]](Nil){ case (l, header) =>
-      header match {
-        case ImportExpr(fn) =>
-          println("import: " + fn)
-          l :+ fn
-        case ImportFromExpr(_, fn) => l :+ fn
-        case _ => l
-      }
-    }
-  }
-
-  def process(
-    ast: Ast[S],
-    init: LspContext[S]
-  ): ValidatedNec[SemanticError[S], LspContext[S]] = {
-
-    val rawState = CompilerState.init[S](init.raw)
-    val initState = rawState.copy(
-      names = rawState.names.copy(
-        rootArrows = rawState.names.rootArrows ++ init.rootArrows,
-        constants = rawState.names.constants ++ init.constants
-      ),
-      abilities = rawState.abilities.copy(
-        definitions = rawState.abilities.definitions ++ init.abDefinitions
-      )
-    )
-
-    val importTokens = getImportTokens(ast)
-
-
-    Semantics
-      .interpret(ast, initState, init.raw)
-      .map { case (state, ctx) =>
-        NonEmptyChain
-          .fromChain(state.errors)
-          .fold[ValidatedNec[SemanticError[S], LspContext[S]]] {
-            Valid(
-              LspContext(
-                raw = ctx,
-                rootArrows = state.names.rootArrows,
-                constants = state.names.constants,
-                abDefinitions = state.abilities.definitions,
-                locations = state.locations,
-                importTokens = importTokens
-              )
-            )
-          }(Invalid(_))
-      }
-      // TODO: return as Eval
-      .value
   }
 }
@@ -143,7 +93,7 @@ object Semantics extends Logging {
 
   type Interpreter[S[_], A] = State[CompilerState[S], A]
 
-  def transpile[S[_]](ast: Ast[S]): Interpreter[S, Raw] = {
+  def transpile[S[_]](ast: Ast[S])(implicit locations: LocationsAlgebra[S, Interpreter[S, *]]): Interpreter[S, Raw] = {
     import monocle.syntax.all.*
 
     implicit val re: ReportError[S, CompilerState[S]] =
@@ -166,7 +116,7 @@ object Semantics extends Logging {
     ast.cata(folder[S, Interpreter[S, *]]).value
   }
 
-  private def astToState[S[_]](ast: Ast[S]): Interpreter[S, Raw] =
+  private def astToState[S[_]](ast: Ast[S])(implicit locations: LocationsAlgebra[S, Interpreter[S, *]]): Interpreter[S, Raw] =
     transpile[S](ast)
 
   // If there are any errors, they're inside CompilerState[S]
@@ -174,7 +124,7 @@ object Semantics extends Logging {
     ast: Ast[S],
     initState: CompilerState[S],
     init: RawContext
-  ): Eval[(CompilerState[S], RawContext)] =
+  )(implicit locations: LocationsAlgebra[S, Interpreter[S, *]]): Eval[(CompilerState[S], RawContext)] =
     astToState[S](ast)
       .run(initState)
       .map {
diff --git a/semantics/src/main/scala/aqua/semantics/expr/ScopeSem.scala b/semantics/src/main/scala/aqua/semantics/expr/ScopeSem.scala
new file mode 100644
index 00000000..d6fdc17d
--- /dev/null
+++ b/semantics/src/main/scala/aqua/semantics/expr/ScopeSem.scala
@@ -0,0 +1,32 @@
+package aqua.semantics.expr
+
+import aqua.parser.expr.ScopeExpr
+import aqua.parser.lexer.{CustomTypeToken, Name}
+import aqua.raw.{Raw, ServiceRaw}
+import aqua.semantics.Prog
+import aqua.semantics.rules.ValuesAlgebra
+import aqua.semantics.rules.abilities.AbilitiesAlgebra
+import aqua.semantics.rules.names.NamesAlgebra
+import aqua.semantics.rules.types.TypesAlgebra
+import aqua.types.{ArrowType, Type}
+import aqua.raw.ScopeRaw
+import cats.syntax.apply.*
+import cats.syntax.flatMap.*
+import cats.syntax.functor.*
+import cats.syntax.applicative.*
+import cats.Monad
+import cats.data.NonEmptyList
+
+class ScopeSem[S[_]](val expr: ScopeExpr[S]) extends AnyVal {
+
+  def program[Alg[_]: Monad](implicit
+    A: AbilitiesAlgebra[S, Alg],
+    N: NamesAlgebra[S, Alg],
+    T: TypesAlgebra[S, Alg],
+    V: ValuesAlgebra[S, Alg]
+  ): Prog[Alg, Raw] =
+    Prog.around(
+      A.beginScope(expr.name),
+      (_: Unit, _: Raw) =>
+        Raw.error("Undefined").pure[Alg])
+}
diff --git a/semantics/src/main/scala/aqua/semantics/header/Picker.scala b/semantics/src/main/scala/aqua/semantics/header/Picker.scala
index d3f36d3f..b2ebdf6b 100644
--- a/semantics/src/main/scala/aqua/semantics/header/Picker.scala
+++ b/semantics/src/main/scala/aqua/semantics/header/Picker.scala
@@ -2,7 +2,7 @@ package aqua.semantics.header
 
 import aqua.raw.{RawContext, RawPart}
 import aqua.semantics.CompilerState
-import aqua.semantics.lsp.{LspContext, TokenArrowInfo, TokenTypeInfo}
+import aqua.semantics.lsp.{TokenArrowInfo, TokenTypeInfo}
 import aqua.semantics.rules.abilities.AbilitiesState
 import aqua.semantics.rules.names.NamesState
 import aqua.semantics.rules.types.TypesState
diff --git a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala
index da2ce3d9..321e8754 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala
@@ -6,6 +6,7 @@ import aqua.raw.RawContext
 import aqua.raw.value.ValueRaw
 import aqua.semantics.lsp.{TokenArrowInfo, TokenDef, TokenTypeInfo}
 import aqua.semantics.Levenshtein
+import aqua.semantics.rules.locations.LocationsAlgebra
 import aqua.semantics.rules.{abilities, ReportError, StackInterpreter}
 import aqua.types.ArrowType
 import cats.data.{NonEmptyList, NonEmptyMap, State}
@@ -16,7 +17,8 @@ import monocle.macros.GenLens
 
 class AbilitiesInterpreter[S[_], X](implicit
   lens: Lens[X, AbilitiesState[S]],
-  error: ReportError[S, X]
+  error: ReportError[S, X],
+  locations: LocationsAlgebra[S, State[X, *]]
 ) extends AbilitiesAlgebra[S, State[X, *]] {
 
   type SX[A] = State[X, A]
@@ -78,17 +80,13 @@ class AbilitiesInterpreter[S[_], X](implicit
     getState.flatMap { st =>
       st.definitions.get(name.value) match {
         case Some((ab, arrows)) =>
-          modify(st =>
-            st.copy(locations =
-              st.locations ++ (
-                (name, TokenDef(Some(ab))) :: (
-                  arrow,
-                  TokenDef(
-                    arrows.find(_._1.value == arrow.value).map(_._1)
-                  )
-                ) :: Nil
+          locations.addServiceLocations(
+            (name, TokenDef(Some(ab))) :: (
+              arrow,
+              TokenDef(
+                arrows.find(_._1.value == arrow.value).map(_._1)
               )
-            )
+            ) :: Nil
           )
         case None =>
           State.pure(())
@@ -127,8 +125,7 @@ class AbilitiesInterpreter[S[_], X](implicit
           ) { fn =>
             // TODO: add name and arrow separately
             // TODO: find tokens somewhere
-//            addServiceArrowLocation(name, arrow).as(Some(fn.arrow.`type`))
-            State.pure(Some(fn.arrow.`type`))
+            addServiceArrowLocation(name, arrow).as(Some(fn.arrow.`type`))
           }
         case None =>
          report(name, "Ability with this name is undefined").as(Option.empty[ArrowType])
diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala
new file mode 100644
index 00000000..59acb76a
--- /dev/null
+++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala
@@ -0,0 +1,14 @@
+package aqua.semantics.rules.locations
+import aqua.parser.lexer.Token
+import aqua.semantics.lsp.{TokenInfo, TokenType}
+
+trait LocationsAlgebra[S[_], Alg[_]] {
+  def addNameLocations(locs: List[(Token[S], TokenType[S])]): Alg[Unit]
+  def addNameLocation(token: Token[S], tokenType: TokenType[S]): Alg[Unit]
+
+  def addTypeLocations(locs: List[(Token[S], TokenInfo[S])]): Alg[Unit]
+  def addTypeLocation(token: Token[S], tokenInfo: TokenInfo[S]): Alg[Unit]
+
+  def addServiceLocations(locs: List[(Token[S], TokenInfo[S])]): Alg[Unit]
+  def addServiceLocation(token: Token[S], tokenInfo: TokenInfo[S]): Alg[Unit]
+}
diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsInterpreter.scala
new file mode 100644
index 00000000..2f61adf9
--- /dev/null
+++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsInterpreter.scala
@@ -0,0 +1,24 @@
+package aqua.semantics.rules.locations
+
+import aqua.parser.lexer.Token
+import aqua.semantics.lsp.{TokenInfo, TokenType}
+import aqua.semantics.rules.StackInterpreter
+import aqua.semantics.rules.types.TypesState
+import monocle.Lens
+import monocle.macros.GenLens
+import cats.data.{NonEmptyList, NonEmptyMap, State}
+
+class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] {
+
+  override def addNameLocations(locs: List[(Token[S], TokenType[S])]): State[X, Unit] = State.pure(())
+
+  override def addNameLocation(token: Token[S], tokenType: TokenType[S]): State[X, Unit] = State.pure(())
+
+  override def addTypeLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] = State.pure(())
+
+  override def addTypeLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] = State.pure(())
+
+  override def addServiceLocations(locs: List[(Token[S], TokenInfo[S])]): State[X, Unit] = State.pure(())
+
+  override def addServiceLocation(token: Token[S], tokenInfo: TokenInfo[S]): State[X, Unit] = State.pure(())
+}
diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala
new file mode 100644
index 00000000..bf30c075
--- /dev/null
+++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala
@@ -0,0 +1,14 @@
+package aqua.semantics.rules.locations
+
+import aqua.parser.lexer.Token
+import aqua.semantics.lsp.{TokenInfo, TokenType}
+
+case class LocationsState[S[_]](
+  nameLocations: List[(Token[S], TokenType[S])] = Nil,
+  typeLocations: List[(Token[S], TokenInfo[S])] = Nil,
+  serviceLocations: List[(Token[S], TokenInfo[S])] = Nil
+) {
+
+  lazy val allLocations: List[(Token[S], TokenInfo[S])] =
+    nameLocations ++ typeLocations ++ serviceLocations
+}
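For context on how the pieces above fit together: every interpreter that records token locations now summons a `LocationsAlgebra[S, State[X, *]]` implicitly, and the caller decides which implementation backs it. Below is a minimal illustrative sketch (not part of the diff; the `WiringSketch` object is hypothetical), mirroring the `Utils.scala` test setup shown at the end of this patch.

```scala
import aqua.parser.lexer.Token
import aqua.semantics.{CompilerState, RulesViolated}
import aqua.semantics.rules.ReportError
import aqua.semantics.rules.locations.DummyLocationsInterpreter
import aqua.semantics.rules.names.{NamesInterpreter, NamesState}
import cats.Id
import monocle.Lens
import monocle.macros.GenLens
import monocle.syntax.all.*

object WiringSketch {

  // Report errors by appending them to CompilerState, as the semantics tests do
  implicit val re: ReportError[Id, CompilerState[Id]] =
    (st: CompilerState[Id], token: Token[Id], hints: List[String]) =>
      st.focus(_.errors).modify(_.append(RulesViolated(token, hints)))

  // The plain compile pipeline ignores location tracking, so the no-op algebra is enough;
  // the LSP pipeline supplies aqua.lsp.LocationsInterpreter over LocationsState instead.
  implicit val locations: DummyLocationsInterpreter[Id, CompilerState[Id]] =
    new DummyLocationsInterpreter[Id, CompilerState[Id]]()

  implicit val ns: Lens[CompilerState[Id], NamesState[Id]] =
    GenLens[CompilerState[Id]](_.names)

  // Compiles only because a LocationsAlgebra[Id, State[CompilerState[Id], *]] is in scope
  val names = new NamesInterpreter[Id, CompilerState[Id]]()
}
```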
diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
index f67307dd..a569784e 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
@@ -3,6 +3,7 @@ package aqua.semantics.rules.names
 import aqua.parser.lexer.{Name, Token}
 import aqua.semantics.lsp.{TokenArrowInfo, TokenType, TokenTypeInfo}
 import aqua.semantics.Levenshtein
+import aqua.semantics.rules.locations.LocationsAlgebra
 import aqua.semantics.rules.{ReportError, StackInterpreter}
 import aqua.types.{ArrowType, StreamType, Type}
 import cats.data.{OptionT, State}
@@ -12,8 +13,11 @@ import cats.~>
 import monocle.Lens
 import monocle.macros.GenLens
 
-class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: ReportError[S, X])
-    extends NamesAlgebra[S, State[X, *]] {
+class NamesInterpreter[S[_], X](implicit
+  lens: Lens[X, NamesState[S]],
+  error: ReportError[S, X],
+  locations: LocationsAlgebra[S, State[X, *]]
+) extends NamesAlgebra[S, State[X, *]] {
 
   val stackInt = new StackInterpreter[S, X, NamesState[S], NamesState.Frame[S]](
     GenLens[NamesState[S]](_.stack)
@@ -49,7 +53,7 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
           )
         )
       case Some(tokenInfo) =>
-        modify(st => st.copy(locations = st.locations :+ (name, tokenInfo)))
+        locations.addNameLocation(name, tokenInfo)
       case _ => State.pure(())
     }
     .map(_.map(_.tokenType))
@@ -63,12 +67,12 @@ class NamesInterpreter[S[_], X](implicit lens: Lens[X, NamesState[S]], error: Re
   def readArrow(name: Name[S]): SX[Option[ArrowType]] =
     readArrowHelper(name.value).flatMap {
       case Some(g) =>
-        modify(st => st.copy(locations = st.locations :+ (name, g))).map(_ => Option(g.tokenType))
+        locations.addNameLocation(name, g).map(_ => Option(g.tokenType))
       case None =>
         // check if we have arrow in variable
         readName(name.value).flatMap {
-          case Some(tt@TokenTypeInfo(_, at@ArrowType(_, _))) =>
-            modify(st => st.copy(locations = st.locations :+ (name, tt))).map(_ => Option(at))
+          case Some(tt @ TokenTypeInfo(_, at @ ArrowType(_, _))) =>
+            locations.addNameLocation(name, tt).map(_ => Option(at))
          case _ =>
            getState.flatMap(st =>
              report(
diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
index c9462638..ddc213f4 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
@@ -3,6 +3,7 @@ package aqua.semantics.rules.types
 import aqua.parser.lexer.*
 import aqua.raw.value.{FunctorRaw, IntoCopyRaw, IntoFieldRaw, IntoIndexRaw, PropertyRaw, ValueRaw}
 import aqua.semantics.lsp.{TokenDef, TokenTypeInfo}
+import aqua.semantics.rules.locations.LocationsAlgebra
 import aqua.semantics.rules.{ReportError, StackInterpreter}
 import aqua.types.{
   ArrayType,
@@ -30,8 +31,11 @@ import monocle.macros.GenLens
 
 import scala.collection.immutable.SortedMap
 
-class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: ReportError[S, X])
-    extends TypesAlgebra[S, State[X, *]] {
+class TypesInterpreter[S[_], X](implicit
+  lens: Lens[X, TypesState[S]],
+  error: ReportError[S, X],
+  locations: LocationsAlgebra[S, State[X, *]]
+) extends TypesAlgebra[S, State[X, *]] {
 
   val stack = new StackInterpreter[S, X, TypesState[S], TypesState.Frame[S]](
     GenLens[TypesState[S]](_.stack)
@@ -41,27 +45,33 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
 
   type ST[A] = State[X, A]
 
+  val resolver: (TypesState[S], CustomTypeToken[S]) => Option[
+    (Type, List[(Token[S], CustomTypeToken[S])])
+  ] = { (state, ctt) =>
+    state.strict.get(ctt.value).map(t => (t, state.definitions.get(ctt.value).toList.map(ctt -> _)))
+  }
+
   override def resolveType(token: TypeToken[S]): State[X, Option[Type]] =
-    getState.map(_.resolveTypeToken(token)).flatMap {
+    getState.map(st => TypesStateHelper.resolveTypeToken(token, st, resolver)).flatMap {
       case Some(t) =>
         val (tt, tokens) = t
-        modify(st =>
-          st.copy(locations = st.locations ++ tokens.map { case (t, td) =>
+        locations
+          .addTypeLocations(tokens.map { case (t, td) =>
             (t, TokenDef(Some(td)))
           })
-        ).map(_ => Some(tt))
+          .map(_ => Some(tt))
       case None => report(token, s"Unresolved type").as(None)
 
   override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] =
-    getState.map(_.resolveArrowDef(arrowDef)).flatMap {
+    getState.map(st => TypesStateHelper.resolveArrowDef(arrowDef, st, resolver)).flatMap {
       case Valid(t) =>
         val (tt, tokens) = t
-        modify(st =>
-          st.copy(locations = st.locations ++ tokens.map { case (t, td) =>
+        locations
+          .addTypeLocations(tokens.map { case (t, td) =>
            (t, TokenDef(Some(td)))
          })
-        ).map(_ => Some(tt))
+          .map(_ => Some(tt))
       case Invalid(errs) =>
         errs
           .foldLeft[ST[Option[ArrowType]]](State.pure(None)) { case (n, (tkn, hint)) =>
@@ -137,13 +147,12 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
             s"Field `${op.value}` not found in type `$name`, available: ${fields.toNel.toList.map(_._1).mkString(", ")}"
           ).as(None)
         ) { t =>
-          modify { st =>
-            st.fieldsToken.get(name + "." + op.value) match {
-              case Some(td) => st.copy(locations = st.locations :+ (op, td))
-              case None => st
+          getState.flatMap { st =>
+            (st.fieldsToken.get(name + "." + op.value) match {
+              case Some(td) => locations.addTypeLocation(op, td).map(_ => st)
+              case None => State.pure(st)
+            }).as(Some(IntoFieldRaw(op.value, t)))
           }
-
-          }.as(Some(IntoFieldRaw(op.value, t)))
         }
       case t =>
         t.properties
diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala
index c1413222..dd3c82ca 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala
@@ -39,40 +39,52 @@ case class TypesState[S[_]](
   strict: Map[String, Type] = Map.empty[String, Type],
   definitions: Map[String, CustomTypeToken[S]] = Map.empty[String, CustomTypeToken[S]],
   fieldsToken: Map[String, TokenTypeInfo[S]] = Map.empty[String, TokenTypeInfo[S]],
-  stack: List[TypesState.Frame[S]] = Nil,
-  locations: List[(Token[S], TokenInfo[S])] = Nil
+  stack: List[TypesState.Frame[S]] = Nil
 ) {
   def isDefined(t: String): Boolean = strict.contains(t)
+}
+
+object TypesStateHelper {
 
   // TODO: an ugly return type, refactoring
   // Returns type and a token with its definition
-  def resolveTypeToken(tt: TypeToken[S]): Option[(Type, List[(Token[S], CustomTypeToken[S])])] =
+  def resolveTypeToken[S[_]](
+    tt: TypeToken[S],
+    state: TypesState[S],
+    resolver: (
+      TypesState[S],
+      CustomTypeToken[S]
+    ) => Option[(Type, List[(Token[S], CustomTypeToken[S])])]
+  ): Option[(Type, List[(Token[S], CustomTypeToken[S])])] =
     tt match {
       case TopBottomToken(_, isTop) => Option((if (isTop) TopType else BottomType, Nil))
       case ArrayTypeToken(_, dtt) =>
-        resolveTypeToken(dtt).collect { case (it: DataType, t) =>
+        resolveTypeToken(dtt, state, resolver).collect { case (it: DataType, t) =>
           (ArrayType(it), t)
         }
       case StreamTypeToken(_, dtt) =>
-        resolveTypeToken(dtt).collect { case (it: DataType, t) =>
+        resolveTypeToken(dtt, state, resolver).collect { case (it: DataType, t) =>
           (StreamType(it), t)
         }
       case OptionTypeToken(_, dtt) =>
-        resolveTypeToken(dtt).collect { case (it: DataType, t) =>
+        resolveTypeToken(dtt, state, resolver).collect { case (it: DataType, t) =>
           (OptionType(it), t)
         }
       case ctt: CustomTypeToken[S] =>
-        strict.get(ctt.value).map(t => (t, definitions.get(ctt.value).toList.map(ctt -> _)))
+        resolver(state, ctt)
+      // strict.get(ctt.value).map(t => (t, definitions.get(ctt.value).toList.map(ctt -> _)))
       case btt: BasicTypeToken[S] => Some((btt.value, Nil))
       case ArrowTypeToken(_, args, res) =>
         val strictArgs =
-          args.map(_._2).map(resolveTypeToken).collect { case Some((dt: DataType, t)) =>
+          args.map(_._2).map(resolveTypeToken(_, state, resolver)).collect {
+            case Some((dt: DataType, t)) =>
+              (dt, t)
+          }
+        val strictRes =
+          res.map(resolveTypeToken(_, state, resolver)).collect { case Some((dt: DataType, t)) =>
            (dt, t)
          }
-        val strictRes = res.map(resolveTypeToken).collect { case Some((dt: DataType, t)) =>
-          (dt, t)
-        }
         Option.when(strictRes.length == res.length && strictArgs.length == args.length) {
           val (sArgs, argTokens) = strictArgs.unzip
           val (sRes, resTokens) = strictRes.unzip
@@ -80,18 +92,23 @@ case class TypesState[S[_]](
     }
   }
 
-  def resolveArrowDef(
-    ad: ArrowTypeToken[S]
+  def resolveArrowDef[S[_]](
+    arrowTypeToken: ArrowTypeToken[S],
+    state: TypesState[S],
+    resolver: (
+      TypesState[S],
+      CustomTypeToken[S]
+    ) => Option[(Type, List[(Token[S], CustomTypeToken[S])])]
   ): ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])] = {
-    val resType = ad.res.map(resolveTypeToken)
+    val resType = arrowTypeToken.res.map(resolveTypeToken(_, state, resolver))
 
     NonEmptyChain
-      .fromChain(Chain.fromSeq(ad.res.zip(resType).collect { case (dt, None) =>
+      .fromChain(Chain.fromSeq(arrowTypeToken.res.zip(resType).collect { case (dt, None) =>
         dt -> "Cannot resolve the result type"
       }))
       .fold[ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])]] {
-        val (errs, argTypes) = ad.args.map { (argName, tt) =>
-          resolveTypeToken(tt)
+        val (errs, argTypes) = arrowTypeToken.args.map { (argName, tt) =>
+          resolveTypeToken(tt, state, resolver)
            .toRight(tt -> s"Type unresolved")
            .map(argName.map(_.value) -> _)
        }
@@ -124,7 +141,7 @@ case class TypesState[S[_]](
               ProductType.maybeLabelled(labels.zip(types.map(_._1))),
               ProductType(resTypes)
             ),
-            types.map(_._2).flatten ++ resTokens.flatten
+            types.flatMap(_._2) ++ resTokens.flatten
           )
         }
       )(Invalid(_))
diff --git a/semantics/src/test/scala/aqua/semantics/Utils.scala b/semantics/src/test/scala/aqua/semantics/Utils.scala
index eeb93862..de855237 100644
--- a/semantics/src/test/scala/aqua/semantics/Utils.scala
+++ b/semantics/src/test/scala/aqua/semantics/Utils.scala
@@ -7,11 +7,12 @@ import aqua.raw.{Raw, RawContext}
 import aqua.semantics.expr.func.ClosureSem
 import aqua.semantics.rules.ReportError
 import aqua.semantics.rules.abilities.{AbilitiesInterpreter, AbilitiesState}
+import aqua.semantics.rules.locations.DummyLocationsInterpreter
 import aqua.semantics.rules.names.{NamesInterpreter, NamesState}
 import aqua.semantics.rules.types.{TypesInterpreter, TypesState}
 import aqua.types.*
 import cats.data.State
-import cats.{~>, Id}
+import cats.{Id, ~>}
 import monocle.Lens
 import monocle.macros.GenLens
 import monocle.syntax.all.*
@@ -22,6 +23,9 @@ object Utils {
     (st: CompilerState[Id], token: Token[Id], hints: List[String]) =>
       st.focus(_.errors).modify(_.append(RulesViolated(token, hints)))
 
+  implicit val locationsInterpreter: DummyLocationsInterpreter[Id, CompilerState[Id]] =
+    new DummyLocationsInterpreter[Id, CompilerState[Id]]()
+
   implicit val ns: Lens[CompilerState[Id], NamesState[Id]] = GenLens[CompilerState[Id]](_.names)
 
   implicit val as: Lens[CompilerState[Id], AbilitiesState[Id]] =