From 74d02e1f63766ffc0f849f3d6e262030bc0deab8 Mon Sep 17 00:00:00 2001
From: Dima
Date: Thu, 7 Dec 2023 19:06:51 +0700
Subject: [PATCH] feat(language-server): Pass token types to LSP [LNG-285] (#999)

---
 .github/workflows/snapshot.yml                |   4 +-
 .../main/scala/aqua/run/FuncCompiler.scala    |  22 +-
 aqua-src/antithesis.aqua                      |  23 +-
 build.sbt                                     |   2 +-
 integration-tests/package.json                |   2 +-
 .../.js/src/main/scala/aqua/lsp/AquaLSP.scala | 192 +-----------
 .../src/main/scala/aqua/lsp/OutputTypes.scala |  86 ++++++
 .../main/scala/aqua/lsp/ResultHelper.scala    | 135 +++++++++
 .../src/main/scala/aqua/lsp/LSPCompiler.scala |   9 +-
 .../scala/aqua/lsp/LocationsInterpreter.scala |  63 ++--
 .../src/main/scala/aqua/lsp/LspContext.scala  |  39 ++-
 .../main/scala/aqua/lsp/LspSemantics.scala    |   5 +-
 .../src/test/scala/aqua/lsp/AquaLSPSpec.scala | 281 ++++++++++++++++++
 .../language-server-npm/aqua-lsp-api.d.ts     |   8 +-
 .../scala/aqua/tree/TreeNodeCompanion.scala   |   8 +-
 parser/src/main/scala/aqua/parser/Ast.scala   |   2 +-
 .../scala/aqua/parser/lexer/PropertyOp.scala  |   2 +-
 .../main/scala/aqua/parser/lexer/Token.scala  |   2 -
 .../scala/aqua/parser/lift/FileSpan.scala     |   9 +
 .../src/main/scala/aqua/semantics/Prog.scala  |   8 -
 .../aqua/semantics/expr/AbilitySem.scala      |   2 +-
 .../aqua/semantics/expr/DataStructSem.scala   |   2 +-
 .../aqua/semantics/expr/ServiceSem.scala      |   2 +-
 .../aqua/semantics/expr/func/ArrowSem.scala   |   3 -
 .../aqua/semantics/expr/func/CatchSem.scala   |   2 -
 .../expr/func/DeclareStreamSem.scala          |   5 -
 .../expr/func/ElseOtherwiseSem.scala          |   1 -
 .../aqua/semantics/expr/func/IfSem.scala      |   1 -
 .../aqua/semantics/expr/func/TrySem.scala     |   1 -
 .../aqua/semantics/rules/ValuesAlgebra.scala  |   2 -
 .../abilities/AbilitiesInterpreter.scala      |  14 +-
 .../definitions/DefinitionsAlgebra.scala      |   2 +-
 .../definitions/DefinitionsInterpreter.scala  |  26 +-
 .../locations/DummyLocationsInterpreter.scala |  18 +-
 .../rules/locations/LocationsAlgebra.scala    |  21 +-
 .../rules/locations/LocationsState.scala      |  55 +++-
 .../rules/locations/VariableInfo.scala        |  11 +
 .../rules/names/NamesInterpreter.scala        |  17 +-
 .../rules/types/TypesInterpreter.scala        |  24 +-
 types/src/main/scala/aqua/types/Type.scala    |  51 +++-
 .../main/scala/aqua/helpers/syntax/list.scala |  19 ++
 .../aqua/{ => helpers}/syntax/optiont.scala   |   0
 .../scala/aqua/{ => helpers}/tree/Tree.scala  |  10 +-
 43 files changed, 771 insertions(+), 420 deletions(-)
 create mode 100644 language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala
 create mode 100644 language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala
 create mode 100644 language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala
 create mode 100644 semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala
 create mode 100644 utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala
 rename utils/helpers/src/main/scala/aqua/{ => helpers}/syntax/optiont.scala (100%)
 rename utils/helpers/src/main/scala/aqua/{ => helpers}/tree/Tree.scala (79%)

diff --git a/.github/workflows/snapshot.yml b/.github/workflows/snapshot.yml
index c1653c37..7148b443 100644
--- a/.github/workflows/snapshot.yml
+++ b/.github/workflows/snapshot.yml
@@ -78,12 +78,12 @@ jobs:
           registry-url: "https://npm.fluence.dev"
           cache: "pnpm"
 
-      - run: pnpm -r i
+      - run: pnpm --filter='!integration-tests' -r i
 
      - name: Set package version
        run: node ci.cjs bump-version ${{ steps.version.outputs.id }}
 
-      - run: pnpm -r build
+      - run: pnpm --filter='!integration-tests' -r build
 
      - name: Publish snapshot
        id: snapshot
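
// Illustrative sketch (not part of the patch): the diffs below replace the old
// name -> Token map in LocationsState with a list of VariableInfo values, where
// each variable carries its definition (name, token, type) plus every usage token.
// Definitions feed the new `tokens` array of CompilationResult (token types for the
// LSP client), while allLocations feeds go-to-definition links. Token and Type are
// reduced to plain strings here; the real types live in aqua.parser and aqua.types.
object LocationModelSketch {
  type Token = String // stand-in for aqua.parser.lexer.Token[S]
  type Type  = String // stand-in for aqua.types.Type

  final case class DefinitionInfo(name: String, token: Token, `type`: Type)
  final case class TokenLocation(usage: Token, definition: Token)

  final case class VariableInfo(definition: DefinitionInfo, occurrences: List[Token] = Nil) {
    // every recorded usage links back to its definition token
    def allLocations: List[TokenLocation] =
      occurrences.map(usage => TokenLocation(usage, definition.token))
  }

  final case class LocationsState(variables: List[VariableInfo] = Nil) {
    def addDefinition(definition: DefinitionInfo): LocationsState =
      copy(variables = VariableInfo(definition) :: variables)

    // attach a usage to the most recently defined variable with a matching name
    def addLocation(name: String, usage: Token): LocationsState =
      variables.indexWhere(_.definition.name == name) match {
        case -1 => this
        case i =>
          val v = variables(i)
          copy(variables = variables.updated(i, v.copy(occurrences = usage :: v.occurrences)))
      }
  }

  def main(args: Array[String]): Unit = {
    val st = LocationsState()
      .addDefinition(DefinitionInfo("fooResult", "token@7:4", "string"))
      .addLocation("fooResult", "token@14:12")

    println(st.variables.map(v => v.definition.name -> v.definition.`type`)) // token types
    println(st.variables.flatMap(_.allLocations))                            // usage -> definition links
  }
}
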
diff --git a/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala b/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala index 134e6e46..16717549 100644 --- a/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala +++ b/aqua-run/src/main/scala/aqua/run/FuncCompiler.scala @@ -1,34 +1,24 @@ package aqua.run -import aqua.Rendering.given -import aqua.compiler.{AquaCompiler, AquaCompilerConf, CompileResult, CompilerAPI} +import aqua.compiler.{AquaCompilerConf, CompileResult, CompilerAPI} import aqua.files.{AquaFileSources, FileModuleId} -import aqua.{AquaIO, SpanParser} -import aqua.io.{AquaFileError, AquaPath, PackagePath, Prelude} +import aqua.io.{AquaFileError, AquaPath, PackagePath} import aqua.model.transform.TransformConfig import aqua.model.{AquaContext, FuncArrow} import aqua.parser.lift.FileSpan -import aqua.run.CliFunc +import aqua.{AquaIO, SpanParser} -import cats.data.Validated.{invalidNec, validNec} -import cats.data.{Chain, NonEmptyList, Validated, ValidatedNec} -import cats.effect.IO +import cats.data.{Chain, ValidatedNec} import cats.effect.kernel.{Async, Clock} import cats.syntax.applicative.* +import cats.syntax.apply.* import cats.syntax.flatMap.* import cats.syntax.functor.* -import cats.syntax.monad.* -import cats.syntax.show.* -import cats.syntax.traverse.* import cats.syntax.option.* -import cats.syntax.either.* -import cats.syntax.validated.* -import cats.syntax.apply.* +import cats.syntax.traverse.* import fs2.io.file.{Files, Path} import scribe.Logging -import scala.concurrent.duration.Duration - class FuncCompiler[F[_]: Files: AquaIO: Async]( input: Option[AquaPath], imports: List[Path], diff --git a/aqua-src/antithesis.aqua b/aqua-src/antithesis.aqua index e453fa5b..04fd9406 100644 --- a/aqua-src/antithesis.aqua +++ b/aqua-src/antithesis.aqua @@ -1,20 +1,3 @@ -aqua StreamArgs - -export lng280BugWithForEmptyStreamFunc - -service StreamService("test-service"): - store(numbers: []u32, n: u32) - -func callService(stream: *u32, n: u32): - stream <<- 1 - StreamService.store(stream, n) - -func returnEmptyStream() -> *u32: - <- *[] - -func lng280BugWithForEmptyStreamFunc(): - arr = [1,2,3,4,5] - for a <- arr: - str <- returnEmptyStream() - -- passing the function directly won't work, see LNG-290 - callService(str, a) \ No newline at end of file +func arr() -> string: + n = "str" + <- n \ No newline at end of file diff --git a/build.sbt b/build.sbt index 964f26f0..cae19333 100644 --- a/build.sbt +++ b/build.sbt @@ -80,7 +80,7 @@ lazy val `language-server-api` = crossProject(JSPlatform, JVMPlatform) lazy val `language-server-apiJS` = `language-server-api`.js .settings( scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)), - scalaJSUseMainModuleInitializer := true + scalaJSUseMainModuleInitializer := false ) .settings(addBundleJS("../../language-server-npm/aqua-lsp-api.js")) .enablePlugins(ScalaJSPlugin) diff --git a/integration-tests/package.json b/integration-tests/package.json index cbb6536c..a02ae6d3 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -22,7 +22,7 @@ "pubsub": "node -r ts-node/register src/pubsub.ts", "exec": "npm run compile-aqua && npm run prettify-compiled && node -r ts-node/register src/index.ts", "run": "node -r ts-node/register src/index.ts", - "compile-aqua": "ts-node ./src/compile.ts", + "compile-aqua": "node --loader ts-node/esm ./src/compile.ts", "compile-aqua:air": "aqua -i ./aqua/ -o ./compiled-air -a", "prettify-compiled": "prettier --write src/compiled", "prettify": "prettier --write src", diff --git 
a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala index 0e8aa6bb..6acefc54 100644 --- a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/AquaLSP.scala @@ -1,16 +1,12 @@ package aqua.lsp import aqua.compiler.* -import aqua.compiler.AquaError.{ParserError as AquaParserError, *} -import aqua.compiler.AquaWarning.* +import aqua.compiler.AquaError.SourcesError import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId} import aqua.io.* -import aqua.parser.lexer.{LiteralToken, Token} +import aqua.parser.lift.FileSpan import aqua.parser.lift.FileSpan.F -import aqua.parser.lift.{FileSpan, Span} -import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} import aqua.raw.ConstantRaw -import aqua.semantics.* import aqua.{AquaIO, SpanParser} import cats.data.Validated @@ -23,148 +19,12 @@ import scala.concurrent.ExecutionContext.Implicits.global import scala.scalajs.js import scala.scalajs.js.JSConverters.* import scala.scalajs.js.annotation.* -import scala.scalajs.js.{UndefOr, undefined} import scribe.Logging -@JSExportAll -case class CompilationResult( - errors: js.Array[ErrorInfo], - warnings: js.Array[WarningInfo] = js.Array(), - locations: js.Array[TokenLink] = js.Array(), - importLocations: js.Array[TokenImport] = js.Array() -) - -@JSExportAll -case class TokenLocation(name: String, startLine: Int, startCol: Int, endLine: Int, endCol: Int) - -@JSExportAll -case class TokenLink(current: TokenLocation, definition: TokenLocation) - -@JSExportAll -case class TokenImport(current: TokenLocation, path: String) - -object TokenLocation { - - def fromSpan(span: FileSpan): Option[TokenLocation] = { - val start = span.locationMap.value.toLineCol(span.span.startIndex) - val end = span.locationMap.value.toLineCol(span.span.endIndex) - - for { - startLC <- start - endLC <- end - } yield { - TokenLocation(span.name, startLC._1, startLC._2, endLC._1, endLC._2) - } - - } -} - -@JSExportAll -case class ErrorInfo( - start: Int, - end: Int, - message: String, - location: UndefOr[String] -) { - // Used to distinguish from WarningInfo in TS - val infoType: String = "error" -} - -object ErrorInfo { - - def apply(fileSpan: FileSpan, message: String): ErrorInfo = { - val start = fileSpan.span.startIndex - val end = fileSpan.span.endIndex - ErrorInfo(start, end, message, fileSpan.name) - } - - def applyOp(start: Int, end: Int, message: String, location: Option[String]): ErrorInfo = { - ErrorInfo(start, end, message, location.getOrElse(undefined)) - } -} - -@JSExportAll -case class WarningInfo( - start: Int, - end: Int, - message: String, - location: UndefOr[String] -) { - // Used to distinguish from ErrorInfo in TS - val infoType: String = "warning" -} - -object WarningInfo { - - def apply(fileSpan: FileSpan, message: String): WarningInfo = { - val start = fileSpan.span.startIndex - val end = fileSpan.span.endIndex - WarningInfo(start, end, message, fileSpan.name) - } -} - @JSExportTopLevel("AquaLSP") -object AquaLSP extends App with Logging { +object AquaLSP extends Logging { - private def errorToInfo( - error: AquaError[FileModuleId, AquaFileError, FileSpan.F] - ): List[ErrorInfo] = error match { - case AquaParserError(err) => - err match { - case BlockIndentError(indent, message) => - ErrorInfo(indent._1, message) :: Nil - case ArrowReturnError(point, message) => - ErrorInfo(point._1, 
message) :: Nil - case LexerError((span, e)) => - e.expected.toList - .groupBy(_.offset) - .map { case (offset, exps) => - val localSpan = Span(offset, offset + 1) - val fSpan = FileSpan(span.name, span.locationMap, localSpan) - val errorMessages = exps.flatMap(exp => ParserError.expectationToString(exp)) - val msg = s"${errorMessages.head}" :: errorMessages.tail.map(t => "OR " + t) - (offset, ErrorInfo(fSpan, msg.mkString("\n"))) - } - .toList - .sortBy(_._1) - .map(_._2) - .reverse - } - case SourcesError(err) => - ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil - case ResolveImportsError(_, token, err) => - ErrorInfo(token.unit._1, err.showForConsole) :: Nil - case ImportError(token) => - ErrorInfo(token.unit._1, "Cannot resolve import") :: Nil - case CycleError(modules) => - ErrorInfo.applyOp( - 0, - 0, - s"Cycle loops detected in imports: ${modules.map(_.file.fileName)}", - None - ) :: Nil - case CompileError(err) => - err match { - case RulesViolated(token, messages) => - ErrorInfo(token.unit._1, messages.mkString("\n")) :: Nil - case HeaderError(token, message) => - ErrorInfo(token.unit._1, message) :: Nil - case WrongAST(ast) => - ErrorInfo.applyOp(0, 0, "Semantic error: wrong AST", None) :: Nil - - } - case OutputError(_, err) => - ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil - case AirValidationError(errors) => - errors.toChain.toList.map(ErrorInfo.applyOp(0, 0, _, None)) - } - - private def warningToInfo( - warning: AquaWarning[FileSpan.F] - ): List[WarningInfo] = warning match { - case CompileWarning(SemanticWarning(token, messages)) => - WarningInfo(token.unit._1, messages.mkString("\n")) :: Nil - } + import ResultHelper.* @JSExport def compile( @@ -195,54 +55,14 @@ object AquaLSP extends App with Logging { logger.debug("Compilation done.") - def locationsToJs( - locations: List[(Token[FileSpan.F], Token[FileSpan.F])] - ): js.Array[TokenLink] = { - locations.flatMap { case (from, to) => - val fromOp = TokenLocation.fromSpan(from.unit._1) - val toOp = TokenLocation.fromSpan(to.unit._1) - - val link = for { - from <- fromOp - to <- toOp - } yield TokenLink(from, to) - - if (link.isEmpty) - logger.warn(s"Incorrect coordinates for token '${from.unit._1.name}'") - - link.toList - }.toJSArray - } - - def importsToTokenImport(imports: List[LiteralToken[FileSpan.F]]): js.Array[TokenImport] = - imports.flatMap { lt => - val (span, str) = lt.valueToken - val unquoted = str.substring(1, str.length - 1) - TokenLocation.fromSpan(span).map(l => TokenImport(l, unquoted)) - }.toJSArray - - val result = fileRes match { + fileRes match { case Valid(lsp) => - val errors = lsp.errors.map(CompileError.apply).flatMap(errorToInfo) - val warnings = lsp.warnings.map(CompileWarning.apply).flatMap(warningToInfo) - errors match - case Nil => - logger.debug("No errors on compilation.") - case errs => - logger.debug("Errors: " + errs.mkString("\n")) - - CompilationResult( - errors.toJSArray, - warnings.toJSArray, - locationsToJs(lsp.locations), - importsToTokenImport(lsp.importTokens) - ) + lspToCompilationResult(lsp) case Invalid(e) => val errors = e.toChain.toList.flatMap(errorToInfo) logger.debug("Errors: " + errors.mkString("\n")) CompilationResult(errors.toJSArray) } - result } proc.unsafeToFuture().toJSPromise diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala new file mode 100644 index 00000000..63c6ed12 --- /dev/null +++ 
b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/OutputTypes.scala @@ -0,0 +1,86 @@ +package aqua.lsp + +import aqua.parser.lift.FileSpan + +import scala.scalajs.js +import scala.scalajs.js.annotation.JSExportAll +import scala.scalajs.js.{UndefOr, undefined} + +@JSExportAll +case class CompilationResult( + errors: js.Array[ErrorInfo], + warnings: js.Array[WarningInfo] = js.Array(), + locations: js.Array[TokenLink] = js.Array(), + importLocations: js.Array[TokenImport] = js.Array(), + tokens: js.Array[ExprInfoJs] = js.Array() +) + +@JSExportAll +case class ExprInfoJs(location: TokenLocation, `type`: String) + +@JSExportAll +case class TokenLocation(name: String, startLine: Int, startCol: Int, endLine: Int, endCol: Int) + +@JSExportAll +case class TokenLink(current: TokenLocation, definition: TokenLocation) + +@JSExportAll +case class TokenImport(current: TokenLocation, path: String) + +object TokenLocation { + + def fromSpan(span: FileSpan): Option[TokenLocation] = { + val start = span.locationMap.value.toLineCol(span.span.startIndex) + val end = span.locationMap.value.toLineCol(span.span.endIndex) + + for { + startLC <- start + endLC <- end + } yield TokenLocation(span.name, startLC._1, startLC._2, endLC._1, endLC._2) + + } +} + +@JSExportAll +case class ErrorInfo( + start: Int, + end: Int, + message: String, + location: UndefOr[String] +) { + // Used to distinguish from WarningInfo in TS + val infoType: String = "error" +} + +object ErrorInfo { + + def apply(fileSpan: FileSpan, message: String): ErrorInfo = { + val start = fileSpan.span.startIndex + val end = fileSpan.span.endIndex + ErrorInfo(start, end, message, fileSpan.name) + } + + def applyOp(start: Int, end: Int, message: String, location: Option[String]): ErrorInfo = { + ErrorInfo(start, end, message, location.getOrElse(undefined)) + } +} + +@JSExportAll +case class WarningInfo( + start: Int, + end: Int, + message: String, + location: UndefOr[String] +) { + // Used to distinguish from ErrorInfo in TS + val infoType: String = "warning" +} + +object WarningInfo { + + def apply(fileSpan: FileSpan, message: String): WarningInfo = { + val start = fileSpan.span.startIndex + val end = fileSpan.span.endIndex + WarningInfo(start, end, message, fileSpan.name) + } +} diff --git a/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala new file mode 100644 index 00000000..17c51b20 --- /dev/null +++ b/language-server/language-server-api/.js/src/main/scala/aqua/lsp/ResultHelper.scala @@ -0,0 +1,135 @@ +package aqua.lsp + +import aqua.compiler.AquaError.{ParserError as AquaParserError, *} +import aqua.compiler.AquaWarning.CompileWarning +import aqua.compiler.{AquaError, AquaWarning} +import aqua.files.FileModuleId +import aqua.io.AquaFileError +import aqua.lsp.AquaLSP.logger +import aqua.parser.lexer.LiteralToken +import aqua.parser.lift.{FileSpan, Span} +import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError} +import aqua.semantics.rules.locations.{DefinitionInfo, TokenLocation as TokenLoc} +import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST} + +import cats.syntax.show.* +import scala.scalajs.js +import scala.scalajs.js.JSConverters.* +import scribe.Logging + +object ResultHelper extends Logging { + + import aqua.types.Type.given + + def warningToInfo( + warning: AquaWarning[FileSpan.F] + ): List[WarningInfo] = warning match { + case 
CompileWarning(SemanticWarning(token, messages)) => + WarningInfo(token.unit._1, messages.mkString("\n")) :: Nil + } + + def errorToInfo( + error: AquaError[FileModuleId, AquaFileError, FileSpan.F] + ): List[ErrorInfo] = error match { + case AquaParserError(err) => + err match { + case BlockIndentError(indent, message) => + ErrorInfo(indent._1, message) :: Nil + case ArrowReturnError(point, message) => + ErrorInfo(point._1, message) :: Nil + case LexerError((span, e)) => + e.expected.toList + .groupBy(_.offset) + .map { case (offset, exps) => + val localSpan = Span(offset, offset + 1) + val fSpan = FileSpan(span.name, span.locationMap, localSpan) + val errorMessages = exps.flatMap(exp => ParserError.expectationToString(exp)) + val msg = s"${errorMessages.head}" :: errorMessages.tail.map(t => "OR " + t) + (offset, ErrorInfo(fSpan, msg.mkString("\n"))) + } + .toList + .sortBy(_._1) + .map(_._2) + .reverse + } + case SourcesError(err) => + ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil + case ResolveImportsError(_, token, err) => + ErrorInfo(token.unit._1, err.showForConsole) :: Nil + case ImportError(token) => + ErrorInfo(token.unit._1, "Cannot resolve import") :: Nil + case CycleError(modules) => + ErrorInfo.applyOp( + 0, + 0, + s"Cycle loops detected in imports: ${modules.map(_.file.fileName)}", + None + ) :: Nil + case CompileError(err) => + err match { + case RulesViolated(token, messages) => + ErrorInfo(token.unit._1, messages.mkString("\n")) :: Nil + case HeaderError(token, message) => + ErrorInfo(token.unit._1, message) :: Nil + case WrongAST(ast) => + ErrorInfo.applyOp(0, 0, "Semantic error: wrong AST", None) :: Nil + + } + case OutputError(_, err) => + ErrorInfo.applyOp(0, 0, err.showForConsole, None) :: Nil + case AirValidationError(errors) => + errors.toChain.toList.map(ErrorInfo.applyOp(0, 0, _, None)) + } + + private def tokensToJs(tokens: List[DefinitionInfo[FileSpan.F]]): js.Array[ExprInfoJs] = + tokens.flatMap { ti => + TokenLocation.fromSpan(ti.token.unit._1).map { tl => + val typeName = ti.`type`.show + ExprInfoJs(tl, typeName) + } + }.toJSArray + + private def locationsToJs( + locations: List[TokenLoc[FileSpan.F]] + ): js.Array[TokenLink] = + locations.flatMap { case TokenLoc(from, to) => + val fromOp = TokenLocation.fromSpan(from.unit._1) + val toOp = TokenLocation.fromSpan(to.unit._1) + + val link = for { + from <- fromOp + to <- toOp + } yield TokenLink(from, to) + + if (link.isEmpty) + logger.warn(s"Incorrect coordinates for token '${from.unit._1.name}'") + + link.toList + }.toJSArray + + private def importsToTokenImport(imports: List[LiteralToken[FileSpan.F]]): js.Array[TokenImport] = + imports.flatMap { lt => + val (span, str) = lt.valueToken + val unquoted = str.substring(1, str.length - 1) + TokenLocation.fromSpan(span).map(l => TokenImport(l, unquoted)) + }.toJSArray + + def lspToCompilationResult(lsp: LspContext[FileSpan.F]): CompilationResult = { + val errors = lsp.errors.map(CompileError.apply).flatMap(errorToInfo) + val warnings = lsp.warnings.map(CompileWarning.apply).flatMap(warningToInfo) + + errors match + case Nil => + logger.debug("No errors on compilation.") + case errs => + logger.debug("Errors: " + errs.mkString("\n")) + + CompilationResult( + errors.toJSArray, + warnings.toJSArray, + locationsToJs(lsp.variables.flatMap(v => v.allLocations)), + importsToTokenImport(lsp.importTokens), + tokensToJs(lsp.variables.map(_.definition)) + ) + } +} diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala 
b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala index f0eb0dc8..00a6659e 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LSPCompiler.scala @@ -6,15 +6,12 @@ import aqua.raw.RawContext import aqua.semantics.header.{HeaderHandler, HeaderSem} import cats.data.Validated.validNec -import cats.syntax.semigroup.* -import cats.syntax.applicative.* -import cats.syntax.flatMap.* +import cats.data.{Chain, Validated, ValidatedNec} +import cats.syntax.either.* import cats.syntax.functor.* import cats.syntax.monoid.* -import cats.syntax.traverse.* -import cats.syntax.either.* +import cats.syntax.semigroup.* import cats.{Comonad, Monad, Monoid, Order} -import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec} object LSPCompiler { diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala index 342b202b..084d55c7 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LocationsInterpreter.scala @@ -1,12 +1,10 @@ package aqua.lsp import aqua.parser.lexer.Token -import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState} - +import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra, LocationsState} +import aqua.types.AbilityType import cats.data.State import monocle.Lens -import monocle.macros.GenLens import scribe.Logging class LocationsInterpreter[S[_], X](using @@ -15,30 +13,25 @@ class LocationsInterpreter[S[_], X](using type SX[A] = State[X, A] - val stack = new StackInterpreter[S, X, LocationsState[S], LocationsState[S]]( - GenLens[LocationsState[S]](_.stack) - ) - - import stack.* - - override def addToken(name: String, token: Token[S]): State[X, Unit] = modify { st => - st.copy(tokens = st.tokens.updated(name, token)) + override def addDefinition(definition: DefinitionInfo[S]): State[X, Unit] = modify { st => + st.addDefinition(definition) } - private def combineFieldName(name: String, field: String): String = name + "." 
+ field - - override def addTokenWithFields( - name: String, - token: Token[S], - fields: List[(String, Token[S])] - ): State[X, Unit] = modify { st => - st.copy(tokens = - st.tokens ++ ((name, token) +: fields.map(kv => (combineFieldName(name, kv._1), kv._2))).toMap - ) + override def addDefinitionWithFields( + definition: DefinitionInfo[S], + fields: List[DefinitionInfo[S]] + ): State[X, Unit] = { + val allTokens = + definition +: fields.map { fieldDef => + fieldDef.copy(name = AbilityType.fullName(definition.name, fieldDef.name)) + } + modify { st => + st.addDefinitions(allTokens) + } } def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = - pointLocation(combineFieldName(typeName, fieldName), token) + pointLocation(AbilityType.fullName(typeName, fieldName), token) def pointTokenWithFieldLocation( typeName: String, @@ -48,37 +41,21 @@ class LocationsInterpreter[S[_], X](using ): State[X, Unit] = { for { _ <- pointLocation(typeName, typeToken) - _ <- pointLocation(combineFieldName(typeName, fieldName), token) + _ <- pointLocation(AbilityType.fullName(typeName, fieldName), token) } yield {} } override def pointLocation(name: String, token: Token[S]): State[X, Unit] = { modify { st => - val newLoc: Option[Token[S]] = st.stack.collectFirst { - case frame if frame.tokens.contains(name) => frame.tokens(name) - } orElse st.tokens.get(name) - st.copy(locations = st.locations ++ newLoc.map(token -> _).toList) + st.addLocation(name, token) } } - def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = { + def pointLocations(locations: List[(String, Token[S])]): State[X, Unit] = modify { st => - - val newLocs = locations.flatMap { case (name, token) => - (st.stack.collectFirst { - case frame if frame.tokens.contains(name) => frame.tokens(name) - } orElse st.tokens.get(name)).map(token -> _) - } - - st.copy(locations = st.locations ++ newLocs) + st.addLocations(locations) } - } private def modify(f: LocationsState[S] => LocationsState[S]): SX[Unit] = State.modify(lens.modify(f)) - - override def beginScope(): SX[Unit] = - stack.beginScope(LocationsState[S]()) - - override def endScope(): SX[Unit] = stack.endScope } diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala index aa2ddb55..4115d103 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspContext.scala @@ -2,10 +2,10 @@ package aqua.lsp import aqua.parser.lexer.{LiteralToken, NamedTypeToken, Token} import aqua.raw.{RawContext, RawPart} -import aqua.semantics.{SemanticError, SemanticWarning} import aqua.semantics.header.Picker -import aqua.types.{ArrowType, Type} - +import aqua.semantics.rules.locations.{TokenLocation, VariableInfo} +import aqua.semantics.{SemanticError, SemanticWarning} +import aqua.types.{AbilityType, ArrowType, Type} import cats.syntax.monoid.* import cats.{Monoid, Semigroup} @@ -15,12 +15,13 @@ case class LspContext[S[_]]( abDefinitions: Map[String, NamedTypeToken[S]] = Map.empty[String, NamedTypeToken[S]], rootArrows: Map[String, ArrowType] = Map.empty[String, ArrowType], constants: Map[String, Type] = Map.empty[String, Type], - tokens: Map[String, Token[S]] = Map.empty[String, Token[S]], - locations: List[(Token[S], Token[S])] = Nil, + variables: List[VariableInfo[S]] = Nil, importTokens: List[LiteralToken[S]] = Nil, errors: List[SemanticError[S]] 
= Nil, warnings: List[SemanticWarning[S]] = Nil -) +) { + lazy val allLocations: List[TokenLocation[S]] = variables.flatMap(_.allLocations) +} object LspContext { @@ -33,8 +34,8 @@ object LspContext { abDefinitions = x.abDefinitions ++ y.abDefinitions, rootArrows = x.rootArrows ++ y.rootArrows, constants = x.constants ++ y.constants, - locations = x.locations ++ y.locations, - tokens = x.tokens ++ y.tokens, + importTokens = x.importTokens ++ y.importTokens, + variables = x.variables ++ y.variables, errors = x.errors ++ y.errors, warnings = x.warnings ++ y.warnings ) @@ -87,10 +88,13 @@ object LspContext { override def declares(ctx: LspContext[S]): Set[String] = ctx.raw.declares override def setAbility(ctx: LspContext[S], name: String, ctxAb: LspContext[S]): LspContext[S] = - val prefix = name + "." ctx.copy( raw = ctx.raw.setAbility(name, ctxAb.raw), - tokens = ctx.tokens ++ ctxAb.tokens.map(kv => (prefix + kv._1) -> kv._2) + variables = ctx.variables ++ ctxAb.variables.map(v => + v.copy(definition = + v.definition.copy(name = AbilityType.fullName(name, v.definition.name)) + ) + ) ) override def setModule( @@ -113,13 +117,16 @@ object LspContext { declared: Boolean ): Option[LspContext[S]] = // rename tokens from one context with prefix addition - val newTokens = rename.map { renameStr => - ctx.tokens.map { - case (tokenName, token) if tokenName.startsWith(name) => - tokenName.replaceFirst(name, renameStr) -> token + val newVariables = rename.map { renameStr => + ctx.variables.map { + case v if v.definition.name.startsWith(name) => + v.copy(definition = + v.definition.copy(name = v.definition.name.replaceFirst(v.definition.name, renameStr)) + ) + case kv => kv } - }.getOrElse(ctx.tokens) + }.getOrElse(ctx.variables) ctx.raw .pick(name, rename, declared) @@ -132,7 +139,7 @@ object LspContext { ctx.rootArrows.get(name).fold(Map.empty)(t => Map(rename.getOrElse(name) -> t)), constants = ctx.constants.get(name).fold(Map.empty)(t => Map(rename.getOrElse(name) -> t)), - tokens = newTokens + variables = newVariables ) ) diff --git a/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala index b412e798..b8370023 100644 --- a/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala +++ b/language-server/language-server-api/src/main/scala/aqua/lsp/LspSemantics.scala @@ -49,7 +49,7 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] { definitions = rawState.abilities.definitions ++ init.abDefinitions ), locations = rawState.locations.copy( - tokens = rawState.locations.tokens ++ init.tokens + variables = rawState.locations.variables ++ init.variables ) ) @@ -69,9 +69,8 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] { rootArrows = state.names.rootArrows, constants = state.names.constants, abDefinitions = state.abilities.definitions, - locations = state.locations.allLocations, importTokens = importTokens, - tokens = state.locations.tokens, + variables = state.locations.variables, errors = state.errors.toList, warnings = state.warnings.toList ).pure[Result] diff --git a/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala new file mode 100644 index 00000000..cbb0d075 --- /dev/null +++ b/language-server/language-server-api/src/test/scala/aqua/lsp/AquaLSPSpec.scala @@ -0,0 +1,281 @@ +package aqua.lsp + +import aqua.compiler.{AquaCompilerConf, 
AquaError, AquaSources} +import aqua.parser.Parser +import aqua.parser.lift.Span +import aqua.parser.lift.Span.S +import aqua.raw.ConstantRaw +import aqua.semantics.rules.locations.{DefinitionInfo, TokenLocation, VariableInfo} +import aqua.types.* + +import cats.Id +import cats.data.* +import cats.instances.string.* +import org.scalatest.Inside +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class AquaLSPSpec extends AnyFlatSpec with Matchers with Inside { + + private def getByPosition(code: String, str: String, position: Int): Option[(Int, Int)] = { + str.r.findAllMatchIn(code).toList.lift(position).map(r => (r.start, r.end)) + } + + extension (c: LspContext[Span.S]) { + + def checkLocations( + name: String, + defPosition: Int, + usePosition: Int, + defCode: String, + useCode: Option[String] = None, + fieldName: Option[String] = None + ): Boolean = { + (for { + defPos <- getByPosition(defCode, name, defPosition) + usePos <- getByPosition(useCode.getOrElse(defCode), fieldName.getOrElse(name), usePosition) + } yield { + val (defStart, defEnd) = defPos + val (useStart, useEnd) = usePos + c.allLocations.exists { case TokenLocation(useT, defT) => + val defSpan = defT.unit._1 + val useSpan = useT.unit._1 + defSpan.startIndex == defStart && defSpan.endIndex == defEnd && useSpan.startIndex == useStart && useSpan.endIndex == useEnd + } + }).getOrElse(false) + } + + def locationsToString(): List[String] = + c.allLocations.map { case TokenLocation(l, r) => + val lSpan = l.unit._1 + val rSpan = r.unit._1 + s"($l($lSpan):$r($rSpan))" + } + + def checkTokenLoc( + code: String, + checkName: String, + position: Int, + `type`: Type, + // if name is combined + fullName: Option[String] = None, + printFiltered: Boolean = false + ): Boolean = { + + getByPosition(code, checkName, position).exists { case (start, end) => + val res = c.variables.exists { case VariableInfo(definition, _) => + val span = definition.token.unit._1 + definition.name == fullName.getOrElse( + checkName + ) && span.startIndex == start && span.endIndex == end && definition.`type` == `type` + } + + if (printFiltered) + println( + c.variables + .map(_.definition) + .filter(v => v.name == fullName.getOrElse(checkName) && v.`type` == `type`) + .map { case DefinitionInfo(name, token, t) => + val span = token.unit._1 + s"$name(${span.startIndex}:${span.endIndex}) $t" + } + ) + + res + } + + } + } + + private def aquaSource(src: Map[String, String], imports: Map[String, String]) = { + new AquaSources[Id, String, String] { + + override def sources: Id[ValidatedNec[String, Chain[(String, String)]]] = + Validated.validNec(Chain.fromSeq(src.toSeq)) + + override def resolveImport(from: String, imp: String): Id[ValidatedNec[String, String]] = + Validated.validNec(imp) + + override def load(file: String): Id[ValidatedNec[String, String]] = + Validated.fromEither( + (imports ++ src) + .get(file) + .toRight(NonEmptyChain.one(s"Cannot load imported file $file")) + ) + } + } + + def compile( + src: Map[String, String], + imports: Map[String, String] = Map.empty + ): ValidatedNec[AquaError[String, String, S], Map[String, LspContext[S]]] = { + LSPCompiler + .compileToLsp[Id, String, String, Span.S]( + aquaSource(src, imports), + id => txt => Parser.parse(Parser.parserSchema)(txt), + AquaCompilerConf(ConstantRaw.defaultConstants(None)) + ) + .leftMap { errors => + println(errors) + errors + } + } + + it should "return right tokens" in { + val main = + """module Import + |import foo, strFunc, num from "export2.aqua" 
+ | + |import "../gen/OneMore.aqua" + | + |func foo_wrapper() -> string: + | fooResult <- foo() + | if 1 == 1: + | someVar = "aaa" + | strFunc(someVar) + | else: + | someVar = 123 + | num(someVar) + | OneMore fooResult + | OneMore.more_call() + | + |ability Ab: + | someField: u32 + | + |data Str: + | someField: string + | + |func useAbAndStruct{Ab}(): + | s = Str(someField = "asd") + | strFunc(s.someField) + | num(Ab.someField) + | + |""".stripMargin + val src = Map( + "index.aqua" -> main + ) + + val firstImport = + """module Export declares strFunc, num, foo + | + |func absb() -> string: + | <- "ff" + | + |func strFunc(someVar: string) -> string: + | <- someVar + | + |func num(someVar: u32) -> u32: + | <- someVar + | + |func foo() -> string: + | <- "I am MyFooBar foo" + | + |""".stripMargin + + val secondImport = + """ + |service OneMore: + | more_call() + | consume(s: string) + |""".stripMargin + + val imports = Map( + "export2.aqua" -> + firstImport, + "../gen/OneMore.aqua" -> + secondImport + ) + + val res = compile(src, imports).toOption.get.values.head + + val serviceType = ServiceType( + "OneMore", + NonEmptyMap.of( + ("more_call", ArrowType(NilType, NilType)), + ("consume", ArrowType(ProductType.labelled(("s", ScalarType.string) :: Nil), NilType)) + ) + ) + + // inside `foo_wrapper` func + res.checkTokenLoc(main, "fooResult", 0, ScalarType.string) shouldBe true + res.checkLocations("fooResult", 0, 1, main) shouldBe true + + res.checkTokenLoc(main, "someVar", 0, LiteralType.string, None, true) shouldBe true + res.checkLocations("someVar", 0, 1, main) shouldBe true + res.checkTokenLoc(main, "someVar", 2, LiteralType.unsigned) shouldBe true + res.checkLocations("someVar", 2, 3, main) shouldBe true + + // num usage + res.checkLocations("num", 1, 1, firstImport, Some(main)) shouldBe true + // strFunc usage + res.checkLocations("strFunc", 1, 1, firstImport, Some(main)) shouldBe true + res.checkLocations("strFunc", 1, 2, firstImport, Some(main)) shouldBe true + + // Str.field + res.checkTokenLoc(main, "someField", 1, ScalarType.string, Some("Str.someField")) shouldBe true + res.checkLocations("someField", 1, 3, main, None) shouldBe true + + // Ab.field + res.checkTokenLoc( + main, + "someField", + 0, + ScalarType.u32, + Some("Ab.someField"), + true + ) shouldBe true + + // this is tokens from imports, if we will use `FileSpan.F` file names will be different + // OneMore service + res.checkTokenLoc(secondImport, "OneMore", 0, serviceType) shouldBe true + res.checkTokenLoc( + secondImport, + "more_call", + 0, + ArrowType(NilType, NilType), + Some("OneMore.more_call"), + true + ) shouldBe true + res.checkTokenLoc( + secondImport, + "consume", + 0, + ArrowType(ProductType.labelled(("s", ScalarType.string) :: Nil), NilType), + Some("OneMore.consume") + ) shouldBe true + + // strFunc function and argument + res.checkTokenLoc( + firstImport, + "strFunc", + 1, + ArrowType( + ProductType.labelled(("someVar", ScalarType.string) :: Nil), + ProductType(ScalarType.string :: Nil) + ), + None, + true + ) shouldBe true + res.checkTokenLoc(firstImport, "someVar", 0, ScalarType.string) shouldBe true + + // num function and argument + res.checkTokenLoc( + firstImport, + "num", + 1, + ArrowType( + ProductType.labelled(("someVar", ScalarType.u32) :: Nil), + ProductType(ScalarType.u32 :: Nil) + ) + ) shouldBe true + res.checkTokenLoc(firstImport, "someVar", 2, ScalarType.u32, None, true) shouldBe true + + // foo function + res.checkTokenLoc( + firstImport, + "foo", + 1, + ArrowType(NilType, 
ProductType(ScalarType.string :: Nil)) + ) shouldBe true + } +} diff --git a/language-server/language-server-npm/aqua-lsp-api.d.ts b/language-server/language-server-npm/aqua-lsp-api.d.ts index c59ec827..bf705eee 100644 --- a/language-server/language-server-npm/aqua-lsp-api.d.ts +++ b/language-server/language-server-npm/aqua-lsp-api.d.ts @@ -6,6 +6,11 @@ export interface TokenLocation { endCol: number } +export interface TokenInfo { + location: TokenLocation, + type: string +} + export interface TokenLink { current: TokenLocation, definition: TokenLocation @@ -36,7 +41,8 @@ export interface CompilationResult { errors: ErrorInfo[], warnings: WarningInfo[], locations: TokenLink[], - importLocations: TokenImport[] + importLocations: TokenImport[], + tokens: TokenInfo[] } export class Compiler { diff --git a/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala b/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala index 241c926a..7bc42214 100644 --- a/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala +++ b/model/tree/src/main/scala/aqua/tree/TreeNodeCompanion.scala @@ -1,16 +1,14 @@ package aqua.tree +import aqua.helpers.tree.Tree + import cats.Show import cats.data.Chain import cats.free.Cofree - -import cats.syntax.show.* import cats.syntax.apply.* - +import cats.syntax.show.* import scala.annotation.tailrec -import aqua.helpers.Tree - trait TreeNodeCompanion[T <: TreeNode[T]] { given showTreeLabel: Show[T] diff --git a/parser/src/main/scala/aqua/parser/Ast.scala b/parser/src/main/scala/aqua/parser/Ast.scala index e392c793..37deead0 100644 --- a/parser/src/main/scala/aqua/parser/Ast.scala +++ b/parser/src/main/scala/aqua/parser/Ast.scala @@ -1,10 +1,10 @@ package aqua.parser +import aqua.helpers.tree.Tree import aqua.parser.expr.* import aqua.parser.head.{HeadExpr, HeaderExpr} import aqua.parser.lift.{LiftParser, Span} import aqua.parser.lift.LiftParser.* -import aqua.helpers.Tree import cats.data.{Chain, Validated, ValidatedNec} import cats.syntax.flatMap.* diff --git a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala index 1e63f266..162b6158 100644 --- a/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala +++ b/parser/src/main/scala/aqua/parser/lexer/PropertyOp.scala @@ -61,7 +61,7 @@ case class IntoCopy[F[_]: Comonad]( object PropertyOp { private val parseField: P[PropertyOp[Span.S]] = - (`.` *> anyName).lift.map(IntoField(_)) + `.` *> anyName.lift.map(IntoField(_)) val parseArrow: P[PropertyOp[Span.S]] = (`.` *> CallArrowToken.callBraces).map { case CallBraces(name, abilities, args) => diff --git a/parser/src/main/scala/aqua/parser/lexer/Token.scala b/parser/src/main/scala/aqua/parser/lexer/Token.scala index 82f1f841..318e35bf 100644 --- a/parser/src/main/scala/aqua/parser/lexer/Token.scala +++ b/parser/src/main/scala/aqua/parser/lexer/Token.scala @@ -1,7 +1,5 @@ package aqua.parser.lexer -import aqua.parser.lift.Span.S - import cats.data.NonEmptyList import cats.parse.{Accumulator0, Parser as P, Parser0 as P0} import cats.syntax.functor.* diff --git a/parser/src/main/scala/aqua/parser/lift/FileSpan.scala b/parser/src/main/scala/aqua/parser/lift/FileSpan.scala index e1a2b0c8..abd10324 100644 --- a/parser/src/main/scala/aqua/parser/lift/FileSpan.scala +++ b/parser/src/main/scala/aqua/parser/lift/FileSpan.scala @@ -17,6 +17,15 @@ case class FileSpan(name: String, locationMap: Eval[LocationMap], span: Span) { */ def focus(ctx: Int): Option[FileSpan.Focus] = span.focus(locationMap.value, 
ctx).map(FileSpan.Focus(name, locationMap, ctx, _)) + + override def hashCode(): Int = (name, span).hashCode() + + override def equals(obj: Any): Boolean = { + obj match { + case FileSpan(n, _, s) => n == name && s == span + case _ => false + } + } } object FileSpan { diff --git a/semantics/src/main/scala/aqua/semantics/Prog.scala b/semantics/src/main/scala/aqua/semantics/Prog.scala index aadddaf4..813a000b 100644 --- a/semantics/src/main/scala/aqua/semantics/Prog.scala +++ b/semantics/src/main/scala/aqua/semantics/Prog.scala @@ -38,14 +38,6 @@ sealed abstract class Prog[Alg[_]: Monad, A] extends (Alg[A] => Alg[A]) { (_: Unit, m: A) => N.endScope() as m ) ) - - def locationsScope[S[_]]()(implicit L: LocationsAlgebra[S, Alg]): Prog[Alg, A] = - wrap( - RunAround( - L.beginScope(), - (_: Unit, m: A) => L.endScope() as m - ) - ) } case class RunAfter[Alg[_]: Monad, A](prog: Alg[A]) extends Prog[Alg, A] { diff --git a/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala b/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala index e0798cfd..a47f8b1f 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/AbilitySem.scala @@ -28,7 +28,7 @@ class AbilitySem[S[_]](val expr: AbilityExpr[S]) extends AnyVal { ): Prog[Alg, Raw] = { Prog.after_( for { - defs <- D.purgeDefs(expr.name) + defs <- D.purgeDefs() fields = defs.view.mapValues(d => d.name -> d.`type`).toMap abilityType <- T.defineAbilityType(expr.name, fields) result = abilityType.map(st => TypeRaw(expr.name.value, st)) diff --git a/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala b/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala index 6d5cb54e..53563165 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/DataStructSem.scala @@ -22,7 +22,7 @@ class DataStructSem[S[_]](val expr: DataStructExpr[S]) extends AnyVal { ): Prog[Alg, Raw] = Prog.after((_: Raw) => for { - defs <- D.purgeDefs(expr.name) + defs <- D.purgeDefs() fields = defs.view.mapValues(d => d.name -> d.`type`).toMap structType <- T.defineStructType(expr.name, fields) result = structType.map(st => TypeRaw(expr.name.value, st)) diff --git a/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala b/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala index 0d8162ff..febb0d38 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/ServiceSem.scala @@ -45,7 +45,7 @@ class ServiceSem[S[_]](val expr: ServiceExpr[S]) extends AnyVal { ) ) serviceType <- EitherT.fromOptionF( - T.defineServiceType(expr.name, arrowsByName.toSortedMap.toMap), + T.defineServiceType(expr.name, arrowsByName.toSortedMap), Raw.error("Failed to define service type") ) arrowsDefs = arrows.map { case (name, _) => name.value -> name }.toNem diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala index ac8e7771..abcaa290 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala @@ -137,7 +137,6 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal { T: TypesAlgebra[S, Alg], N: NamesAlgebra[S, Alg], A: AbilitiesAlgebra[S, Alg], - L: LocationsAlgebra[S, Alg], M: ManglerAlgebra[Alg] ): Prog[Alg, Raw] = Prog @@ -147,6 +146,4 @@ class ArrowSem[S[_]](val expr: 
ArrowExpr[S]) extends AnyVal { ) .abilitiesScope(expr.arrowTypeExpr) .namesScope(expr.arrowTypeExpr) - .locationsScope() - } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala index 7ac8b160..9a815542 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/CatchSem.scala @@ -42,6 +42,4 @@ class CatchSem[S[_]](val expr: CatchExpr[S]) extends AnyVal { ) .abilitiesScope[S](expr.token) .namesScope(expr.token) - .locationsScope() - } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala index f14ae67d..d8e18ed4 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala @@ -8,14 +8,9 @@ import aqua.raw.value.VarRaw import aqua.semantics.Prog import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.types.TypesAlgebra -import aqua.types.* import cats.Monad -import cats.data.Chain import cats.data.OptionT -import cats.syntax.applicative.* -import cats.syntax.flatMap.* -import cats.syntax.functor.* class DeclareStreamSem[S[_]](val expr: DeclareStreamExpr[S]) { diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala index 2dc27746..df98dd71 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ElseOtherwiseSem.scala @@ -39,5 +39,4 @@ class ElseOtherwiseSem[S[_]](val expr: ElseOtherwiseExpr[S]) extends AnyVal { ) .abilitiesScope(expr.token) .namesScope(expr.token) - .locationsScope() } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala index 43acdabe..bd25334b 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/IfSem.scala @@ -56,5 +56,4 @@ class IfSem[S[_]](val expr: IfExpr[S]) extends AnyVal { ) .abilitiesScope[S](expr.token) .namesScope[S](expr.token) - .locationsScope() } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala index 1b6297f7..d5b286a3 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/TrySem.scala @@ -37,5 +37,4 @@ class TrySem[S[_]](val expr: TryExpr[S]) extends AnyVal { ) .abilitiesScope(expr.token) .namesScope(expr.token) - .locationsScope() } diff --git a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala index 9ec2b3aa..4ddded3d 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala @@ -1,9 +1,7 @@ package aqua.semantics.rules -import aqua.errors.Errors.internalError import aqua.helpers.syntax.optiont.* import aqua.parser.lexer.* -import aqua.parser.lexer.InfixToken.value import aqua.parser.lexer.InfixToken.{BoolOp, CmpOp, EqOp, MathOp, Op as InfOp} import aqua.parser.lexer.PrefixToken.Op as PrefOp import aqua.raw.value.* diff --git 
a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala index 78d078b8..0f3ee8fa 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/abilities/AbilitiesInterpreter.scala @@ -1,22 +1,20 @@ package aqua.semantics.rules.abilities -import aqua.parser.lexer.{Name, NamedTypeToken, Token, ValueToken} +import aqua.parser.lexer.{Name, NamedTypeToken, Token} +import aqua.raw.RawContext import aqua.raw.value.ValueRaw -import aqua.raw.{RawContext, ServiceRaw} import aqua.semantics.Levenshtein import aqua.semantics.rules.locations.LocationsAlgebra import aqua.semantics.rules.mangler.ManglerAlgebra import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.{StackInterpreter, abilities} -import aqua.types.{ArrowType, ServiceType} +import aqua.types.ArrowType import cats.data.{NonEmptyMap, State} import cats.syntax.applicative.* import cats.syntax.apply.* -import cats.syntax.foldable.* import cats.syntax.functor.* import cats.syntax.option.* -import cats.syntax.traverse.* import monocle.Lens import monocle.macros.GenLens @@ -56,12 +54,6 @@ class AbilitiesInterpreter[S[_], X](using case false => for { _ <- modify(_.defineService(name, defaultId)) - // TODO: Is it used? - _ <- locations.addTokenWithFields( - name.value, - name, - arrowDefs.toNel.toList - ) } yield true } diff --git a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala index 62a336e9..c44b99ed 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsAlgebra.scala @@ -9,7 +9,7 @@ import cats.data.{NonEmptyList, NonEmptyMap} trait DefinitionsAlgebra[S[_], Alg[_]] { def defineDef(name: Name[S], `type`: Type): Alg[Boolean] - def purgeDefs(token: NamedTypeToken[S]): Alg[Map[String, DefinitionsState.Def[S]]] + def purgeDefs(): Alg[Map[String, DefinitionsState.Def[S]]] def defineArrow(arrow: Name[S], `type`: ArrowType): Alg[Boolean] diff --git a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala index 0103b598..43e75ea0 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/definitions/DefinitionsInterpreter.scala @@ -1,28 +1,17 @@ package aqua.semantics.rules.definitions import aqua.parser.lexer.{Name, NamedTypeToken, Token} -import aqua.semantics.rules.StackInterpreter import aqua.semantics.rules.report.ReportAlgebra -import aqua.semantics.rules.abilities.AbilitiesState -import aqua.semantics.rules.locations.{LocationsAlgebra, LocationsState} -import aqua.semantics.rules.types.TypesState import aqua.types.{ArrowType, Type} -import cats.data.{NonEmptyList, NonEmptyMap, State} -import monocle.Lens -import monocle.macros.GenLens -import cats.syntax.applicative.* -import cats.syntax.apply.* -import cats.syntax.flatMap.* +import cats.data.{NonEmptyList, State} import cats.syntax.functor.* import cats.syntax.option.* - -import scala.collection.immutable.SortedMap +import monocle.Lens class DefinitionsInterpreter[S[_], X](implicit lens: Lens[X, DefinitionsState[S]], - report: 
ReportAlgebra[S, State[X, *]], - locations: LocationsAlgebra[S, State[X, *]] + report: ReportAlgebra[S, State[X, *]] ) extends DefinitionsAlgebra[S, State[X, *]] { type SX[A] = State[X, A] @@ -55,16 +44,9 @@ class DefinitionsInterpreter[S[_], X](implicit override def defineArrow(arrow: Name[S], `type`: ArrowType): SX[Boolean] = define(arrow, `type`, "arrow") - override def purgeDefs( - token: NamedTypeToken[S] - ): SX[Map[String, DefinitionsState.Def[S]]] = + override def purgeDefs(): SX[Map[String, DefinitionsState.Def[S]]] = getState.map(_.definitions).flatMap { defs => - val names = defs.view.mapValues(_.name) - for { - _ <- locations - .addTokenWithFields(token.value, token, names.toList) - .whenA(defs.nonEmpty) _ <- modify(_.copy(definitions = Map.empty)) } yield defs } diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala index 43a876b1..c49d72ed 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/DummyLocationsInterpreter.scala @@ -1,23 +1,19 @@ package aqua.semantics.rules.locations import aqua.parser.lexer.Token -import aqua.semantics.rules.StackInterpreter -import aqua.semantics.rules.types.TypesState -import monocle.Lens -import monocle.macros.GenLens -import cats.data.{NonEmptyList, NonEmptyMap, State} +import cats.data.State class DummyLocationsInterpreter[S[_], X] extends LocationsAlgebra[S, State[X, *]] { - def addToken(name: String, token: Token[S]): State[X, Unit] = State.pure(()) + def addDefinition(definition: DefinitionInfo[S]): State[X, Unit] = State.pure(()) - def addTokenWithFields( - name: String, - token: Token[S], - fields: List[(String, Token[S])] + def addDefinitionWithFields( + definition: DefinitionInfo[S], + fields: List[DefinitionInfo[S]] ): State[X, Unit] = State.pure(()) - def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = State.pure(()) + def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): State[X, Unit] = + State.pure(()) def pointTokenWithFieldLocation( typeName: String, diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala index 3a41fbdb..59b4864d 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsAlgebra.scala @@ -1,16 +1,23 @@ package aqua.semantics.rules.locations + import aqua.parser.lexer.Token +import aqua.types.Type trait LocationsAlgebra[S[_], Alg[_]] { - def addToken(name: String, token: Token[S]): Alg[Unit] - def addTokenWithFields(name: String, token: Token[S], fields: List[(String, Token[S])]): Alg[Unit] + def addDefinition(definition: DefinitionInfo[S]): Alg[Unit] - def pointTokenWithFieldLocation(typeName: String, typeToken: Token[S], fieldName: String, token: Token[S]): Alg[Unit] + def addDefinitionWithFields( + definition: DefinitionInfo[S], + fields: List[DefinitionInfo[S]] + ): Alg[Unit] + + def pointTokenWithFieldLocation( + typeName: String, + typeToken: Token[S], + fieldName: String, + token: Token[S] + ): Alg[Unit] def pointFieldLocation(typeName: String, fieldName: String, token: Token[S]): Alg[Unit] def pointLocation(name: String, token: Token[S]): Alg[Unit] def pointLocations(locations: 
List[(String, Token[S])]): Alg[Unit] - - def beginScope(): Alg[Unit] - - def endScope(): Alg[Unit] } diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala index cb0b37d0..dd11b98e 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/LocationsState.scala @@ -1,26 +1,55 @@ package aqua.semantics.rules.locations +import aqua.helpers.syntax.list.* import aqua.parser.lexer.Token -import aqua.semantics.rules.types.TypesState + import cats.kernel.Monoid +import scribe.Logging case class LocationsState[S[_]]( - tokens: Map[String, Token[S]] = Map.empty[String, Token[S]], - locations: List[(Token[S], Token[S])] = Nil, - stack: List[LocationsState[S]] = Nil -) { + variables: List[VariableInfo[S]] = Nil +) extends Logging { - lazy val allLocations: List[(Token[S], Token[S])] = locations + def addDefinitions(newDefinitions: List[DefinitionInfo[S]]): LocationsState[S] = + copy(variables = newDefinitions.map(d => VariableInfo(d)) ++ variables) + + def addDefinition(newDef: DefinitionInfo[S]): LocationsState[S] = + copy(variables = VariableInfo(newDef) +: variables) + + private def addOccurrenceToFirst( + vars: List[VariableInfo[S]], + name: String, + token: Token[S] + ): List[VariableInfo[S]] = { + if (!vars.exists(_.definition.name == name)) + logger.error(s"Unexpected. Cannot add occurrence for $name") + + vars.updateFirst(_.definition.name == name, v => v.copy(occurrences = token +: v.occurrences)) + } + + def addLocation( + name: String, + token: Token[S] + ): LocationsState[S] = + copy(variables = addOccurrenceToFirst(variables, name, token)) + + def addLocations( + locations: List[(String, Token[S])] + ): LocationsState[S] = + locations.foldLeft(this) { case (st, (name, token)) => + st.addLocation(name, token) + } } object LocationsState { - implicit def locationsStateMonoid[S[_]]: Monoid[LocationsState[S]] = new Monoid[LocationsState[S]] { - override def empty: LocationsState[S] = LocationsState() + implicit def locationsStateMonoid[S[_]]: Monoid[LocationsState[S]] = + new Monoid[LocationsState[S]] { + override def empty: LocationsState[S] = LocationsState() - override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] = - LocationsState( - tokens = x.tokens ++ y.tokens - ) - } + override def combine(x: LocationsState[S], y: LocationsState[S]): LocationsState[S] = + LocationsState( + variables = x.variables ++ y.variables + ) + } } diff --git a/semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala b/semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala new file mode 100644 index 00000000..efca87c0 --- /dev/null +++ b/semantics/src/main/scala/aqua/semantics/rules/locations/VariableInfo.scala @@ -0,0 +1,11 @@ +package aqua.semantics.rules.locations + +import aqua.parser.lexer.Token +import aqua.types.Type + +case class DefinitionInfo[S[_]](name: String, token: Token[S], `type`: Type) +case class TokenLocation[S[_]](usage: Token[S], definition: Token[S]) + +case class VariableInfo[S[_]](definition: DefinitionInfo[S], occurrences: List[Token[S]] = Nil) { + def allLocations: List[TokenLocation[S]] = occurrences.map(o => TokenLocation(o, definition.token)) +} diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala index 
diff --git a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
index e3357eab..d47cd85e 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/names/NamesInterpreter.scala
@@ -4,15 +4,12 @@ import aqua.errors.Errors.internalError
 import aqua.parser.lexer.{Name, Token}
 import aqua.semantics.Levenshtein
 import aqua.semantics.rules.StackInterpreter
-import aqua.semantics.rules.locations.LocationsAlgebra
+import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra}
 import aqua.semantics.rules.report.ReportAlgebra
 import aqua.types.{ArrowType, StreamType, Type}
 
 import cats.data.{OptionT, State}
 import cats.syntax.all.*
-import cats.syntax.applicative.*
-import cats.syntax.flatMap.*
-import cats.syntax.functor.*
 
 import monocle.Lens
 import monocle.macros.GenLens
@@ -117,13 +114,13 @@ class NamesInterpreter[S[_], X](using
       case None =>
         mapStackHeadM(report.error(name, "Cannot define a variable in the root scope").as(false))(
           fr => (fr.addName(name, `type`) -> true).pure
-        ) <* locations.addToken(name.value, name)
+        ) <* locations.addDefinition(DefinitionInfo(name.value, name, `type`))
     }
 
   override def derive(name: Name[S], `type`: Type, derivedFrom: Set[String]): State[X, Boolean] =
     define(name, `type`).flatTap(defined =>
       mapStackHead_(_.derived(name, derivedFrom)).whenA(defined)
-    ) <* locations.addToken(name.value, name)
+    )
 
   override def getDerivedFrom(fromNames: List[Set[String]]): State[X, List[Set[String]]] =
     mapStackHead(Nil)(frame =>
@@ -142,7 +139,7 @@ class NamesInterpreter[S[_], X](using
             constants = st.constants.updated(name.value, `type`)
           )
         ).as(true)
-    }.flatTap(_ => locations.addToken(name.value, name))
+    }.flatTap(_ => locations.addDefinition(DefinitionInfo(name.value, name, `type`)))
 
   override def defineArrow(name: Name[S], arrowType: ArrowType, isRoot: Boolean): SX[Boolean] =
     readName(name.value).flatMap {
@@ -166,8 +163,10 @@ class NamesInterpreter[S[_], X](using
           report
             .error(name, "Cannot define a variable in the root scope")
             .as(false)
-        )(fr => (fr.addArrow(name, arrowType) -> true).pure)
-    }.flatTap(_ => locations.addToken(name.value, name))
+        )(fr => (fr.addArrow(name, arrowType) -> true).pure).flatTap(_ =>
+          locations.addDefinition(DefinitionInfo[S](name.value, name, arrowType))
+        )
+    }
 
   override def streamsDefinedWithinScope(): SX[Map[String, StreamType]] =
     mapStackHead(Map.empty) { frame =>
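Note (illustrative, not part of this patch): every definition recorded through the names interpreter now carries its Type inside DefinitionInfo, which is what allows token types to be passed on to the LSP. A minimal sketch of the kind of mapping this enables, assuming the standard LSP semantic token-type names; the grouping below is an assumption for illustration, and the mapping actually used by the language server may differ.

    import aqua.types.*

    // Hypothetical classifier from a definition's Type to an LSP semantic token type.
    def semanticTokenType(t: Type): String = t match {
      case _: ArrowType   => "function"  // callable definitions
      case _: ServiceType => "class"     // a service groups a set of arrows
      case _: AbilityType => "interface"
      case _: StructType  => "struct"
      case _              => "variable"  // scalars, collections, streams, ...
    }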
diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
index 7df3cddd..4f76de4d 100644
--- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
+++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala
@@ -3,14 +3,14 @@ package aqua.semantics.rules.types
 import aqua.parser.lexer.*
 import aqua.raw.value.*
 import aqua.semantics.rules.StackInterpreter
-import aqua.semantics.rules.locations.LocationsAlgebra
+import aqua.semantics.rules.locations.{DefinitionInfo, LocationsAlgebra}
 import aqua.semantics.rules.report.ReportAlgebra
 import aqua.semantics.rules.types.TypeResolution.TypeResolutionError
 import aqua.types.*
 import aqua.types.Type.*
 
+import cats.data.*
 import cats.data.Validated.{Invalid, Valid}
-import cats.data.{Chain, NonEmptyList, NonEmptyMap, OptionT, State}
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
@@ -97,7 +97,8 @@ class TypesInterpreter[S[_], X](using
       nonEmptyFields =>
         val `type` = AbilityType(name.value, nonEmptyFields)
 
-        modify(_.defineType(name, `type`)).as(`type`.some)
+        locateNamedType(name, `type`, fields) >> modify(_.defineType(name, `type`))
+          .as(`type`.some)
     )
   }
 
@@ -131,10 +132,20 @@ class TypesInterpreter[S[_], X](using
         ).semiflatMap(nonEmptyArrows =>
           val `type` = ServiceType(name.value, nonEmptyArrows)
 
-          modify(_.defineType(name, `type`)).as(`type`)
+          locateNamedType(name, `type`, fields) >> modify(_.defineType(name, `type`)).as(`type`)
         ).value
       )
 
+  private def locateNamedType(
+    name: NamedTypeToken[S],
+    t: NamedType,
+    fields: Map[String, (Name[S], Type)]
+  ) =
+    locations.addDefinitionWithFields(
+      DefinitionInfo[S](name.value, name, t),
+      fields.map { case (n, (t, ty)) => DefinitionInfo[S](n, t, ty) }.toList
+    )
+
   override def defineStructType(
     name: NamedTypeToken[S],
     fields: Map[String, (Name[S], Type)]
@@ -159,7 +170,8 @@ class TypesInterpreter[S[_], X](using
         )(nonEmptyFields =>
           val `type` = StructType(name.value, nonEmptyFields)
 
-          modify(_.defineType(name, `type`)).as(`type`.some)
+          locateNamedType(name, `type`, fields) >> modify(_.defineType(name, `type`))
+            .as(`type`.some)
         )
       )
   )
@@ -170,7 +182,7 @@ class TypesInterpreter[S[_], X](using
       case Some(_) => report.error(name, s"Type `${name.value}` was already defined").as(false)
       case None =>
         modify(_.defineType(name, target))
-          .productL(locations.addToken(name.value, name))
+          .productL(locations.addDefinition(DefinitionInfo(name.value, name.asName, target)))
          .as(true)
     }
 
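Note (hypothetical sketch, not part of this patch): locateNamedType registers one DefinitionInfo for the named type itself plus one per field via addDefinitionWithFields. The sketch below shows one plausible flattening of that pair into plain definitions, qualifying each field by its parent's name; the qualification scheme is an assumption of this sketch, not something this hunk specifies.

    import aqua.semantics.rules.locations.DefinitionInfo

    // Hypothetical flattening: `Point` and `Point.x` become distinct lookup keys.
    def withQualifiedFields[S[_]](
      parent: DefinitionInfo[S],
      fields: List[DefinitionInfo[S]]
    ): List[DefinitionInfo[S]] =
      parent +: fields.map(f => f.copy(name = s"${parent.name}.${f.name}"))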
diff --git a/types/src/main/scala/aqua/types/Type.scala b/types/src/main/scala/aqua/types/Type.scala
index e5f6bf7e..1ddd8058 100644
--- a/types/src/main/scala/aqua/types/Type.scala
+++ b/types/src/main/scala/aqua/types/Type.scala
@@ -4,15 +4,15 @@ import aqua.errors.Errors.internalError
 import aqua.types.*
 import aqua.types.Type.*
 
-import cats.data.NonEmptyList
 import cats.data.NonEmptyMap
 import cats.syntax.applicative.*
 import cats.syntax.foldable.*
 import cats.syntax.functor.*
 import cats.syntax.option.*
 import cats.syntax.partialOrder.*
+import cats.syntax.show.*
 import cats.syntax.traverse.*
-import cats.{Eval, Foldable, Functor, PartialOrder, Traverse}
+import cats.{Eval, Foldable, Functor, PartialOrder, Show, Traverse}
 import scala.collection.immutable.SortedMap
 
 sealed trait Type {
@@ -282,7 +282,8 @@ object CollectionType {
       .map[Type] {
         case StreamType(el) => ArrayType(el)
         case dt: DataType => dt
-      }.reduceLeftOption(_ `∩` _)
+      }
+      .reduceLeftOption(_ `∩` _)
       .map {
         // In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
         // But we want to get to TopType instead: this would mean that intersection is empty, and you cannot
@@ -516,4 +517,48 @@ object Type {
 
   given PartialOrder[Type] =
     CompareTypes.partialOrder
+
+  given Show[DataType] = {
+    case LiteralType.signed =>
+      "i32"
+    case LiteralType.unsigned =>
+      "u32"
+    case LiteralType.number =>
+      "u32"
+    case LiteralType.float =>
+      "f32"
+    case LiteralType.string =>
+      "string"
+    case LiteralType.bool =>
+      "bool"
+    case t =>
+      t.toString
+  }
+
+  // pretty print for Type
+  given Show[Type] = {
+    case ArrayType(el) =>
+      s"[]${el.show}"
+    case OptionType(el) =>
+      s"?${el.show}"
+    case StreamType(el) =>
+      s"*${el.show}"
+    case ArrowType(domain, codomain) =>
+      val domainStr = domain match {
+        case _: LabeledConsType =>
+          domain.toLabelledList().map { case (s, t) => s"$s: ${t.show}" }.mkString("(", ", ", ")")
+        case _ => domain.toList.mkString("(", ", ", ")")
+      }
+      val codomainStr = codomain.toList match {
+        case Nil => ""
+        case l => " -> " + l.mkString(", ")
+      }
+      domainStr + codomainStr
+    case nt: NamedType =>
+      s"${nt.fullName}(${nt.fields.map(_.show).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")})"
+    case t: DataType =>
+      t.show
+    case t =>
+      t.toString
+  }
 }
diff --git a/utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala b/utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala
new file mode 100644
index 00000000..b5dce107
--- /dev/null
+++ b/utils/helpers/src/main/scala/aqua/helpers/syntax/list.scala
@@ -0,0 +1,19 @@
+package aqua.helpers.syntax
+
+import scala.annotation.tailrec
+
+object list {
+  extension[A] (l: List[A]) {
+    def updateFirst[B >: A](p: A => Boolean, f: A => B): List[B] = {
+      @tailrec
+      def update(left: List[B], right: List[A]): List[B] =
+        right match {
+          case a :: tail if p(a) => left.reverse ::: f(a) :: tail
+          case a :: tail => update(a :: left, tail)
+          case Nil => left.reverse
+        }
+
+      update(Nil, l)
+    }
+  }
+}
diff --git a/utils/helpers/src/main/scala/aqua/syntax/optiont.scala b/utils/helpers/src/main/scala/aqua/helpers/syntax/optiont.scala
similarity index 100%
rename from utils/helpers/src/main/scala/aqua/syntax/optiont.scala
rename to utils/helpers/src/main/scala/aqua/helpers/syntax/optiont.scala
diff --git a/utils/helpers/src/main/scala/aqua/tree/Tree.scala b/utils/helpers/src/main/scala/aqua/helpers/tree/Tree.scala
similarity index 79%
rename from utils/helpers/src/main/scala/aqua/tree/Tree.scala
rename to utils/helpers/src/main/scala/aqua/helpers/tree/Tree.scala
index b958e14b..3ed2c68c 100644
--- a/utils/helpers/src/main/scala/aqua/tree/Tree.scala
+++ b/utils/helpers/src/main/scala/aqua/helpers/tree/Tree.scala
@@ -1,13 +1,9 @@
-package aqua.helpers
+package aqua.helpers.tree
 
-import cats.data.Chain
 import cats.free.Cofree
 
-import cats.Traverse
-import cats.Show
-import cats.Eval
-import cats.syntax.show.*
-import cats.syntax.traverse.*
 import cats.syntax.foldable.*
+import cats.syntax.show.*
+import cats.{Eval, Show, Traverse}
 
 object Tree {
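Usage note (example only, not part of the patch): updateFirst rewrites the first element matching the predicate and leaves later matches untouched, which is exactly what LocationsState.addOccurrenceToFirst relies on to attach an occurrence to the most recently added definition of a name.

    import aqua.helpers.syntax.list.*

    val xs = List(1, 2, 3, 2)
    val ys = xs.updateFirst(_ == 2, _ * 10)
    // ys == List(1, 20, 3, 2): only the first match is transformed

Likewise, the new Show[Type] instance renders types in Aqua's surface syntax: an array of strings prints as "[]string", an option as "?string", and a stream as "*string".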