From 68425ed42a7440aefac82011bbac418ef13bd636 Mon Sep 17 00:00:00 2001 From: InversionSpaces Date: Mon, 13 Nov 2023 09:58:36 +0100 Subject: [PATCH] feat(compiler)!: Prohibit mutating options [LNG-277] (#960) * Parse any nested type * Refactor type system * Fix restriction * Refactor type resolution * Return stream definition * Update examples * Refactor * Refactor * Refactor integration tests * Export service * Add integration test * Fix args provider * Add parser unit tests * Add type resolution unit tests * Add more unit tests * DataTypeToken -> CompositeTypeToken * GeneralStreamType -> MutableStreamType * Refactor * Refactor TypeResolution * colType -> collectionType * Refactor * Fix PushToStreamSem * BasicTypeToken -> ScalarTypeToken * CompositeTypeToken -> BasicTypeToken * Fix for nil * Make stream collectible * Refactor collectible type * Use internalError * Add unit tests --- .../src/main/scala/aqua/run/CliFunc.scala | 22 ++- .../main/scala/aqua/run/TypeValidator.scala | 6 +- .../scala/aqua/definitions/Definitions.scala | 35 ++-- .../main/scala/aqua/backend/OutputFunc.scala | 5 +- .../scala/aqua/backend/OutputService.scala | 5 +- build.sbt | 1 + .../aqua/compiler/AquaCompilerConf.scala | 2 +- .../aqua/examples/errorClear.aqua | 2 +- integration-tests/aqua/examples/funcs.aqua | 2 +- .../aqua/examples/handleResultError.aqua | 2 +- .../aqua/examples/options/option_gen.aqua | 4 +- integration-tests/aqua/examples/stream.aqua | 2 +- .../aqua/examples/streamArgs.aqua | 10 +- integration-tests/aqua/examples/via.aqua | 7 +- .../src/__test__/examples.spec.ts | 35 +++- .../src/examples/streamArgsCall.ts | 5 + integration-tests/src/examples/viaCall.ts | 19 +- .../src/main/scala/aqua/js/Definitions.scala | 6 +- .../src/main/scala/aqua/js/JsonEncoder.scala | 35 ++-- .../aqua/model/inline/ArrowInliner.scala | 6 +- .../scala/aqua/model/inline/TagInliner.scala | 7 +- .../inline/raw/ApplyFunctorRawInliner.scala | 13 +- .../inline/raw/CollectionRawInliner.scala | 26 ++- .../src/main/scala/aqua/raw/ConstantRaw.scala | 2 +- .../src/main/scala/aqua/raw/ops/RawTag.scala | 6 +- .../main/scala/aqua/raw/value/ValueRaw.scala | 25 ++- .../src/main/scala/aqua/res/ResolvedOp.scala | 7 +- model/src/main/scala/aqua/model/OpModel.scala | 9 +- .../model/transform/pre/ArgsProvider.scala | 21 +- .../transform/pre/FuncPreTransformer.scala | 4 +- .../model/transform/topology/Topology.scala | 24 +-- .../aqua/parser/expr/ArrowTypeExpr.scala | 14 +- .../aqua/parser/expr/FieldTypeExpr.scala | 13 +- .../aqua/parser/expr/func/ArrowExpr.scala | 9 +- .../parser/expr/func/DeclareStreamExpr.scala | 11 +- .../scala/aqua/parser/lexer/TypeToken.scala | 80 ++++---- parser/src/test/scala/aqua/AquaSpec.scala | 36 ++-- .../test/scala/aqua/parser/FuncExprSpec.scala | 15 +- .../aqua/parser/lexer/TypeTokenSpec.scala | 111 ++++++++--- .../scala/aqua/semantics/expr/AliasSem.scala | 7 +- .../aqua/semantics/expr/func/ArrowSem.scala | 4 +- .../expr/func/DeclareStreamSem.scala | 47 ++--- .../aqua/semantics/expr/func/ForSem.scala | 35 ++-- .../aqua/semantics/expr/func/FuncOpSem.scala | 14 +- .../aqua/semantics/expr/func/OnSem.scala | 8 +- .../aqua/semantics/expr/func/ParSeqSem.scala | 8 +- .../semantics/expr/func/PushToStreamSem.scala | 32 ++-- .../aqua/semantics/rules/ValuesAlgebra.scala | 41 ++-- .../rules/types/TypeResolution.scala | 112 +++++++++++ .../semantics/rules/types/TypesAlgebra.scala | 14 +- .../rules/types/TypesInterpreter.scala | 116 ++++++++---- .../semantics/rules/types/TypesState.scala | 102 +--------- 
.../scala/aqua/semantics/ArrowSemSpec.scala | 10 +- .../scala/aqua/semantics/SemanticsSpec.scala | 79 ++++++-- .../aqua/semantics/TypeResolutionSpec.scala | 179 ++++++++++++++++++ .../aqua/semantics/ValuesAlgebraSpec.scala | 4 +- .../scala/aqua/types/IntersectTypes.scala | 28 ++- types/src/main/scala/aqua/types/Type.scala | 132 +++++++++---- .../main/scala/aqua/types/UniteTypes.scala | 22 ++- .../src/test/scala/aqua/types/TypeSpec.scala | 6 +- .../src/main/scala/aqua/syntax/optiont.scala | 20 +- 61 files changed, 1080 insertions(+), 584 deletions(-) create mode 100644 semantics/src/main/scala/aqua/semantics/rules/types/TypeResolution.scala create mode 100644 semantics/src/test/scala/aqua/semantics/TypeResolutionSpec.scala diff --git a/aqua-run/src/main/scala/aqua/run/CliFunc.scala b/aqua-run/src/main/scala/aqua/run/CliFunc.scala index 5e0ba2d3..ce652234 100644 --- a/aqua-run/src/main/scala/aqua/run/CliFunc.scala +++ b/aqua-run/src/main/scala/aqua/run/CliFunc.scala @@ -3,16 +3,16 @@ package aqua.run import aqua.parser.lexer.{CallArrowToken, CollectionToken, LiteralToken, VarToken} import aqua.parser.lift.Span import aqua.raw.value.{CollectionRaw, LiteralRaw, ValueRaw, VarRaw} -import aqua.types.{ArrayType, BottomType} +import aqua.types.* -import cats.data.{NonEmptyChain, NonEmptyList, Validated, ValidatedNec} import cats.data.Validated.{invalid, invalidNec, validNec} -import cats.{~>, Id} +import cats.data.{NonEmptyChain, NonEmptyList, Validated, ValidatedNec} +import cats.syntax.comonad.* +import cats.syntax.either.* +import cats.syntax.option.* import cats.syntax.traverse.* import cats.syntax.validated.* -import cats.syntax.either.* -import cats.syntax.comonad.* -import cats.syntax.option.* +import cats.{Id, ~>} case class CliFunc(name: String, args: List[ValueRaw] = Nil) @@ -52,7 +52,15 @@ object CliFunc { .map( NonEmptyList .fromList(_) - .map(l => CollectionRaw(l, ArrayType(l.head.baseType))) + .map(l => + CollectionRaw( + l, + ArrayType( + // FIXME: Type of Literal should always be a DataType + l.head.baseType.asInstanceOf[DataType] + ) + ) + ) .getOrElse(ValueRaw.Nil) ) .toValidatedNec diff --git a/aqua-run/src/main/scala/aqua/run/TypeValidator.scala b/aqua-run/src/main/scala/aqua/run/TypeValidator.scala index d8ba406b..6b4406bb 100644 --- a/aqua-run/src/main/scala/aqua/run/TypeValidator.scala +++ b/aqua-run/src/main/scala/aqua/run/TypeValidator.scala @@ -2,6 +2,7 @@ package aqua.run import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw} import aqua.types.* + import cats.data.Validated.{invalidNec, validNec} import cats.data.{Validated, ValidatedNec} import cats.effect.kernel.Async @@ -10,14 +11,11 @@ import cats.syntax.flatMap.* import cats.syntax.partialOrder.* import cats.syntax.show.* import cats.syntax.traverse.* - import scala.collection.immutable.SortedMap import scala.concurrent.ExecutionContext object TypeValidator { - import aqua.types.Type.typesPartialOrder - /** * Compare and validate type from Aqua file and type generated from JSON. * Also, the validation will succeed if the JSON type is missing an array or an optional field. 
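The user-visible effect of the breaking change named in the commit title is easiest to see in the Aqua example updates further down in this patch: a value declared as an option (?T) can no longer be written to, so every example that used an option as a mutable collector now declares a stream (*T) instead. The updated option_gen.aqua reduces to this pattern (restated from the diff below, with comments added; nothing here is new API):

    func stringAsOption(str: string) -> ?string:
        valueEmpty: *string    -- was `?string`; writing into an option is what this change prohibits
        valueEmpty <<- str     -- push stays available because the collector is now a stream
        <- valueEmpty          -- the stream is returned where a ?string result is expected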
@@ -69,7 +67,7 @@ object TypeValidator { case (l: OptionType, r) => // if we have ?[][]string and [][][]string it must throw an error validateTypes(name, l.element, Some(r), Some((l, r))) - case (l: BoxType, r: BoxType) => + case (l: CollectionType, r: CollectionType) => validateTypes(name, l.element, Some(r.element), fullOptionType.orElse(Some(l, r))) case (l, r) => diff --git a/backend/definitions/src/main/scala/aqua/definitions/Definitions.scala b/backend/definitions/src/main/scala/aqua/definitions/Definitions.scala index 02f75a0f..5a0379ed 100644 --- a/backend/definitions/src/main/scala/aqua/definitions/Definitions.scala +++ b/backend/definitions/src/main/scala/aqua/definitions/Definitions.scala @@ -1,12 +1,12 @@ package aqua.definitions +import aqua.definitions.* import aqua.res.FuncRes import aqua.types.* -import aqua.definitions.* + import io.circe.* import io.circe.parser.* import io.circe.syntax.* - import scala.annotation.tailrec // Represents the Aqua types @@ -16,7 +16,7 @@ sealed trait TypeDefinition { object TypeDefinition { - implicit val encodeProdDefType: Encoder[ProductTypeDef] = { + given Encoder[ProductTypeDef] = { case d @ LabeledProductTypeDef(fields) => Json.obj( ("tag", Json.fromString(d.tag)), @@ -33,7 +33,7 @@ object TypeDefinition { ) } - implicit val encodeDefType: Encoder[TypeDefinition] = { + given Encoder[TypeDefinition] = { case d @ ScalarTypeDef(name) => Json.obj( ("tag", Json.fromString(d.tag)), @@ -68,14 +68,14 @@ object TypeDefinition { ) } - implicit val encodeServiceDefType: Encoder[ServiceDef] = { case ServiceDef(sId, functions, name) => + given Encoder[ServiceDef] = { case ServiceDef(sId, functions, name) => Json.obj( ("defaultServiceId", sId.asJson), - ("functions", encodeProdDefType(functions)) + ("functions", (functions: ProductTypeDef).asJson) ) } - implicit val encodeNamesConfig: Encoder[NamesConfig] = { case n: NamesConfig => + given Encoder[NamesConfig] = { case n: NamesConfig => import n.* Json.obj( ("relay", Json.fromString(relay)), @@ -88,13 +88,12 @@ object TypeDefinition { ) } - implicit val encodeFunctionDefType: Encoder[FunctionDef] = { - case FunctionDef(fName, arrow, names) => - Json.obj( - ("functionName", Json.fromString(fName)), - ("arrow", encodeDefType(arrow)), - ("names", names.asJson) - ) + given Encoder[FunctionDef] = { case FunctionDef(fName, arrow, names) => + Json.obj( + ("functionName", Json.fromString(fName)), + ("arrow", (arrow: TypeDefinition).asJson), + ("names", names.asJson) + ) } def apply(t: Option[Type]): TypeDefinition = t.map(apply).getOrElse(NilTypeDef) @@ -103,7 +102,7 @@ object TypeDefinition { t match { case OptionType(t) => OptionTypeDef(TypeDefinition(t)) - case t: BoxType => ArrayTypeDef(TypeDefinition(t.element)) + case t: CollectionType => ArrayTypeDef(TypeDefinition(t.element)) case StructType(name, fields) => StructTypeDef(name, fields.toSortedMap.view.mapValues(TypeDefinition.apply).toMap) case AbilityType(name, fieldAndArrows) => @@ -198,7 +197,11 @@ case class NamesConfig( ) // Describes service -case class ServiceDef(defaultServiceId: Option[String], functions: LabeledProductTypeDef, name: String) +case class ServiceDef( + defaultServiceId: Option[String], + functions: LabeledProductTypeDef, + name: String +) // Describes top-level function case class FunctionDef( diff --git a/backend/ts/src/main/scala/aqua/backend/OutputFunc.scala b/backend/ts/src/main/scala/aqua/backend/OutputFunc.scala index 03b2b148..a2943e48 100644 --- a/backend/ts/src/main/scala/aqua/backend/OutputFunc.scala +++ 
b/backend/ts/src/main/scala/aqua/backend/OutputFunc.scala @@ -3,8 +3,11 @@ package aqua.backend import aqua.backend.air.FuncAirGen import aqua.backend.ts.TypeScriptCommon.fixupArgName import aqua.backend.ts.{TSFuncTypes, TypeScriptCommon} +import aqua.definitions.* +import aqua.definitions.TypeDefinition.given import aqua.res.FuncRes import aqua.types.* + import cats.syntax.show.* import io.circe.* import io.circe.parser.* @@ -20,8 +23,6 @@ case class OutputFunc(func: FuncRes, types: Types) { val funcTypes = types.funcType(func) import funcTypes.* - import aqua.definitions.TypeDefinition.* - import aqua.definitions.* def generate: (AirFunction, String) = { val tsAir = FuncAirGen(func).generate diff --git a/backend/ts/src/main/scala/aqua/backend/OutputService.scala b/backend/ts/src/main/scala/aqua/backend/OutputService.scala index e996cdcd..632a0f95 100644 --- a/backend/ts/src/main/scala/aqua/backend/OutputService.scala +++ b/backend/ts/src/main/scala/aqua/backend/OutputService.scala @@ -1,8 +1,11 @@ package aqua.backend import aqua.backend.ts.TypeScriptCommon +import aqua.definitions.* +import aqua.definitions.TypeDefinition.given import aqua.res.ServiceRes import aqua.types.ArrowType + import io.circe.* import io.circe.parser.* import io.circe.syntax.* @@ -14,8 +17,6 @@ case class OutputService(srv: ServiceRes, types: Types) { private val serviceTypes = types.serviceType(srv) import serviceTypes.* - import aqua.definitions.TypeDefinition.* - import aqua.definitions.* def generate: String = val functions = LabeledProductTypeDef( diff --git a/build.sbt b/build.sbt index 65dbb038..40fd61bc 100644 --- a/build.sbt +++ b/build.sbt @@ -122,6 +122,7 @@ lazy val types = crossProject(JVMPlatform, JSPlatform) "org.typelevel" %%% "cats-core" % catsV ) ) + .dependsOn(errors) lazy val parser = crossProject(JVMPlatform, JSPlatform) .withoutSuffixFor(JVMPlatform) diff --git a/compiler/src/main/scala/aqua/compiler/AquaCompilerConf.scala b/compiler/src/main/scala/aqua/compiler/AquaCompilerConf.scala index 22e4992a..6df280e8 100644 --- a/compiler/src/main/scala/aqua/compiler/AquaCompilerConf.scala +++ b/compiler/src/main/scala/aqua/compiler/AquaCompilerConf.scala @@ -7,4 +7,4 @@ import aqua.raw.ConstantRaw * * @param constantsList List of known constants */ -case class AquaCompilerConf(constantsList: List[ConstantRaw]) \ No newline at end of file +case class AquaCompilerConf(constantsList: List[ConstantRaw]) diff --git a/integration-tests/aqua/examples/errorClear.aqua b/integration-tests/aqua/examples/errorClear.aqua index d38b72b9..9c584a8d 100644 --- a/integration-tests/aqua/examples/errorClear.aqua +++ b/integration-tests/aqua/examples/errorClear.aqua @@ -7,7 +7,7 @@ service FailService("fail-srv"): func errorClearTest(node: string, relay: string) -> string, i64: stream: *string - code: ?i64 + code: *i64 on node via relay: try: diff --git a/integration-tests/aqua/examples/funcs.aqua b/integration-tests/aqua/examples/funcs.aqua index 92737e40..2c74c822 100644 --- a/integration-tests/aqua/examples/funcs.aqua +++ b/integration-tests/aqua/examples/funcs.aqua @@ -39,7 +39,7 @@ func ifCalc() -> u64: <- res!0 func cmp(a: i32, b: i32, pred: i8 -> bool) -> bool: - result: ?bool + result: *bool if a < b: result <- pred(-1) diff --git a/integration-tests/aqua/examples/handleResultError.aqua b/integration-tests/aqua/examples/handleResultError.aqua index 487a2bca..75a5dd5c 100644 --- a/integration-tests/aqua/examples/handleResultError.aqua +++ b/integration-tests/aqua/examples/handleResultError.aqua @@ -4,7 +4,7 @@ 
export handleResultError -- t = true, f = false func handleResultError(t: bool, f: bool) -> string: - opt: ?[]string + opt: *[]string if t == f: -- false opt <<- ["unreachable"] diff --git a/integration-tests/aqua/examples/options/option_gen.aqua b/integration-tests/aqua/examples/options/option_gen.aqua index 394e96c2..f1a7a27a 100644 --- a/integration-tests/aqua/examples/options/option_gen.aqua +++ b/integration-tests/aqua/examples/options/option_gen.aqua @@ -2,7 +2,7 @@ service OptionString("opt_str"): checkOption(str: ?string) -> string func emptyString() -> ?string: - valueEmpty: ?string + valueEmpty: *string <- valueEmpty func checkEmpty() -> string: @@ -11,7 +11,7 @@ func checkEmpty() -> string: <- res func stringAsOption(str: string) -> ?string: - valueEmpty: ?string + valueEmpty: *string valueEmpty <<- str <- valueEmpty diff --git a/integration-tests/aqua/examples/stream.aqua b/integration-tests/aqua/examples/stream.aqua index e678c06f..020df4a4 100644 --- a/integration-tests/aqua/examples/stream.aqua +++ b/integration-tests/aqua/examples/stream.aqua @@ -36,7 +36,7 @@ func returnNilLength() -> u32: <- arr.length func stringNone() -> ?string: - valueNone: ?string + valueNone: *string <- valueNone func returnNone() -> ?string: diff --git a/integration-tests/aqua/examples/streamArgs.aqua b/integration-tests/aqua/examples/streamArgs.aqua index b1cd9b98..727bd773 100644 --- a/integration-tests/aqua/examples/streamArgs.aqua +++ b/integration-tests/aqua/examples/streamArgs.aqua @@ -1,3 +1,7 @@ +aqua StreamArgs + +export retrieve_records, modify_stream, TestService + service TestService("test-service"): get_records(key: string) -> []string @@ -7,4 +11,8 @@ func append_records(peer: string, srum: *[]string): func retrieve_records(peer: string) -> [][]string: records: *[]string append_records(peer, records) - <- records \ No newline at end of file + <- records + +func modify_stream(stream: *string) -> []string: + stream <<- "appended value" + <- stream \ No newline at end of file diff --git a/integration-tests/aqua/examples/via.aqua b/integration-tests/aqua/examples/via.aqua index a04f0b09..b387b847 100644 --- a/integration-tests/aqua/examples/via.aqua +++ b/integration-tests/aqua/examples/via.aqua @@ -1,3 +1,7 @@ +aqua Via + +export viaArr, viaStream, viaOpt + import "@fluencelabs/aqua-lib/builtin.aqua" func viaArr(node_id: string, viaAr: []string) -> Info: @@ -5,13 +9,12 @@ func viaArr(node_id: string, viaAr: []string) -> Info: p <- Peer.identify() <- p - func viaStream(node_id: string, viaStr: *string) -> Info: on node_id via viaStr: p <- Peer.identify() <- p -func viaOpt(relay: string, node_id: string, viaOpt: ?string) -> Info: +func viaOpt(node_id: string, viaOpt: ?string) -> Info: on node_id via viaOpt: p <- Peer.identify() <- p \ No newline at end of file diff --git a/integration-tests/src/__test__/examples.spec.ts b/integration-tests/src/__test__/examples.spec.ts index 13cb6972..5050bc57 100644 --- a/integration-tests/src/__test__/examples.spec.ts +++ b/integration-tests/src/__test__/examples.spec.ts @@ -33,7 +33,11 @@ import { import { abilityCall, complexAbilityCall, - checkAbCallsCall, bugLNG258Call1, bugLNG258Call2, bugLNG258Call3, multipleAbilityWithClosureCall, + checkAbCallsCall, + bugLNG258Call1, + bugLNG258Call2, + bugLNG258Call3, + multipleAbilityWithClosureCall, } from "../examples/abilityCall.js"; import { nilLengthCall, @@ -78,7 +82,10 @@ import { tryCatchCall } from "../examples/tryCatchCall.js"; import { tryOtherwiseCall } from "../examples/tryOtherwiseCall.js"; import 
{ coCall } from "../examples/coCall.js"; import { bugLNG60Call, passArgsCall } from "../examples/passArgsCall.js"; -import { streamArgsCall } from "../examples/streamArgsCall.js"; +import { + streamArgsCall, + modifyStreamCall, +} from "../examples/streamArgsCall.js"; import { streamResultsCall } from "../examples/streamResultsCall.js"; import { structuralTypingCall } from "../examples/structuralTypingCall"; import { @@ -104,7 +111,10 @@ import { multiReturnCall } from "../examples/multiReturnCall.js"; import { declareCall } from "../examples/declareCall.js"; import { genOptions, genOptionsEmptyString } from "../examples/optionsCall.js"; import { lng193BugCall } from "../examples/closureReturnRename.js"; -import {closuresCall, multipleClosuresLNG262BugCall} from "../examples/closures.js"; +import { + closuresCall, + multipleClosuresLNG262BugCall, +} from "../examples/closures.js"; import { closureArrowCaptureCall } from "../examples/closureArrowCapture.js"; import { bugLNG63_2Call, @@ -589,6 +599,18 @@ describe("Testing examples", () => { expect(streamArgsResult).toEqual([["peer_id", "peer_id"]]); }); + it("streamArgs.aqua modify stream", async () => { + let streamArgsResult = await modifyStreamCall([ + "passed value 1", + "passed value 2", + ]); + expect(streamArgsResult).toEqual([ + "passed value 1", + "passed value 2", + "appended value", + ]); + }); + it("streamResults.aqua", async () => { let streamResultsResult = await streamResultsCall(); expect(streamResultsResult).toEqual(["new_name", "new_name", "new_name"]); @@ -934,9 +956,10 @@ describe("Testing examples", () => { it("via.aqua", async () => { let res1 = await viaArrCall(); - let res2 = await viaOptCall(relayPeerId1); - let res3 = await viaOptNullCall(relayPeerId1); - let res4 = await viaStreamCall(relayPeerId1); + let res2 = await viaOptCall(); + let res3 = await viaOptNullCall(); + let res4 = await viaStreamCall(); + expect(res1).not.toHaveLength(0); expect(res1).toEqual(res2); expect(res2).toEqual(res3); expect(res3).toEqual(res4); diff --git a/integration-tests/src/examples/streamArgsCall.ts b/integration-tests/src/examples/streamArgsCall.ts index 920297da..2e0cd855 100644 --- a/integration-tests/src/examples/streamArgsCall.ts +++ b/integration-tests/src/examples/streamArgsCall.ts @@ -1,5 +1,6 @@ import { retrieve_records, + modify_stream, registerTestService, } from "../compiled/examples/streamArgs.js"; @@ -12,3 +13,7 @@ export async function streamArgsCall() { return await retrieve_records("peer_id"); } + +export async function modifyStreamCall(arg: string[]) { + return await modify_stream(arg); +} diff --git a/integration-tests/src/examples/viaCall.ts b/integration-tests/src/examples/viaCall.ts index c50187dd..b6c85dc0 100644 --- a/integration-tests/src/examples/viaCall.ts +++ b/integration-tests/src/examples/viaCall.ts @@ -13,26 +13,21 @@ export async function viaArrCall(): Promise { return res.external_addresses; } -export async function viaOptCall(relayPeerId: string): Promise { - let res2 = await viaOpt(relayPeerId, relays[4].peerId, relays[2].peerId, { - ttl: 30000, - }); +export async function viaOptCall(): Promise { + let res2 = await viaOpt(relays[4].peerId, relays[2].peerId, { ttl: 30000 }); return res2.external_addresses; } -export async function viaOptNullCall(relayPeerId: string): Promise { - let res3 = await viaOpt( - relayPeerId, - relays[4].peerId, - relays[2].peerId || null, - { ttl: 30000 }, - ); +export async function viaOptNullCall(): Promise { + let res3 = await viaOpt(relays[4].peerId, null, { + ttl: 
30000, + }); return res3.external_addresses; } -export async function viaStreamCall(relayPeerId: string): Promise { +export async function viaStreamCall(): Promise { let res4 = await viaStream( relays[4].peerId, [relays[2].peerId, relays[1].peerId], diff --git a/js/js-exports/src/main/scala/aqua/js/Definitions.scala b/js/js-exports/src/main/scala/aqua/js/Definitions.scala index 224181aa..64dfafa9 100644 --- a/js/js-exports/src/main/scala/aqua/js/Definitions.scala +++ b/js/js-exports/src/main/scala/aqua/js/Definitions.scala @@ -2,11 +2,11 @@ package aqua.js import aqua.* import aqua.backend.* -import aqua.definitions.{ArrayTypeDef, ArrowTypeDef, BottomTypeDef, FunctionDef, LabeledProductTypeDef, NamesConfig, NilTypeDef, OptionTypeDef, ScalarTypeDef, ServiceDef, StructTypeDef, TopTypeDef, TypeDefinition, UnlabeledProductTypeDef} +import aqua.definitions.* import aqua.res.FuncRes -import aqua.types.{ArrowType, BottomType, BoxType, LabeledConsType, LiteralType, NilType, OptionType, ProductType, ScalarType, StructType, TopType, Type, UnlabeledConsType} -import io.circe.{Encoder, Json} +import aqua.types.* +import io.circe.{Encoder, Json} import scala.scalajs.js import scala.scalajs.js.JSConverters.* import scala.scalajs.js.annotation.{JSExportAll, JSImport} diff --git a/js/js-exports/src/main/scala/aqua/js/JsonEncoder.scala b/js/js-exports/src/main/scala/aqua/js/JsonEncoder.scala index d79c57e8..df260e91 100644 --- a/js/js-exports/src/main/scala/aqua/js/JsonEncoder.scala +++ b/js/js-exports/src/main/scala/aqua/js/JsonEncoder.scala @@ -1,6 +1,7 @@ package aqua.js import aqua.types.* + import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel} import cats.data.{NonEmptyMap, Validated, ValidatedNec} import cats.syntax.applicative.* @@ -9,7 +10,6 @@ import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.semigroup.* import cats.syntax.traverse.* - import scala.collection.immutable.SortedMap import scala.scalajs.js @@ -17,18 +17,18 @@ object JsonEncoder { /* Get widest possible type from JSON arrays. 
For example: JSON: { - field1: [ - { - a: "a", - b: [1,2,3], - c: 4 - }, - { - c: 3 - } - ] - } - There type in array must be { a: ?string, b: []number, c: number + field1: [ + { + a: "a", + b: [1,2,3], + c: 4 + }, + { + c: 3 + } + ] + } + Type in array must be { a: ?string, b: []number, c: number } */ private def compareAndGetWidestType( name: String, @@ -43,8 +43,8 @@ object JsonEncoder { case (la @ ArrayType(_), BottomType) => validNec(la) case (lo @ OptionType(lel), rtt) if lel == rtt => validNec(lo) case (ltt, ro @ OptionType(rel)) if ltt == rel => validNec(ro) - case (BottomType, rb) => validNec(OptionType(rb)) - case (lb, BottomType) => validNec(OptionType(lb)) + case (BottomType, rb: DataType) => validNec(OptionType(rb)) + case (lb: DataType, BottomType) => validNec(OptionType(lb)) case (lst: StructType, rst: StructType) => val lFieldsSM: SortedMap[String, Type] = lst.fields.toSortedMap val rFieldsSM: SortedMap[String, Type] = rst.fields.toSortedMap @@ -100,7 +100,10 @@ object JsonEncoder { .reduce[ValidatedNec[String, Type]] { case (l, t) => compareAndGetWidestType(name, l, t) } - .map(t => ArrayType(t)) + .andThen { + case dt: DataType => validNec(ArrayType(dt)) + case t => invalidNec(s"Unexpected type $t") + } } } case a if t == "object" && !js.isUndefined(arg) && arg != null => diff --git a/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala b/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala index 701146a2..0be4c3bb 100644 --- a/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/ArrowInliner.scala @@ -6,17 +6,17 @@ import aqua.model.* import aqua.model.inline.state.{Arrows, Exports, Mangler} import aqua.raw.ops.RawTag import aqua.raw.value.{ValueRaw, VarRaw} -import aqua.types.{AbilityType, ArrowType, BoxType, NamedType, StreamType, Type} +import aqua.types.{AbilityType, ArrowType, CollectionType, NamedType, StreamType, Type} import cats.data.StateT import cats.data.{Chain, IndexedStateT, State} -import cats.syntax.functor.* import cats.syntax.applicative.* import cats.syntax.bifunctor.* import cats.syntax.foldable.* -import cats.syntax.traverse.* +import cats.syntax.functor.* import cats.syntax.option.* import cats.syntax.show.* +import cats.syntax.traverse.* import cats.{Eval, Monoid} import scribe.Logging diff --git a/model/inline/src/main/scala/aqua/model/inline/TagInliner.scala b/model/inline/src/main/scala/aqua/model/inline/TagInliner.scala index 81bb1386..76c08c92 100644 --- a/model/inline/src/main/scala/aqua/model/inline/TagInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/TagInliner.scala @@ -8,7 +8,7 @@ import aqua.model.inline.state.{Arrows, Exports, Mangler} import aqua.model.inline.tag.IfTagInliner import aqua.raw.ops.* import aqua.raw.value.* -import aqua.types.{BoxType, CanonStreamType, StreamType} +import aqua.types.{CanonStreamType, CollectionType, StreamType} import cats.data.{Chain, State, StateT} import cats.instances.list.* @@ -31,9 +31,10 @@ import scribe.Logging */ object TagInliner extends Logging { - import RawValueInliner.{valueListToModel, valueToModel} import aqua.model.inline.Inline.parDesugarPrefix + import RawValueInliner.{valueListToModel, valueToModel} + /** * Result of [[RawTag]] inlining * @@ -242,7 +243,7 @@ object TagInliner extends Logging { (v, p) = flattened n <- Mangler[S].findAndForbidName(item) elementType = iterable.`type` match { - case b: BoxType => b.element + case b: CollectionType => b.element case _ => internalError( 
s"non-box type variable '$iterable' in 'for' expression." diff --git a/model/inline/src/main/scala/aqua/model/inline/raw/ApplyFunctorRawInliner.scala b/model/inline/src/main/scala/aqua/model/inline/raw/ApplyFunctorRawInliner.scala index 2691d156..eb297e17 100644 --- a/model/inline/src/main/scala/aqua/model/inline/raw/ApplyFunctorRawInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/raw/ApplyFunctorRawInliner.scala @@ -1,5 +1,9 @@ package aqua.model.inline.raw +import aqua.model.inline.Inline +import aqua.model.inline.Inline.MergeMode.* +import aqua.model.inline.RawValueInliner.unfold +import aqua.model.inline.state.{Arrows, Exports, Mangler} import aqua.model.{ CallModel, CanonicalizeModel, @@ -10,14 +14,11 @@ import aqua.model.{ ValueModel, VarModel } -import aqua.model.inline.Inline.MergeMode.* -import aqua.model.inline.Inline -import aqua.model.inline.state.{Arrows, Exports, Mangler} import aqua.raw.value.{FunctorRaw, ValueRaw} -import cats.data.State +import aqua.types.{ArrayType, CanonStreamType, CollectionType, StreamType} + import cats.data.Chain -import aqua.model.inline.RawValueInliner.unfold -import aqua.types.{ArrayType, BoxType, CanonStreamType, StreamType} +import cats.data.State import cats.syntax.monoid.* import scribe.Logging diff --git a/model/inline/src/main/scala/aqua/model/inline/raw/CollectionRawInliner.scala b/model/inline/src/main/scala/aqua/model/inline/raw/CollectionRawInliner.scala index 3b526d68..45eda954 100644 --- a/model/inline/src/main/scala/aqua/model/inline/raw/CollectionRawInliner.scala +++ b/model/inline/src/main/scala/aqua/model/inline/raw/CollectionRawInliner.scala @@ -1,11 +1,13 @@ package aqua.model.inline.raw -import aqua.model.{CallModel, CanonicalizeModel, NullModel, PushToStreamModel, RestrictionModel, SeqModel, ValueModel, VarModel, XorModel} +import aqua.model.* import aqua.model.inline.Inline import aqua.model.inline.RawValueInliner.valueToModel import aqua.model.inline.state.{Arrows, Exports, Mangler} import aqua.raw.value.CollectionRaw +import aqua.types.StreamMapType import aqua.types.{ArrayType, CanonStreamType, OptionType, StreamType} + import cats.data.{Chain, State} object CollectionRawInliner extends RawInliner[CollectionRaw] { @@ -20,11 +22,15 @@ object CollectionRawInliner extends RawInliner[CollectionRaw] { assignToName: Option[String] = None ): State[S, (ValueModel, Inline)] = for { - streamName <- raw.boxType match { + streamName <- raw.collectionType match { case _: StreamType => - assignToName - .map(s => State.pure(s)) - .getOrElse(Mangler[S].findAndForbidName("stream-inline")) + assignToName.fold( + Mangler[S].findAndForbidName("stream-inline") + )(State.pure) + case _: StreamMapType => + assignToName.fold( + Mangler[S].findAndForbidName("stream_map-inline") + )(State.pure) case _: CanonStreamType => Mangler[S].findAndForbidName("canon_stream-inline") case _: ArrayType => Mangler[S].findAndForbidName("array-inline") case _: OptionType => Mangler[S].findAndForbidName("option-inline") @@ -50,15 +56,15 @@ object CollectionRawInliner extends RawInliner[CollectionRaw] { } canonName <- - if (raw.boxType.isStream) State.pure(streamName) + if (raw.collectionType.isStream) State.pure(streamName) else Mangler[S].findAndForbidName(streamName) - canonType = raw.boxType match { - case StreamType(_) => raw.boxType - case _ => CanonStreamType(raw.boxType.element) + canonType = raw.collectionType match { + case StreamType(_) => raw.collectionType + case _ => CanonStreamType(raw.collectionType.element) } canon = 
CallModel.Export(canonName, canonType) } yield VarModel(canonName, canon.`type`) -> Inline.tree( - raw.boxType match { + raw.collectionType match { case ArrayType(_) => RestrictionModel(streamName, streamType).wrap( SeqModel.wrap(inlines ++ vals :+ CanonicalizeModel(stream, canon).leaf) diff --git a/model/raw/src/main/scala/aqua/raw/ConstantRaw.scala b/model/raw/src/main/scala/aqua/raw/ConstantRaw.scala index bb8966fe..f8481c2c 100644 --- a/model/raw/src/main/scala/aqua/raw/ConstantRaw.scala +++ b/model/raw/src/main/scala/aqua/raw/ConstantRaw.scala @@ -55,7 +55,7 @@ object ConstantRaw { false ) - def defaultConstants(relayVarName: Option[String]): List[ConstantRaw] = + def defaultConstants(relayVarName: Option[String] = None): List[ConstantRaw] = hostPeerId( relayVarName ) :: initPeerId :: particleTtl :: particleTimestamp :: nil :: lastError :: Nil diff --git a/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala b/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala index 079a4c9e..8d4cc2a9 100644 --- a/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala +++ b/model/raw/src/main/scala/aqua/raw/ops/RawTag.scala @@ -4,12 +4,12 @@ import aqua.raw.arrow.FuncRaw import aqua.raw.ops.RawTag.Tree import aqua.raw.value.{CallArrowRaw, CallServiceRaw, ValueRaw} import aqua.tree.{TreeNode, TreeNodeCompanion} -import aqua.types.{ArrowType, DataType, ServiceType} +import aqua.types.* import cats.Show import cats.data.{Chain, NonEmptyList} -import cats.syntax.foldable.* import cats.free.Cofree +import cats.syntax.foldable.* sealed trait RawTag extends TreeNode[RawTag] { @@ -160,7 +160,7 @@ case class NextTag(item: String) extends RawTag { override def mapValues(f: ValueRaw => ValueRaw): RawTag = this } -case class RestrictionTag(name: String, `type`: DataType) extends SeqGroupTag { +case class RestrictionTag(name: String, `type`: Type) extends SeqGroupTag { override def restrictsVarNames: Set[String] = Set(name) diff --git a/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala b/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala index 19abc606..cf200f50 100644 --- a/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala +++ b/model/raw/src/main/scala/aqua/raw/value/ValueRaw.scala @@ -1,9 +1,11 @@ package aqua.raw.value +import aqua.errors.Errors.internalError import aqua.types.* +import aqua.types.Type.* -import cats.data.{Chain, NonEmptyList, NonEmptyMap} import cats.Eq +import cats.data.{Chain, NonEmptyList, NonEmptyMap} import cats.syntax.option.* sealed trait ValueRaw { @@ -157,16 +159,27 @@ object LiteralRaw { } } -case class CollectionRaw(values: NonEmptyList[ValueRaw], boxType: BoxType) extends ValueRaw { +case class CollectionRaw( + values: NonEmptyList[ValueRaw], + collectionType: CollectionType +) extends ValueRaw { - lazy val elementType: Type = boxType.element + lazy val elementType: DataType = collectionType.element - override lazy val baseType: Type = boxType + override lazy val baseType: Type = collectionType override def mapValues(f: ValueRaw => ValueRaw): ValueRaw = { val vals = values.map(f) - val el = vals.map(_.`type`).reduceLeft(_ `∩` _) - copy(vals, boxType.withElement(el)) + val types = vals.map(_.`type` match { + case ct: CollectibleType => ct + case t => internalError(s"Non-collection type in collection: ${t}") + }) + val element = CollectionType.elementTypeOf(types.toList) + + copy( + values = vals, + collectionType = collectionType.withElement(element) + ) } override def varNames: Set[String] = values.toList.flatMap(_.varNames).toSet diff --git 
a/model/res/src/main/scala/aqua/res/ResolvedOp.scala b/model/res/src/main/scala/aqua/res/ResolvedOp.scala index 52bbf15d..22ad3bb9 100644 --- a/model/res/src/main/scala/aqua/res/ResolvedOp.scala +++ b/model/res/src/main/scala/aqua/res/ResolvedOp.scala @@ -3,10 +3,11 @@ package aqua.res import aqua.model.{CallModel, ForModel, LiteralModel, ValueModel, VarModel} import aqua.raw.ops.Call import aqua.tree.{TreeNode, TreeNodeCompanion} -import aqua.types.DataType +import aqua.types.* + +import cats.Show import cats.data.Chain import cats.free.Cofree -import cats.Show // TODO docs to all traits and objects sealed trait ResolvedOp extends TreeNode[ResolvedOp] @@ -46,7 +47,7 @@ object FoldRes { FoldRes(item, iterable, Mode.Never) } -case class RestrictionRes(item: String, `type`: DataType) extends ResolvedOp { +case class RestrictionRes(item: String, `type`: Type) extends ResolvedOp { override def toString: String = s"(new ${`type`.airPrefix}$item " } diff --git a/model/src/main/scala/aqua/model/OpModel.scala b/model/src/main/scala/aqua/model/OpModel.scala index ab8a3c15..a34fc6a7 100644 --- a/model/src/main/scala/aqua/model/OpModel.scala +++ b/model/src/main/scala/aqua/model/OpModel.scala @@ -4,13 +4,12 @@ import aqua.model.OpModel.Tree import aqua.tree.{TreeNode, TreeNodeCompanion} import aqua.types.* -import cats.data.Chain -import cats.free.Cofree -import cats.Show import cats.Eval +import cats.Show +import cats.data.Chain import cats.data.NonEmptyList +import cats.free.Cofree import cats.syntax.functor.* - import scala.annotation.tailrec sealed trait OpModel extends TreeNode[OpModel] { @@ -129,7 +128,7 @@ case class NextModel(item: String) extends OpModel { // TODO: Refactor out `name` and `type` to // something like VarModel without properties -case class RestrictionModel(name: String, `type`: DataType) extends SeqGroupModel { +case class RestrictionModel(name: String, `type`: Type) extends SeqGroupModel { override def usesVarNames: Set[String] = Set.empty override def restrictsVarNames: Set[String] = Set(name) diff --git a/model/transform/src/main/scala/aqua/model/transform/pre/ArgsProvider.scala b/model/transform/src/main/scala/aqua/model/transform/pre/ArgsProvider.scala index 92162a05..909a526e 100644 --- a/model/transform/src/main/scala/aqua/model/transform/pre/ArgsProvider.scala +++ b/model/transform/src/main/scala/aqua/model/transform/pre/ArgsProvider.scala @@ -18,7 +18,7 @@ object ArgsProvider { // Variable name to store the value of the argument varName: String, // Type of the argument - t: DataType + t: DataType | StreamType ) } @@ -46,18 +46,21 @@ case class ArgsFromService(dataServiceId: ValueRaw) extends ArgsProvider { ) } + private def getDataOp(name: String, varName: String, t: DataType): RawTag.Tree = + CallArrowRawTag + .service( + dataServiceId, + name, + Call(Nil, Call.Export(varName, t) :: Nil) + ) + .leaf + def getDataOp(arg: ArgsProvider.Arg): RawTag.Tree = arg.t match { case st: StreamType => getStreamDataOp(arg.name, arg.varName, st) - case _ => - CallArrowRawTag - .service( - dataServiceId, - arg.name, - Call(Nil, Call.Export(arg.varName, arg.t) :: Nil) - ) - .leaf + case dt: DataType => + getDataOp(arg.name, arg.varName, dt) } override def provideArgs(args: List[ArgsProvider.Arg]): List[RawTag.Tree] = diff --git a/model/transform/src/main/scala/aqua/model/transform/pre/FuncPreTransformer.scala b/model/transform/src/main/scala/aqua/model/transform/pre/FuncPreTransformer.scala index b3eea8ff..2abe7840 100644 --- 
a/model/transform/src/main/scala/aqua/model/transform/pre/FuncPreTransformer.scala +++ b/model/transform/src/main/scala/aqua/model/transform/pre/FuncPreTransformer.scala @@ -81,7 +81,7 @@ case class FuncPreTransformer( (name, s"-$name-arg-", typ) } - val dataArgs = args.collect { case (name, varName, t: DataType) => + val nonArrowArgs = args.collect { case (name, varName, t: (DataType | StreamType)) => ArgsProvider.Arg(name, varName, t) } @@ -95,7 +95,7 @@ case class FuncPreTransformer( ) val provideArgs = argsProvider.provideArgs( - relayArg.toList ::: dataArgs + relayArg.toList ::: nonArrowArgs ) val handleResults = resultsHandler.handleResults( diff --git a/model/transform/src/main/scala/aqua/model/transform/topology/Topology.scala b/model/transform/src/main/scala/aqua/model/transform/topology/Topology.scala index aa721e72..7296872d 100644 --- a/model/transform/src/main/scala/aqua/model/transform/topology/Topology.scala +++ b/model/transform/src/main/scala/aqua/model/transform/topology/Topology.scala @@ -1,28 +1,28 @@ package aqua.model.transform.topology import aqua.errors.Errors.internalError -import aqua.model.transform.topology.TopologyPath -import aqua.model.transform.cursor.ChainZipper -import aqua.model.transform.topology.strategy.* import aqua.model.* +import aqua.model.transform.cursor.ChainZipper +import aqua.model.transform.topology.TopologyPath +import aqua.model.transform.topology.strategy.* import aqua.raw.value.{LiteralRaw, ValueRaw} import aqua.res.{ApRes, CanonRes, FoldRes, MakeRes, NextRes, ResolvedOp, SeqRes} -import aqua.types.{ArrayType, BoxType, CanonStreamType, ScalarType, StreamType} +import aqua.types.{ArrayType, CanonStreamType, CollectionType, ScalarType, StreamType} import cats.Eval import cats.data.Chain.{==:, nil} import cats.data.OptionT import cats.data.{Chain, NonEmptyChain, NonEmptyList, OptionT} import cats.free.Cofree -import cats.syntax.traverse.* -import cats.syntax.show.* -import cats.syntax.apply.* -import cats.syntax.option.* -import cats.syntax.flatMap.* -import cats.syntax.foldable.* -import cats.syntax.applicative.* import cats.instances.map.* import cats.kernel.Monoid +import cats.syntax.applicative.* +import cats.syntax.apply.* +import cats.syntax.flatMap.* +import cats.syntax.foldable.* +import cats.syntax.option.* +import cats.syntax.show.* +import cats.syntax.traverse.* import scribe.Logging /** @@ -370,7 +370,7 @@ object Topology extends Logging { reversed: Boolean = false ): Chain[Res] = peerIds.map { v => v.`type` match { - case _: BoxType => + case _: CollectionType => val itemName = "-via-peer-" val steps = Chain( MakeRes.hop(VarModel(itemName, ScalarType.string, Chain.empty)), diff --git a/parser/src/main/scala/aqua/parser/expr/ArrowTypeExpr.scala b/parser/src/main/scala/aqua/parser/expr/ArrowTypeExpr.scala index 42980e06..fb523f00 100644 --- a/parser/src/main/scala/aqua/parser/expr/ArrowTypeExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/ArrowTypeExpr.scala @@ -2,13 +2,14 @@ package aqua.parser.expr import aqua.parser.Expr import aqua.parser.lexer.Token.* -import aqua.parser.lexer.{ArrowTypeToken, DataTypeToken, Name} +import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, Name} import aqua.parser.lift.LiftParser +import aqua.parser.lift.Span +import aqua.parser.lift.Span.{P0ToSpan, PToSpan} + import cats.Comonad import cats.parse.Parser import cats.~> -import aqua.parser.lift.Span -import aqua.parser.lift.Span.{P0ToSpan, PToSpan} case class ArrowTypeExpr[F[_]](name: Name[F], `type`: ArrowTypeToken[F]) extends 
Expr[F](ArrowTypeExpr, name) { @@ -19,8 +20,9 @@ object ArrowTypeExpr extends Expr.Leaf { override val p: Parser[ArrowTypeExpr[Span.S]] = (Name.p ~ ((` : ` *> ArrowTypeToken.`arrowdef`( - DataTypeToken.`datatypedef` - )) | ArrowTypeToken.`arrowWithNames`(DataTypeToken.`datatypedef`))).map { case (name, t) => - ArrowTypeExpr(name, t) + BasicTypeToken.`compositetypedef` + )) | ArrowTypeToken.`arrowWithNames`(BasicTypeToken.`compositetypedef`))).map { + case (name, t) => + ArrowTypeExpr(name, t) } } diff --git a/parser/src/main/scala/aqua/parser/expr/FieldTypeExpr.scala b/parser/src/main/scala/aqua/parser/expr/FieldTypeExpr.scala index 1cd23010..0f587925 100644 --- a/parser/src/main/scala/aqua/parser/expr/FieldTypeExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/FieldTypeExpr.scala @@ -2,15 +2,16 @@ package aqua.parser.expr import aqua.parser.Expr import aqua.parser.lexer.Token.* -import aqua.parser.lexer.{DataTypeToken, Name, StreamTypeToken} +import aqua.parser.lexer.{BasicTypeToken, Name, StreamTypeToken} import aqua.parser.lift.LiftParser -import cats.Comonad -import cats.parse.Parser -import cats.~> import aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan} -case class FieldTypeExpr[F[_]](name: Name[F], `type`: DataTypeToken[F]) +import cats.Comonad +import cats.parse.Parser +import cats.~> + +case class FieldTypeExpr[F[_]](name: Name[F], `type`: BasicTypeToken[F]) extends Expr[F](FieldTypeExpr, name) { override def mapK[K[_]: Comonad](fk: F ~> K): FieldTypeExpr[K] = @@ -20,7 +21,7 @@ case class FieldTypeExpr[F[_]](name: Name[F], `type`: DataTypeToken[F]) object FieldTypeExpr extends Expr.Leaf { override val p: Parser[FieldTypeExpr[Span.S]] = - ((Name.p <* ` : `) ~ DataTypeToken.`datatypedef`).map { case (name, t) => + ((Name.p <* ` : `) ~ BasicTypeToken.`compositetypedef`).map { case (name, t) => FieldTypeExpr(name, t) } } diff --git a/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala b/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala index d918f3d6..3eaf56e0 100644 --- a/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/func/ArrowExpr.scala @@ -1,13 +1,14 @@ package aqua.parser.expr.func -import aqua.parser.{ArrowReturnError, Ast, Expr, ParserError} -import aqua.parser.lexer.{ArrowTypeToken, DataTypeToken, TypeToken, ValueToken} +import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, TypeToken, ValueToken} import aqua.parser.lift.LiftParser +import aqua.parser.lift.Span +import aqua.parser.lift.Span.{P0ToSpan, PToSpan} +import aqua.parser.{ArrowReturnError, Ast, Expr, ParserError} + import cats.Comonad import cats.parse.Parser import cats.~> -import aqua.parser.lift.Span -import aqua.parser.lift.Span.{P0ToSpan, PToSpan} case class ArrowExpr[F[_]](arrowTypeExpr: ArrowTypeToken[F]) extends Expr[F](ArrowExpr, arrowTypeExpr) { diff --git a/parser/src/main/scala/aqua/parser/expr/func/DeclareStreamExpr.scala b/parser/src/main/scala/aqua/parser/expr/func/DeclareStreamExpr.scala index 34e4d3b7..ed2fedea 100644 --- a/parser/src/main/scala/aqua/parser/expr/func/DeclareStreamExpr.scala +++ b/parser/src/main/scala/aqua/parser/expr/func/DeclareStreamExpr.scala @@ -3,14 +3,15 @@ package aqua.parser.expr.func import aqua.parser.Expr import aqua.parser.expr.func.DeclareStreamExpr import aqua.parser.lexer.Token.* -import aqua.parser.lexer.{DataTypeToken, Name, Token, TypeToken} +import aqua.parser.lexer.{BasicTypeToken, Name, Token, TypeToken} import aqua.parser.lift.LiftParser -import 
cats.parse.Parser as P -import cats.{Comonad, ~>} import aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan} -case class DeclareStreamExpr[F[_]](name: Name[F], `type`: DataTypeToken[F]) +import cats.parse.Parser as P +import cats.{Comonad, ~>} + +case class DeclareStreamExpr[F[_]](name: Name[F], `type`: BasicTypeToken[F]) extends Expr[F](DeclareStreamExpr, name) { override def mapK[K[_]: Comonad](fk: F ~> K): DeclareStreamExpr[K] = @@ -20,7 +21,7 @@ case class DeclareStreamExpr[F[_]](name: Name[F], `type`: DataTypeToken[F]) object DeclareStreamExpr extends Expr.Leaf { override val p: P[DeclareStreamExpr[Span.S]] = - ((Name.p <* ` : `) ~ DataTypeToken.`datatypedef`).map { case (name, t) => + ((Name.p <* ` : `) ~ BasicTypeToken.`compositetypedef`).map { case (name, t) => DeclareStreamExpr(name, t) } diff --git a/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala b/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala index 6f97d0f7..2abd4559 100644 --- a/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala +++ b/parser/src/main/scala/aqua/parser/lexer/TypeToken.scala @@ -3,39 +3,46 @@ package aqua.parser.lexer import aqua.parser.lexer.Token.* import aqua.parser.lift.LiftParser import aqua.parser.lift.LiftParser.* +import aqua.parser.lift.Span +import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S} import aqua.types.ScalarType + import cats.Comonad +import cats.data.NonEmptyList import cats.parse.{Accumulator0, Parser as P, Parser0 as P0} import cats.syntax.comonad.* import cats.syntax.functor.* import cats.~> -import aqua.parser.lift.Span -import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S} -import cats.data.NonEmptyList sealed trait TypeToken[S[_]] extends Token[S] { def mapK[K[_]: Comonad](fk: S ~> K): TypeToken[K] } -sealed trait DataTypeToken[S[_]] extends TypeToken[S] { - override def mapK[K[_]: Comonad](fk: S ~> K): DataTypeToken[K] +sealed trait BasicTypeToken[S[_]] extends TypeToken[S] { + override def mapK[K[_]: Comonad](fk: S ~> K): BasicTypeToken[K] } case class TopBottomToken[S[_]: Comonad](override val unit: S[Unit], isTop: Boolean) - extends DataTypeToken[S] { + extends BasicTypeToken[S] { override def as[T](v: T): S[T] = unit.as(v) def isBottom: Boolean = !isTop override def mapK[K[_]: Comonad](fk: S ~> K): TopBottomToken[K] = copy(fk(unit), isTop) } -case class ArrayTypeToken[S[_]: Comonad](override val unit: S[Unit], data: DataTypeToken[S]) - extends DataTypeToken[S] { +case class ArrayTypeToken[S[_]: Comonad](override val unit: S[Unit], data: BasicTypeToken[S]) + extends BasicTypeToken[S] { override def as[T](v: T): S[T] = unit.as(v) override def mapK[K[_]: Comonad](fk: S ~> K): ArrayTypeToken[K] = copy(fk(unit), data.mapK(fk)) } -case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: DataTypeToken[S]) - extends DataTypeToken[S] { +object ArrayTypeToken { + + val `arraytypedef`: P[ArrayTypeToken[Span.S]] = + (`[]`.lift ~ BasicTypeToken.`compositetypedef`).map(ud => ArrayTypeToken(ud._1, ud._2)) +} + +case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: BasicTypeToken[S]) + extends BasicTypeToken[S] { override def as[T](v: T): S[T] = unit.as(v) override def mapK[K[_]: Comonad](fk: S ~> K): StreamTypeToken[K] = copy(fk(unit), data.mapK(fk)) } @@ -43,14 +50,12 @@ case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: Data object StreamTypeToken { val `streamtypedef`: P[StreamTypeToken[Span.S]] = - ((`*`.lift <* P.not(`*`).withContext("Nested streams '**type' are prohibited")) - ~ 
DataTypeToken.`withoutstreamdatatypedef`) - .map(ud => StreamTypeToken(ud._1, ud._2)) + (`*`.lift ~ BasicTypeToken.`compositetypedef`).map(ud => StreamTypeToken(ud._1, ud._2)) } -case class OptionTypeToken[F[_]: Comonad](override val unit: F[Unit], data: DataTypeToken[F]) - extends DataTypeToken[F] { +case class OptionTypeToken[F[_]: Comonad](override val unit: F[Unit], data: BasicTypeToken[F]) + extends BasicTypeToken[F] { override def as[T](v: T): F[T] = unit.as(v) override def mapK[K[_]: Comonad](fk: F ~> K): OptionTypeToken[K] = @@ -60,11 +65,11 @@ case class OptionTypeToken[F[_]: Comonad](override val unit: F[Unit], data: Data object OptionTypeToken { val `optiontypedef`: P[OptionTypeToken[Span.S]] = - (`?`.lift ~ DataTypeToken.`withoutstreamdatatypedef`).map(ud => OptionTypeToken(ud._1, ud._2)) + (`?`.lift ~ BasicTypeToken.`compositetypedef`).map(ud => OptionTypeToken(ud._1, ud._2)) } -case class NamedTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken[F] { +case class NamedTypeToken[F[_]: Comonad](name: F[String]) extends BasicTypeToken[F] { override def as[T](v: T): F[T] = name.as(v) def asName: Name[F] = Name[F](name) @@ -84,21 +89,22 @@ object NamedTypeToken { `Class`.repSep(`.`).string.lift.map(NamedTypeToken(_)) } -case class BasicTypeToken[F[_]: Comonad](scalarType: F[ScalarType]) extends DataTypeToken[F] { +case class ScalarTypeToken[F[_]: Comonad](scalarType: F[ScalarType]) extends BasicTypeToken[F] { override def as[T](v: T): F[T] = scalarType.as(v) - override def mapK[K[_]: Comonad](fk: F ~> K): BasicTypeToken[K] = + override def mapK[K[_]: Comonad](fk: F ~> K): ScalarTypeToken[K] = copy(fk(scalarType)) + def value: ScalarType = scalarType.extract } -object BasicTypeToken { +object ScalarTypeToken { - val `basictypedef`: P[BasicTypeToken[Span.S]] = + val scalartypedef: P[ScalarTypeToken[Span.S]] = P.oneOf( ScalarType.all.map(n ⇒ P.string(n.name).as(n)).toList ).lift - .map(BasicTypeToken(_)) + .map(ScalarTypeToken.apply) } case class ArrowTypeToken[S[_]: Comonad]( @@ -153,29 +159,20 @@ object ArrowTypeToken { } } -object DataTypeToken { - - val `arraytypedef`: P[ArrayTypeToken[Span.S]] = - (`[]`.lift ~ `withoutstreamdatatypedef`).map(ud => ArrayTypeToken(ud._1, ud._2)) +object BasicTypeToken { val `topbottomdef`: P[TopBottomToken[Span.S]] = `⊥`.lift.map(TopBottomToken(_, isTop = false)) | `⊤`.lift.map(TopBottomToken(_, isTop = true)) - def `withoutstreamdatatypedef`: P[DataTypeToken[Span.S]] = + def `compositetypedef`: P[BasicTypeToken[Span.S]] = P.oneOf( - P.defer(`topbottomdef`) :: P.defer(`arraytypedef`) :: P.defer( - OptionTypeToken.`optiontypedef` - ) :: BasicTypeToken.`basictypedef` :: NamedTypeToken.dotted :: Nil - ) - - def `datatypedef`: P[DataTypeToken[Span.S]] = - P.oneOf( - P.defer(`topbottomdef`) :: P.defer(`arraytypedef`) :: P.defer( - StreamTypeToken.`streamtypedef` - ) :: P.defer( - OptionTypeToken.`optiontypedef` - ) :: BasicTypeToken.`basictypedef` :: NamedTypeToken.dotted :: Nil + P.defer(`topbottomdef`) :: + P.defer(ArrayTypeToken.`arraytypedef`) :: + P.defer(StreamTypeToken.`streamtypedef`) :: + P.defer(OptionTypeToken.`optiontypedef`) :: + ScalarTypeToken.`scalartypedef` :: + NamedTypeToken.dotted :: Nil ) } @@ -184,9 +181,8 @@ object TypeToken { val `typedef`: P[TypeToken[Span.S]] = P.oneOf( - ArrowTypeToken - .`arrowdef`(DataTypeToken.`datatypedef`) - .backtrack :: DataTypeToken.`datatypedef` :: Nil + ArrowTypeToken.`arrowdef`(BasicTypeToken.`compositetypedef`).backtrack :: + BasicTypeToken.`compositetypedef` :: Nil ) } diff --git 
a/parser/src/test/scala/aqua/AquaSpec.scala b/parser/src/test/scala/aqua/AquaSpec.scala index 1df39e04..6aca46db 100644 --- a/parser/src/test/scala/aqua/AquaSpec.scala +++ b/parser/src/test/scala/aqua/AquaSpec.scala @@ -3,25 +3,25 @@ package aqua import aqua.AquaSpec.spanToId import aqua.parser.expr.* import aqua.parser.expr.func.* -import aqua.parser.lexer.InfixToken.Op as InfixOp -import aqua.parser.lexer.PrefixToken.Op as PrefixOp -import aqua.parser.lexer.InfixToken.Op.* -import aqua.parser.lexer.PrefixToken.Op.* import aqua.parser.head.FromExpr.NameOrAbAs import aqua.parser.head.{FromExpr, UseFromExpr} import aqua.parser.lexer.* +import aqua.parser.lexer.InfixToken.Op.* +import aqua.parser.lexer.InfixToken.Op as InfixOp +import aqua.parser.lexer.PrefixToken.Op.* +import aqua.parser.lexer.PrefixToken.Op as PrefixOp import aqua.parser.lexer.Token.LiftToken import aqua.parser.lift.LiftParser.Implicits.idLiftParser -import aqua.types.LiteralType.{bool, number, signed, string, unsigned} -import aqua.types.{LiteralType, ScalarType} -import cats.{~>, Id} -import org.scalatest.EitherValues import aqua.parser.lift.Span import aqua.parser.lift.Span.{P0ToSpan, PToSpan} -import cats.~> -import cats.syntax.bifunctor.* -import cats.data.NonEmptyList +import aqua.types.LiteralType.{bool, number, signed, string, unsigned} +import aqua.types.{LiteralType, ScalarType} +import cats.data.NonEmptyList +import cats.syntax.bifunctor.* +import cats.{Id, ~>} +import cats.~> +import org.scalatest.EitherValues import scala.collection.mutable import scala.language.implicitConversions @@ -71,14 +71,14 @@ object AquaSpec { def toArrayType(str: String): ArrayTypeToken[Id] = ArrayTypeToken[Id]((), str) def toArrowType( - args: List[DataTypeToken[Id]], - res: Option[DataTypeToken[Id]] + args: List[BasicTypeToken[Id]], + res: Option[BasicTypeToken[Id]] ): ArrowTypeToken[Id] = ArrowTypeToken[Id]((), args.map(None -> _), res.toList) def toNamedArrow( args: List[(String, TypeToken[Id])], - res: List[DataTypeToken[Id]] + res: List[BasicTypeToken[Id]] ): ArrowTypeToken[Id] = ArrowTypeToken[Id]((), args.map(ab => Some(Name[Id](ab._1)) -> ab._2), res) @@ -90,15 +90,15 @@ object AquaSpec { def toArgSc(str: String, scalarType: ScalarType): Arg[Id] = Arg[Id](str, scToBt(scalarType)) - def scToBt(sc: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](sc) + def scToBt(sc: ScalarType): ScalarTypeToken[Id] = ScalarTypeToken[Id](sc) - val boolSc: BasicTypeToken[Id] = BasicTypeToken[Id](ScalarType.bool) - val stringSc: BasicTypeToken[Id] = BasicTypeToken[Id](ScalarType.string) + val boolSc: ScalarTypeToken[Id] = ScalarTypeToken[Id](ScalarType.bool) + val stringSc: ScalarTypeToken[Id] = ScalarTypeToken[Id](ScalarType.string) given Conversion[String, Name[Id]] = toName given Conversion[String, NamedTypeToken[Id]] = toNamedType given Conversion[Int, LiteralToken[Id]] = toNumber - given Conversion[ScalarType, BasicTypeToken[Id]] = scToBt + given Conversion[ScalarType, ScalarTypeToken[Id]] = scToBt } trait AquaSpec extends EitherValues { diff --git a/parser/src/test/scala/aqua/parser/FuncExprSpec.scala b/parser/src/test/scala/aqua/parser/FuncExprSpec.scala index 331ece51..83943d45 100644 --- a/parser/src/test/scala/aqua/parser/FuncExprSpec.scala +++ b/parser/src/test/scala/aqua/parser/FuncExprSpec.scala @@ -7,21 +7,20 @@ import aqua.parser.lexer.* import aqua.parser.lift.Span import aqua.types.ScalarType.* -import cats.{Eval, Id} import cats.data.Chain.* import cats.data.Validated.{Invalid, Valid} import cats.data.{Chain, NonEmptyList} 
import cats.free.Cofree import cats.syntax.foldable.* +import cats.{Eval, Id} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.{Inside, Inspectors} - import scala.collection.mutable import scala.language.implicitConversions class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors with AquaSpec { - import AquaSpec.{*, given} + import AquaSpec.{given, *} private val parser = Parser.spanParser @@ -31,7 +30,7 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors ) val arrowToken = - ArrowTypeToken[Id]((), List(None -> BasicTypeToken[Id](u8)), List(BasicTypeToken[Id](bool))) + ArrowTypeToken[Id]((), List(None -> ScalarTypeToken[Id](u8)), List(ScalarTypeToken[Id](bool))) arrowExpr("(peer: PeerId, other: u8 -> bool)") should be( ArrowExpr[Id]( toNamedArrow(("peer" -> toNamedType("PeerId")) :: ("other" -> arrowToken) :: Nil, Nil) @@ -41,8 +40,8 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors val arrowToken2 = ArrowTypeToken[Id]( (), - List(None -> BasicTypeToken[Id](u32), None -> BasicTypeToken[Id](u64)), - List(BasicTypeToken[Id](bool)) + List(None -> ScalarTypeToken[Id](u32), None -> ScalarTypeToken[Id](u64)), + List(ScalarTypeToken[Id](bool)) ) arrowExpr("(peer: PeerId, other: u32, u64 -> bool)") should be( ArrowExpr[Id]( @@ -50,12 +49,12 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors ) ) - val arrowToken3 = ArrowTypeToken[Id]((), List(None -> BasicTypeToken[Id](u32)), Nil) + val arrowToken3 = ArrowTypeToken[Id]((), List(None -> ScalarTypeToken[Id](u32)), Nil) arrowExpr("(peer: PeerId, ret: u32 -> ()) -> string, u32") should be( ArrowExpr[Id]( toNamedArrow( ("peer" -> toNamedType("PeerId")) :: ("ret" -> arrowToken3) :: Nil, - BasicTypeToken[Id](string) :: BasicTypeToken[Id](u32) :: Nil + ScalarTypeToken[Id](string) :: ScalarTypeToken[Id](u32) :: Nil ) ) ) diff --git a/parser/src/test/scala/aqua/parser/lexer/TypeTokenSpec.scala b/parser/src/test/scala/aqua/parser/lexer/TypeTokenSpec.scala index 3a647098..7ffd8495 100644 --- a/parser/src/test/scala/aqua/parser/lexer/TypeTokenSpec.scala +++ b/parser/src/test/scala/aqua/parser/lexer/TypeTokenSpec.scala @@ -2,40 +2,52 @@ package aqua.parser.lexer import aqua.parser.lift.LiftParser.Implicits.idLiftParser import aqua.types.ScalarType -import aqua.types.ScalarType.u32 + import cats.Id import cats.parse.Parser +import cats.syntax.option.* import org.scalatest.EitherValues import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.language.implicitConversions - class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues { import aqua.AquaSpec._ - implicit def strToBt(st: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](st) + def stToStt(st: ScalarType): ScalarTypeToken[Id] = ScalarTypeToken(st) - "Basic type" should "parse" in { - BasicTypeToken.`basictypedef`.parseAll("u32").value.mapK(spanToId) should be(strToBt(u32)) - BasicTypeToken.`basictypedef`.parseAll("()").isLeft should be(true) + "basic type token" should "parse scalar types" in { + ScalarType.all.foreach(st => + ScalarTypeToken.`scalartypedef` + .parseAll(st.name) + .value + .mapK(spanToId) should be(stToStt(st)) + ) } - "Return type" should "parse" in { + it should "not parse empty brackets" in { + ScalarTypeToken.`scalartypedef` + .parseAll("()") + .isLeft should be(true) + } + + "arrow type token" should "parse type def" in { def typedef(str: 
String) = ArrowTypeToken.typeDef().parseAll(str).value.mapK(spanToId) - def returndef(str: String) = - ArrowTypeToken.returnDef().parseAll(str).value.map(_.mapK(spanToId)) - typedef("(A -> ())") should be( ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), Nil) ) + typedef("(A -> B)") should be( ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), List(NamedTypeToken[Id]("B"))) ) + } + + it should "parse return def" in { + def returndef(str: String) = + ArrowTypeToken.returnDef().parseAll(str).value.map(_.mapK(spanToId)) returndef("(A -> B), (C -> D)") should be( List( @@ -69,11 +81,16 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues { ) } - "Arrow type" should "parse" in { + it should "parse arrow def" in { def arrowdef(str: String) = - ArrowTypeToken.`arrowdef`(DataTypeToken.`datatypedef`).parseAll(str).value.mapK(spanToId) + ArrowTypeToken + .`arrowdef`(BasicTypeToken.`compositetypedef`) + .parseAll(str) + .value + .mapK(spanToId) + def arrowWithNames(str: String) = ArrowTypeToken - .`arrowWithNames`(DataTypeToken.`datatypedef`) + .`arrowWithNames`(BasicTypeToken.`compositetypedef`) .parseAll(str) .value .mapK(spanToId) @@ -81,6 +98,7 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues { arrowdef("-> B") should be( ArrowTypeToken[Id]((), Nil, List(NamedTypeToken[Id]("B"))) ) + arrowdef("A -> B") should be( ArrowTypeToken[Id]( (), @@ -147,9 +165,11 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues { arrowWithNames("{SomeAb, SecondAb}(a: A) -> B") should be( ArrowTypeToken[Id]( (), - (Some(Name[Id]("SomeAb")) -> NamedTypeToken[Id]("SomeAb")) :: (Some(Name[Id]( - "SecondAb" - )) -> NamedTypeToken[Id]("SecondAb")) :: ( + (Some(Name[Id]("SomeAb")) -> NamedTypeToken[Id]("SomeAb")) :: (Some( + Name[Id]( + "SecondAb" + ) + ) -> NamedTypeToken[Id]("SecondAb")) :: ( Some(Name[Id]("a")) -> NamedTypeToken[Id]("A") ) :: Nil, List(NamedTypeToken[Id]("B")) @@ -159,25 +179,28 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues { arrowdef("u32 -> Boo") should be( ArrowTypeToken[Id]( (), - (None -> strToBt(u32)) :: Nil, + (None -> stToStt(ScalarType.u32)) :: Nil, List(NamedTypeToken[Id]("Boo")) ) ) + TypeToken.`typedef`.parseAll("u32 -> ()").value.mapK(spanToId) should be( - ArrowTypeToken[Id]((), (None -> strToBt(u32)) :: Nil, Nil) + ArrowTypeToken[Id]((), (None -> stToStt(ScalarType.u32)) :: Nil, Nil) ) + arrowdef("A, u32 -> B") should be( ArrowTypeToken[Id]( (), - (None -> NamedTypeToken[Id]("A")) :: (None -> strToBt(u32)) :: Nil, + (None -> NamedTypeToken[Id]("A")) :: (None -> stToStt(ScalarType.u32)) :: Nil, List(NamedTypeToken[Id]("B")) ) ) + arrowdef("[]Absolutely, u32 -> B, C") should be( ArrowTypeToken[Id]( (), (Option.empty[Name[Id]] -> ArrayTypeToken[Id]((), NamedTypeToken[Id]("Absolutely"))) :: - (Option.empty[Name[Id]] -> strToBt(u32)) :: Nil, + (Option.empty[Name[Id]] -> stToStt(ScalarType.u32)) :: Nil, NamedTypeToken[Id]("B") :: NamedTypeToken[Id]("C") :: Nil ) @@ -185,18 +208,46 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues { } - "Array type" should "parse" in { - def typedef(str: String) = TypeToken.`typedef`.parseAll(str).value.mapK(spanToId) + "data type token" should "parse nested types" in { + def typedef(str: String): BasicTypeToken[Id] = + BasicTypeToken.`compositetypedef`.parseAll(str).value.mapK(spanToId) - typedef("[]Something") should be( - ArrayTypeToken[Id]((), NamedTypeToken[Id]("Something")) + val baseTypes: List[(String, 
BasicTypeToken[Id])] = List( + "u32" -> stToStt(ScalarType.u32), + "string" -> stToStt(ScalarType.string), + "Named" -> NamedTypeToken[Id]("Named") ) - typedef("[]u32") should be( - ArrayTypeToken[Id]((), strToBt(u32)) - ) - typedef("[][]u32") should be( - ArrayTypeToken[Id]((), ArrayTypeToken[Id]((), strToBt(u32))) + + val modifiers: List[(String, BasicTypeToken[Id] => BasicTypeToken[Id])] = List( + "[]" -> ((t: BasicTypeToken[Id]) => ArrayTypeToken[Id]((), t)), + "?" -> ((t: BasicTypeToken[Id]) => OptionTypeToken[Id]((), t)), + "*" -> ((t: BasicTypeToken[Id]) => StreamTypeToken[Id]((), t)) ) + + LazyList + // Generate all cartesian products of modifiers + .unfold(modifiers)(prod => + ( + prod, + for { + m <- modifiers + (sm, mt) = m + p <- prod + (sp, pt) = p + } yield (sm + sp, mt.compose(pt)) + ).some + ) + .take(6) + .foreach { mods => + for { + base <- baseTypes + (bs, bt) = base + mod <- mods + (ms, mt) = mod + // Apply modifiers to base type + (st, t) = (ms + bs, mt(bt)) + } typedef(st) should be(t) + } } } diff --git a/semantics/src/main/scala/aqua/semantics/expr/AliasSem.scala b/semantics/src/main/scala/aqua/semantics/expr/AliasSem.scala index f2818698..bf709462 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/AliasSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/AliasSem.scala @@ -4,14 +4,15 @@ import aqua.parser.expr.AliasExpr import aqua.raw.{Raw, TypeRaw} import aqua.semantics.Prog import aqua.semantics.rules.types.TypesAlgebra -import cats.syntax.functor.* -import cats.Monad + import cats.Applicative +import cats.Monad import cats.syntax.flatMap.* +import cats.syntax.functor.* class AliasSem[S[_]](val expr: AliasExpr[S]) extends AnyVal { - def program[Alg[_]: Monad](implicit T: TypesAlgebra[S, Alg]): Prog[Alg, Raw] = + def program[Alg[_]: Monad](using T: TypesAlgebra[S, Alg]): Prog[Alg, Raw] = T.resolveType(expr.target).flatMap { case Some(t) => T.defineAlias(expr.name, t) as (TypeRaw(expr.name.value, t): Raw) case None => Applicative[Alg].pure(Raw.error("Alias type unresolved")) diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala index eca0b2b9..4b64cb08 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ArrowSem.scala @@ -58,13 +58,13 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal { // TODO: wrap with local on...via... 
val retsAndArgs = retValues zip funcArrow.codomain.toList - val dataArgsNames = funcArrow.domain.labelledData.map { case (name, _) => name } + val streamArgNames = funcArrow.domain.labelledStreams.map { case (name, _) => name } val streamsThatReturnAsStreams = retsAndArgs.collect { case (VarRaw(n, StreamType(_)), StreamType(_)) => n }.toSet // Remove arguments, and values returned as streams - val localStreams = streamsInScope -- dataArgsNames -- streamsThatReturnAsStreams + val localStreams = streamsInScope -- streamArgNames -- streamsThatReturnAsStreams // process stream that returns as not streams and all Apply*Raw retsAndArgs.traverse { diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala index f3db63db..f14ae67d 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/DeclareStreamSem.scala @@ -1,48 +1,39 @@ package aqua.semantics.expr.func -import aqua.raw.ops.DeclareStreamTag +import aqua.helpers.syntax.optiont.* import aqua.parser.expr.func.DeclareStreamExpr import aqua.raw.Raw +import aqua.raw.ops.DeclareStreamTag import aqua.raw.value.VarRaw import aqua.semantics.Prog import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.types.TypesAlgebra -import aqua.types.{ArrayType, OptionType, StreamType} +import aqua.types.* + import cats.Monad import cats.data.Chain +import cats.data.OptionT import cats.syntax.applicative.* import cats.syntax.flatMap.* import cats.syntax.functor.* class DeclareStreamSem[S[_]](val expr: DeclareStreamExpr[S]) { - def program[Alg[_]: Monad](implicit + def program[Alg[_]: Monad](using N: NamesAlgebra[S, Alg], T: TypesAlgebra[S, Alg] - ): Prog[Alg, Raw] = - Prog.leaf( - T.resolveType(expr.`type`) - .flatMap { - case Some(t: StreamType) => - N.define(expr.name, t).map(b => Option.when(b)(t)) - case Some(t: OptionType) => - val streamType = StreamType(t.element) - N.define(expr.name, streamType).map(b => Option.when(b)(streamType)) - case Some(at @ ArrayType(t)) => - val streamType = StreamType(t) - T.ensureTypeMatches(expr.`type`, streamType, at).map(b => Option.when(b)(streamType)) - case Some(t) => - val streamType = StreamType(t) - T.ensureTypeMatches(expr.`type`, streamType, t).map(b => Option.when(b)(streamType)) - case None => - None.pure[Alg] - } - .map { - case Some(streamType) => - val valueModel = VarRaw(expr.name.value, streamType) - DeclareStreamTag(valueModel).funcOpLeaf: Raw - case None => Raw.error(s"Name `${expr.name.value}` not defined") - } - ) + ): Prog[Alg, Raw] = Prog.leaf { + val sem = for { + streamType <- OptionT( + T.resolveStreamType(expr.`type`) + ) + _ <- OptionT.withFilterF( + N.define(expr.name, streamType) + ) + valueModel = VarRaw(expr.name.value, streamType) + } yield DeclareStreamTag(valueModel).funcOpLeaf: Raw + + sem.getOrElse(Raw.error(s"Name `${expr.name.value}` not defined")) + } } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ForSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ForSem.scala index 2ea4db30..1e384bf6 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ForSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ForSem.scala @@ -1,27 +1,27 @@ package aqua.semantics.expr.func -import aqua.raw.Raw import aqua.parser.expr.func.ForExpr +import aqua.parser.expr.func.ForExpr.Mode import aqua.parser.lexer.{Name, ValueToken} -import aqua.raw.value.ValueRaw 
+import aqua.raw.Raw import aqua.raw.ops.* import aqua.raw.ops.ForTag +import aqua.raw.value.ValueRaw import aqua.semantics.Prog +import aqua.semantics.expr.func.FuncOpSem import aqua.semantics.rules.ValuesAlgebra import aqua.semantics.rules.abilities.AbilitiesAlgebra import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.types.TypesAlgebra -import aqua.types.{ArrayType, BoxType, StreamType} -import aqua.semantics.expr.func.FuncOpSem +import aqua.types.* import cats.Monad -import cats.data.Chain +import cats.data.{Chain, OptionT} import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.flatMap.* import cats.syntax.functor.* import cats.syntax.option.* -import aqua.parser.expr.func.ForExpr.Mode class ForSem[S[_]](val expr: ForExpr[S]) extends AnyVal { @@ -74,19 +74,18 @@ class ForSem[S[_]](val expr: ForExpr[S]) extends AnyVal { object ForSem { - def beforeFor[S[_], F[_]: Monad](item: Name[S], iterable: ValueToken[S])(implicit + def beforeFor[S[_], F[_]: Monad]( + item: Name[S], + iterable: ValueToken[S] + )(using V: ValuesAlgebra[S, F], N: NamesAlgebra[S, F], T: TypesAlgebra[S, F] - ): F[Option[ValueRaw]] = - V.valueToRaw(iterable).flatMap { - case Some(vm) => - vm.`type` match { - case t: BoxType => - N.define(item, t.element).as(vm.some) - case dt => - T.ensureTypeMatches(iterable, ArrayType(dt), dt).as(none) - } - case _ => none.pure - } + ): F[Option[ValueRaw]] = (for { + value <- V.valueToIterable(iterable) + (raw, typ) = value + _ <- OptionT.liftF( + N.define(item, typ.element) + ) + } yield raw).value } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/FuncOpSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/FuncOpSem.scala index 5568de92..f2227b84 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/FuncOpSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/FuncOpSem.scala @@ -1,17 +1,17 @@ package aqua.semantics.expr.func +import aqua.raw.Raw +import aqua.raw.ops.{RawTag, RestrictionTag} +import aqua.semantics.rules.names.NamesAlgebra + import cats.Monad import cats.syntax.functor.* -import aqua.semantics.rules.names.NamesAlgebra -import aqua.raw.Raw -import aqua.raw.ops.{RawTag, RestrictionTag} - object FuncOpSem { - def restrictStreamsInScope[S[_], Alg[_]: Monad](tree: RawTag.Tree)(using - N: NamesAlgebra[S, Alg] - ): Alg[RawTag.Tree] = N + def restrictStreamsInScope[S[_], Alg[_]: Monad]( + tree: RawTag.Tree + )(using N: NamesAlgebra[S, Alg]): Alg[RawTag.Tree] = N .streamsDefinedWithinScope() .map(streams => streams.toList diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/OnSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/OnSem.scala index 56a4c4c1..b19c71a6 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/OnSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/OnSem.scala @@ -1,23 +1,23 @@ package aqua.semantics.expr.func -import aqua.raw.ops.{FuncOp, OnTag} import aqua.parser.expr.func.OnExpr import aqua.parser.lexer.ValueToken import aqua.raw.Raw +import aqua.raw.ops.{FuncOp, OnTag} import aqua.raw.value.ValueRaw import aqua.semantics.Prog import aqua.semantics.rules.ValuesAlgebra import aqua.semantics.rules.abilities.AbilitiesAlgebra import aqua.semantics.rules.types.TypesAlgebra -import aqua.types.{BoxType, OptionType, ScalarType} +import aqua.types.{CollectionType, OptionType, ScalarType} import cats.data.Chain import cats.data.OptionT import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.flatMap.* -import 
cats.syntax.traverse.* import cats.syntax.functor.* +import cats.syntax.traverse.* import cats.{Monad, Traverse} class OnSem[S[_]](val expr: OnExpr[S]) extends AnyVal { @@ -64,7 +64,7 @@ object OnSem { .traverse(v => OptionT(V.valueToRaw(v)).filterF { vm => val expectedType = vm.`type` match { - case _: BoxType => OptionType(ScalarType.string) + case _: CollectionType => OptionType(ScalarType.string) case _ => ScalarType.string } diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/ParSeqSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/ParSeqSem.scala index 71a255d1..670154e7 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/ParSeqSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/ParSeqSem.scala @@ -1,24 +1,24 @@ package aqua.semantics.expr.func -import aqua.raw.Raw import aqua.parser.expr.func.ParSeqExpr -import aqua.raw.value.ValueRaw +import aqua.raw.Raw import aqua.raw.ops.* import aqua.raw.ops.ForTag +import aqua.raw.value.ValueRaw import aqua.semantics.Prog import aqua.semantics.rules.ValuesAlgebra import aqua.semantics.rules.abilities.AbilitiesAlgebra import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.types.TypesAlgebra -import aqua.types.{ArrayType, BoxType, StreamType} +import aqua.types.{ArrayType, CollectionType, StreamType} import cats.Monad import cats.data.Chain -import cats.syntax.option.* import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.flatMap.* import cats.syntax.functor.* +import cats.syntax.option.* class ParSeqSem[S[_]](val expr: ParSeqExpr[S]) extends AnyVal { diff --git a/semantics/src/main/scala/aqua/semantics/expr/func/PushToStreamSem.scala b/semantics/src/main/scala/aqua/semantics/expr/func/PushToStreamSem.scala index be8eb3df..076408d2 100644 --- a/semantics/src/main/scala/aqua/semantics/expr/func/PushToStreamSem.scala +++ b/semantics/src/main/scala/aqua/semantics/expr/func/PushToStreamSem.scala @@ -1,15 +1,19 @@ package aqua.semantics.expr.func -import aqua.raw.ops.{Call, PushToStreamTag} +import aqua.helpers.syntax.optiont.* import aqua.parser.expr.func.PushToStreamExpr import aqua.parser.lexer.Token import aqua.raw.Raw +import aqua.raw.ops.{Call, PushToStreamTag} import aqua.semantics.Prog import aqua.semantics.rules.ValuesAlgebra import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.types.TypesAlgebra -import aqua.types.{ArrayType, StreamType, Type} +import aqua.types.* +import aqua.types.TopType + import cats.Monad +import cats.data.OptionT import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.flatMap.* @@ -22,24 +26,14 @@ class PushToStreamSem[S[_]](val expr: PushToStreamExpr[S]) extends AnyVal { elementToken: Token[S], stream: Type, element: Type - )(implicit - T: TypesAlgebra[S, Alg] - ): Alg[Boolean] = - stream match { - case StreamType(st) => - T.ensureTypeMatches(elementToken, st, element) - case _ => - T.ensureTypeMatches( - streamToken, - StreamType(element match { - case StreamType(e) => ArrayType(e) - case _ => element - }), - stream - ) - } + )(using T: TypesAlgebra[S, Alg]): Alg[Boolean] = ( + T.typeToStream(streamToken, stream), + T.typeToCollectible(elementToken, element) + ).merged.semiflatMap { case (st, et) => + T.ensureTypeMatches(elementToken, st.element, et) + }.getOrElse(false) - def program[Alg[_]: Monad](implicit + def program[Alg[_]: Monad](using N: NamesAlgebra[S, Alg], T: TypesAlgebra[S, Alg], V: ValuesAlgebra[S, Alg] diff --git 
a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala index 2b7e643b..30429a31 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/ValuesAlgebra.scala @@ -1,6 +1,9 @@ package aqua.semantics.rules +import aqua.errors.Errors.internalError +import aqua.helpers.syntax.optiont.* import aqua.parser.lexer.* +import aqua.parser.lexer.InfixToken.value import aqua.parser.lexer.InfixToken.{BoolOp, CmpOp, EqOp, MathOp, Op as InfOp} import aqua.parser.lexer.PrefixToken.Op as PrefOp import aqua.raw.value.* @@ -9,7 +12,6 @@ import aqua.semantics.rules.names.NamesAlgebra import aqua.semantics.rules.report.ReportAlgebra import aqua.semantics.rules.types.TypesAlgebra import aqua.types.* -import aqua.helpers.syntax.optiont.* import cats.Monad import cats.data.{NonEmptyList, OptionT} @@ -151,26 +153,22 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using raws .zip(values) .traverse { case (raw, token) => - T.ensureTypeIsCollectible(token, raw.`type`) - .map(Option.when(_)(raw)) + T.typeToCollectible(token, raw.`type`).map(raw -> _) } - .map(_.sequence) + .value ) raw = valuesRawChecked.map(raws => NonEmptyList .fromList(raws) .fold(ValueRaw.Nil) { nonEmpty => - val element = raws.map(_.`type`).reduceLeft(_ `∩` _) - // In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type". - // But we want to get to TopType instead: this would mean that intersection is empty, and you cannot - // make any decision about the structure of type, but can push anything inside - val elementNotBottom = if (element == BottomType) TopType else element + val (values, types) = nonEmpty.unzip + val element = CollectionType.elementTypeOf(types.toList) CollectionRaw( - nonEmpty, + values, ct.mode match { - case CollectionToken.Mode.StreamMode => StreamType(elementNotBottom) - case CollectionToken.Mode.ArrayMode => ArrayType(elementNotBottom) - case CollectionToken.Mode.OptionMode => OptionType(elementNotBottom) + case CollectionToken.Mode.StreamMode => StreamType(element) + case CollectionToken.Mode.ArrayMode => ArrayType(element) + case CollectionToken.Mode.OptionMode => OptionType(element) } ) } @@ -323,14 +321,19 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using } ) + def valueToIterable(v: ValueToken[S]): OptionT[Alg, (ValueRaw, CollectionType)] = + for { + raw <- OptionT(valueToRaw(v)) + typ <- T.typeToIterable(v, raw.`type`) + } yield raw -> typ + def valueToTypedRaw(v: ValueToken[S], expectedType: Type): Alg[Option[ValueRaw]] = - OptionT(valueToRaw(v)) - .flatMap(raw => - OptionT.whenM( - T.ensureTypeMatches(v, expectedType, raw.`type`) - )(raw.pure) + (for { + raw <- OptionT(valueToRaw(v)) + _ <- OptionT.withFilterF( + T.ensureTypeMatches(v, expectedType, raw.`type`) ) - .value + } yield raw).value def valueToStringRaw(v: ValueToken[S]): Alg[Option[ValueRaw]] = valueToTypedRaw(v, LiteralType.string) diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypeResolution.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypeResolution.scala new file mode 100644 index 00000000..140c5c22 --- /dev/null +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypeResolution.scala @@ -0,0 +1,112 @@ +package aqua.semantics.rules.types + +import aqua.parser.lexer.* +import aqua.types.* + +import cats.data.ValidatedNec +import cats.syntax.apply.* +import cats.syntax.bifunctor.* +import cats.syntax.either.* +import 
cats.syntax.functor.* +import cats.syntax.option.* +import cats.syntax.traverse.* +import cats.syntax.validated.* + +final case class TypeResolution[S[_], +T]( + `type`: T, + definitions: List[(Token[S], NamedTypeToken[S])] +) + +object TypeResolution { + + final case class TypeResolutionError[S[_]]( + token: Token[S], + hint: String + ) + + type Res[S[_], A] = ValidatedNec[ + TypeResolutionError[S], + TypeResolution[S, A] + ] + + private def resolveCollection[S[_]]( + tt: TypeToken[S], + collectionName: String, + collectionType: DataType => Type + )(state: TypesState[S]): Res[S, Type] = + resolveTypeToken(tt)(state).andThen { + case TypeResolution(it: DataType, t) => + TypeResolution(collectionType(it), t).validNec + case TypeResolution(it, _) => + TypeResolutionError( + tt, + s"$collectionName could not contain values of type $it" + ).invalidNec + } + + def resolveTypeToken[S[_]]( + tt: TypeToken[S] + )(state: TypesState[S]): Res[S, Type] = + tt match { + case TopBottomToken(_, isTop) => + val `type` = if (isTop) TopType else BottomType + + TypeResolution(`type`, Nil).validNec + case ArrayTypeToken(_, dtt) => + resolveCollection(dtt, "Array", ArrayType.apply)(state) + case StreamTypeToken(_, dtt) => + resolveCollection(dtt, "Stream", StreamType.apply)(state) + case OptionTypeToken(_, dtt) => + resolveCollection(dtt, "Option", OptionType.apply)(state) + case ntt: NamedTypeToken[S] => + val defs = state + .getTypeDefinition(ntt.value) + .toList + .map(ntt -> _) + + state + .getType(ntt.value) + .map(typ => TypeResolution(typ, defs)) + .toValidNec( + TypeResolutionError( + ntt, + s"Type ${ntt.value} is not defined" + ) + ) + case stt: ScalarTypeToken[S] => + TypeResolution(stt.value, Nil).validNec + case att: ArrowTypeToken[S] => + resolveArrowDef(att)(state) + } + + def resolveArrowDef[S[_]]( + arrowTypeToken: ArrowTypeToken[S] + )(state: TypesState[S]): Res[S, ArrowType] = { + val res = arrowTypeToken.res + .traverse(typeToken => resolveTypeToken(typeToken)(state).toEither) + val args = arrowTypeToken.args.traverse { case (argName, typeToken) => + resolveTypeToken(typeToken)(state) + .map(argName.map(_.value) -> _) + .toEither + } + + (args, res).mapN { (args, res) => + val (argsLabeledTypes, argsTokens) = + args.map { case lbl -> TypeResolution(typ, tkn) => + (lbl, typ) -> tkn + }.unzip.map(_.flatten) + val (resTypes, resTokens) = + res.map { case TypeResolution(typ, tkn) => + typ -> tkn + }.unzip.map(_.flatten) + + val typ = ArrowType( + ProductType.maybeLabelled(argsLabeledTypes), + ProductType(resTypes) + ) + val defs = (argsTokens ++ resTokens) + + TypeResolution(typ, defs) + }.toValidated + } +} diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala index 99a6c4d2..742ae69d 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesAlgebra.scala @@ -3,14 +3,18 @@ package aqua.semantics.rules.types import aqua.parser.lexer.* import aqua.raw.value.{PropertyRaw, ValueRaw} import aqua.types.* +import aqua.types.Type.* -import cats.data.NonEmptyMap import cats.data.NonEmptyList +import cats.data.NonEmptyMap +import cats.data.OptionT trait TypesAlgebra[S[_], Alg[_]] { def resolveType(token: TypeToken[S]): Alg[Option[Type]] - + + def resolveStreamType(token: TypeToken[S]): Alg[Option[StreamType]] + def resolveNamedType(token: TypeToken[S]): Alg[Option[AbilityType | StructType]] def getType(name: 
String): Alg[Option[Type]] @@ -56,7 +60,11 @@ trait TypesAlgebra[S[_], Alg[_]] { def ensureTypeMatches(token: Token[S], expected: Type, givenType: Type): Alg[Boolean] - def ensureTypeIsCollectible(token: Token[S], givenType: Type): Alg[Boolean] + def typeToCollectible(token: Token[S], givenType: Type): OptionT[Alg, CollectibleType] + + def typeToStream(token: Token[S], givenType: Type): OptionT[Alg, StreamType] + + def typeToIterable(token: Token[S], givenType: Type): OptionT[Alg, CollectionType] def ensureTypeOneOf[T <: Type]( token: Token[S], diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala index add0cc17..00fd1678 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesInterpreter.scala @@ -1,35 +1,28 @@ package aqua.semantics.rules.types import aqua.parser.lexer.* -import aqua.raw.value.{ - FunctorRaw, - IntoArrowRaw, - IntoCopyRaw, - IntoFieldRaw, - IntoIndexRaw, - PropertyRaw, - ValueRaw -} -import aqua.semantics.rules.locations.LocationsAlgebra +import aqua.raw.value.* import aqua.semantics.rules.StackInterpreter +import aqua.semantics.rules.locations.LocationsAlgebra import aqua.semantics.rules.report.ReportAlgebra -import aqua.semantics.rules.types.TypesStateHelper.{TypeResolution, TypeResolutionError} +import aqua.semantics.rules.types.TypeResolution.TypeResolutionError import aqua.types.* +import aqua.types.Type.* import cats.data.Validated.{Invalid, Valid} import cats.data.{Chain, NonEmptyList, NonEmptyMap, OptionT, State} import cats.syntax.applicative.* import cats.syntax.apply.* import cats.syntax.flatMap.* -import cats.syntax.functor.* -import cats.syntax.traverse.* import cats.syntax.foldable.* -import cats.{~>, Applicative} +import cats.syntax.functor.* import cats.syntax.option.* +import cats.syntax.traverse.* +import cats.{Applicative, ~>} import monocle.Lens import monocle.macros.GenLens - import scala.collection.immutable.SortedMap +import scala.reflect.TypeTest class TypesInterpreter[S[_], X](using lens: Lens[X, TypesState[S]], @@ -49,15 +42,22 @@ class TypesInterpreter[S[_], X](using getState.map(st => st.strict.get(name)) override def resolveType(token: TypeToken[S]): State[X, Option[Type]] = - getState.map(TypesStateHelper.resolveTypeToken(token)).flatMap { - case Some(TypeResolution(typ, tokens)) => + getState.map(TypeResolution.resolveTypeToken(token)).flatMap { + case Valid(TypeResolution(typ, tokens)) => val tokensLocs = tokens.map { case (t, n) => n.value -> t } locations.pointLocations(tokensLocs).as(typ.some) - case None => - // TODO: Give more specific error message - report.error(token, s"Unresolved type").as(None) + case Invalid(errors) => + errors.traverse_ { case TypeResolutionError(token, hint) => + report.error(token, hint) + }.as(none) } + override def resolveStreamType(token: TypeToken[S]): State[X, Option[StreamType]] = + OptionT(resolveType(token)).flatMapF { + case st: StreamType => st.some.pure[ST] + case t => report.error(token, s"Expected stream type, got $t").as(none) + }.value + def resolveNamedType(token: TypeToken[S]): State[X, Option[AbilityType | StructType]] = resolveType(token).flatMap(_.flatTraverse { case t: (AbilityType | StructType) => Option(t).pure @@ -65,7 +65,7 @@ class TypesInterpreter[S[_], X](using }) override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] = - 
getState.map(TypesStateHelper.resolveArrowDef(arrowDef)).flatMap { + getState.map(TypeResolution.resolveArrowDef(arrowDef)).flatMap { case Valid(TypeResolution(tt, tokens)) => val tokensLocs = tokens.map { case (t, n) => n.value -> t } locations.pointLocations(tokensLocs).as(tt.some) @@ -142,11 +142,7 @@ class TypesInterpreter[S[_], X](using ensureNameNotDefined(name.value, name, ifDefined = none)( fields.toList.traverse { case (field, (fieldName, t: DataType)) => - t match { - case _: StreamType => - report.error(fieldName, s"Field '$field' has stream type").as(none) - case _ => (field -> t).some.pure[ST] - } + (field -> t).some.pure[ST] case (field, (fieldName, t)) => report .error( @@ -294,7 +290,7 @@ class TypesInterpreter[S[_], X](using op.idx.fold( State.pure(Some(IntoIndexRaw(idx, ot.element))) )(v => report.error(v, s"Options might have only one element, use ! to get it").as(None)) - case rt: BoxType => + case rt: CollectionType => State.pure(Some(IntoIndexRaw(idx, rt.element))) case _ => report.error(op, s"Expected $rootT to be a collection type").as(None) @@ -318,7 +314,7 @@ class TypesInterpreter[S[_], X](using true case (LiteralType.signed, rst: ScalarType) if ScalarType.number(rst) => true - case (lbt: BoxType, rbt: BoxType) => + case (lbt: CollectionType, rbt: CollectionType) => isComparable(lbt.element, rbt.element) // Prohibit comparing abilities case (_: AbilityType, _: AbilityType) => @@ -329,7 +325,7 @@ class TypesInterpreter[S[_], X](using case (LiteralType(xs, _), LiteralType(ys, _)) => xs.intersect(ys).nonEmpty case _ => - lt.uniteTop(rt) != TopType + lt `∪` rt != TopType } if (isComparable(left, right)) State.pure(true) @@ -379,35 +375,71 @@ class TypesInterpreter[S[_], X](using } } case _ => - val notes = - if (expected.acceptsValueOf(OptionType(givenType))) + val notes = (expected, givenType) match { + case (_, dt: DataType) if expected.acceptsValueOf(OptionType(dt)) => "note: Try converting value to optional" :: Nil - else if (givenType.acceptsValueOf(OptionType(expected))) + case (dt: DataType, _) if givenType.acceptsValueOf(OptionType(dt)) => "note: You're providing an optional value where normal value is expected." :: "You can extract value with `!`, but be aware it may trigger join behaviour." :: Nil - else Nil + case _ => Nil + } + report .error( token, - "Types mismatch." :: s"expected: $expected" :: s"given: $givenType" :: Nil ++ notes + "Types mismatch." 
+: + s"expected: $expected" +: + s"given: $givenType" +: + notes ) .as(false) } } - override def ensureTypeIsCollectible(token: Token[S], givenType: Type): State[X, Boolean] = + private def typeTo[T <: Type]( + token: Token[S], + givenType: Type, + error: String + )(using tt: TypeTest[Type, T]): OptionT[State[X, *], T] = givenType match { - case _: DataType => true.pure + case t: T => OptionT.pure(t) case _ => - report - .error( - token, - s"Value of type '$givenType' could not be put into a collection" - ) - .as(false) + OptionT.liftF( + report.error(token, error) + ) *> OptionT.none } + override def typeToCollectible( + token: Token[S], + givenType: Type + ): OptionT[State[X, *], CollectibleType] = + typeTo[CollectibleType]( + token, + givenType, + s"Value of type '$givenType' could not be put into a collection" + ) + + override def typeToStream( + token: Token[S], + givenType: Type + ): OptionT[State[X, *], StreamType] = + typeTo[StreamType]( + token, + givenType, + s"Expected stream value, got value of type '$givenType'" + ) + + override def typeToIterable( + token: Token[S], + givenType: Type + ): OptionT[State[X, *], CollectionType] = + typeTo[CollectionType]( + token, + givenType, + s"Value of type '$givenType' could not be iterated over" + ) + override def ensureTypeOneOf[T <: Type]( token: Token[S], expected: Set[T], diff --git a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala index 731d1cbd..0749c1a3 100644 --- a/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala +++ b/semantics/src/main/scala/aqua/semantics/rules/types/TypesState.scala @@ -1,20 +1,19 @@ package aqua.semantics.rules.types -import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw} import aqua.parser.lexer.* -import aqua.types.* import aqua.raw.RawContext +import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw} +import aqua.types.* import cats.data.Validated.{Invalid, Valid} import cats.data.{Chain, NonEmptyChain, ValidatedNec} import cats.kernel.Monoid -import cats.syntax.option.* -import cats.syntax.traverse.* -import cats.syntax.validated.* import cats.syntax.apply.* import cats.syntax.bifunctor.* import cats.syntax.functor.* -import cats.syntax.apply.* +import cats.syntax.option.* +import cats.syntax.traverse.* +import cats.syntax.validated.* case class TypesState[S[_]]( fields: Map[String, (Name[S], Type)] = Map(), @@ -37,97 +36,6 @@ case class TypesState[S[_]]( definitions.get(name) } -object TypesStateHelper { - - final case class TypeResolution[S[_], +T]( - `type`: T, - definitions: List[(Token[S], NamedTypeToken[S])] - ) - - final case class TypeResolutionError[S[_]]( - token: Token[S], - hint: String - ) - - def resolveTypeToken[S[_]](tt: TypeToken[S])( - state: TypesState[S] - ): Option[TypeResolution[S, Type]] = - tt match { - case TopBottomToken(_, isTop) => - val `type` = if (isTop) TopType else BottomType - - TypeResolution(`type`, Nil).some - case ArrayTypeToken(_, dtt) => - resolveTypeToken(dtt)(state).collect { case TypeResolution(it: DataType, t) => - TypeResolution(ArrayType(it), t) - } - case StreamTypeToken(_, dtt) => - resolveTypeToken(dtt)(state).collect { case TypeResolution(it: DataType, t) => - TypeResolution(StreamType(it), t) - } - case OptionTypeToken(_, dtt) => - resolveTypeToken(dtt)(state).collect { case TypeResolution(it: DataType, t) => - TypeResolution(OptionType(it), t) - } - case ntt: NamedTypeToken[S] => - val defs 
= state - .getTypeDefinition(ntt.value) - .toList - .map(ntt -> _) - - state - .getType(ntt.value) - .map(typ => TypeResolution(typ, defs)) - case btt: BasicTypeToken[S] => - TypeResolution(btt.value, Nil).some - case att: ArrowTypeToken[S] => - resolveArrowDef(att)(state).toOption - } - - def resolveArrowDef[S[_]](arrowTypeToken: ArrowTypeToken[S])( - state: TypesState[S] - ): ValidatedNec[TypeResolutionError[S], TypeResolution[S, ArrowType]] = { - val res = arrowTypeToken.res.traverse(typeToken => - resolveTypeToken(typeToken)(state) - .toValidNec( - TypeResolutionError( - typeToken, - "Can not resolve the result type" - ) - ) - ) - val args = arrowTypeToken.args.traverse { case (argName, typeToken) => - resolveTypeToken(typeToken)(state) - .toValidNec( - TypeResolutionError( - typeToken, - "Can not resolve the argument type" - ) - ) - .map(argName.map(_.value) -> _) - } - - (args, res).mapN { (args, res) => - val (argsLabeledTypes, argsTokens) = - args.map { case lbl -> TypeResolution(typ, tkn) => - (lbl, typ) -> tkn - }.unzip.map(_.flatten) - val (resTypes, resTokens) = - res.map { case TypeResolution(typ, tkn) => - typ -> tkn - }.unzip.map(_.flatten) - - val typ = ArrowType( - ProductType.maybeLabelled(argsLabeledTypes), - ProductType(resTypes) - ) - val defs = (argsTokens ++ resTokens) - - TypeResolution(typ, defs) - } - } -} - object TypesState { final case class TypeDefinition[S[_]]( diff --git a/semantics/src/test/scala/aqua/semantics/ArrowSemSpec.scala b/semantics/src/test/scala/aqua/semantics/ArrowSemSpec.scala index 1620dd1e..39242705 100644 --- a/semantics/src/test/scala/aqua/semantics/ArrowSemSpec.scala +++ b/semantics/src/test/scala/aqua/semantics/ArrowSemSpec.scala @@ -1,7 +1,7 @@ package aqua.semantics import aqua.parser.expr.func.ArrowExpr -import aqua.parser.lexer.{BasicTypeToken, Name} +import aqua.parser.lexer.{Name, ScalarTypeToken} import aqua.raw.Raw import aqua.raw.arrow.ArrowRaw import aqua.raw.ops.* @@ -12,12 +12,12 @@ import aqua.types.* import aqua.types.ScalarType.* import cats.Id -import cats.syntax.applicative.* import cats.data.{NonEmptyList, NonEmptyMap, State} +import cats.syntax.applicative.* import org.scalatest.EitherValues +import org.scalatest.Inside import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.scalatest.Inside class ArrowSemSpec extends AnyFlatSpec with Matchers with EitherValues with Inside { @@ -84,7 +84,7 @@ class ArrowSemSpec extends AnyFlatSpec with Matchers with EitherValues with Insi val state = getState(seq)(program("(a: string, b: u32) -> u32")) state.errors.headOption.get shouldBe RulesViolated[Id]( - BasicTypeToken[Id](u32), + ScalarTypeToken[Id](u32), "Types mismatch, expected: u32, given: string" :: Nil ) @@ -112,7 +112,7 @@ class ArrowSemSpec extends AnyFlatSpec with Matchers with EitherValues with Insi state.errors shouldBe empty inside(raw) { case ArrowRaw(_, Nil, bodyRes) => - bodyRes shouldBe body + bodyRes.equalsOrShowDiff(body) shouldBe true } } diff --git a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala index d2315075..8b92e3c2 100644 --- a/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala +++ b/semantics/src/test/scala/aqua/semantics/SemanticsSpec.scala @@ -1,26 +1,27 @@ package aqua.semantics -import aqua.raw.RawContext import aqua.parser.Ast -import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp, OnTag, ParTag, RawTag, SeqGroupTag, SeqTag} import aqua.parser.Parser import 
aqua.parser.lift.{LiftParser, Span} +import aqua.raw.ConstantRaw +import aqua.raw.RawContext +import aqua.raw.ops.* +import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp, OnTag, ParTag, RawTag, SeqGroupTag, SeqTag} import aqua.raw.value.* import aqua.types.* -import aqua.raw.ops.* -import org.scalatest.flatspec.AnyFlatSpec -import org.scalatest.matchers.should.Matchers -import org.scalatest.Inside -import cats.~> +import cats.Eval +import cats.data.State +import cats.data.Validated import cats.data.{Chain, EitherNec, NonEmptyChain} +import cats.free.Cofree +import cats.syntax.foldable.* import cats.syntax.show.* import cats.syntax.traverse.* -import cats.syntax.foldable.* -import cats.data.Validated -import cats.free.Cofree -import cats.data.State -import cats.Eval +import cats.~> +import org.scalatest.Inside +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { @@ -40,7 +41,11 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { Any ] ): Unit = inside(parser(script)) { case Validated.Valid(ast) => - val init = RawContext.blank + val init = RawContext.blank.copy( + parts = Chain + .fromSeq(ConstantRaw.defaultConstants()) + .map(const => RawContext.blank -> const) + ) inside(semantics.process(ast, init).value.run)(test) } @@ -839,4 +844,52 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside { } } } + + it should "allow pushing `nil` to stream" in { + def test(quantifier: String) = { + val script = s""" + |func test() -> []${quantifier}string: + | stream: *${quantifier}string + | stream <<- nil + | <- stream + |""".stripMargin + + insideBody(script) { body => + matchSubtree(body) { case (PushToStreamTag(VarRaw(name, _), _), _) => + name shouldEqual "nil" + } + } + } + + test("?") + test("[]") + } + + it should "allow putting stream into collection" in { + def test(t: String, p: String) = { + val script = s""" + |service Srv("test-srv"): + | consume(value: ${t}[]string) + | + |func test(): + | stream: *string + | Srv.consume(${p}[stream]) + |""".stripMargin + + insideBody(script) { body => + println(body.show) + matchSubtree(body) { case (CallArrowRawTag(_, ca: CallArrowRaw), _) => + inside(ca.arguments) { case (c: CollectionRaw) :: Nil => + c.values.exists { + case VarRaw(name, _) => name == "stream" + case _ => false + } should be(true) + } + } + } + } + + test("[]", "") + test("?", "?") + } } diff --git a/semantics/src/test/scala/aqua/semantics/TypeResolutionSpec.scala b/semantics/src/test/scala/aqua/semantics/TypeResolutionSpec.scala new file mode 100644 index 00000000..cfb33501 --- /dev/null +++ b/semantics/src/test/scala/aqua/semantics/TypeResolutionSpec.scala @@ -0,0 +1,179 @@ +package aqua.semantics + +import aqua.parser.lexer.* +import aqua.semantics.rules.types.TypeResolution.TypeResolutionError +import aqua.semantics.rules.types.{TypeResolution, TypesState} +import aqua.types.* + +import cats.Endo +import cats.Id +import cats.SemigroupK +import cats.data.NonEmptyMap +import cats.data.Validated.* +import cats.kernel.Semigroup +import cats.syntax.foldable.* +import cats.syntax.option.* +import cats.syntax.semigroup.* +import org.scalatest.Inside +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class TypeResolutionSpec extends AnyFlatSpec with Matchers with Inside { + + given [A, B]: Semigroup[(Endo[A], Endo[B])] with { + private val algebraA = SemigroupK[Endo].algebra[A] + private val algebraB = 
SemigroupK[Endo].algebra[B] + + override def combine(x: (Endo[A], Endo[B]), y: (Endo[A], Endo[B])): (Endo[A], Endo[B]) = + (algebraA.combine(x._1, y._1), algebraB.combine(x._2, y._2)) + } + + def stt(st: ScalarType): ScalarTypeToken[Id] = ScalarTypeToken(st) + + def ntt(name: String): NamedTypeToken[Id] = NamedTypeToken(name) + + def resolve( + token: TypeToken[Id], + types: Map[String, Type] + ): TypeResolution.Res[Id, Type] = + TypeResolution.resolveTypeToken(token)(TypesState(strict = types)) + + val validCollectionModifiers: LazyList[ + List[(Endo[BasicTypeToken[Id]], DataType => Type)] + ] = { + val baseModifiers: List[(Endo[BasicTypeToken[Id]], Endo[DataType])] = List( + (ArrayTypeToken[Id]((), _)) -> (ArrayType.apply), + (OptionTypeToken[Id]((), _)) -> (OptionType.apply) + ) + + val streamModifier = (dt: BasicTypeToken[Id]) => StreamTypeToken[Id]((), dt) + + val dataModifiers = LazyList.unfold(baseModifiers) { mods => + ( + mods, + for { + m <- mods + b <- baseModifiers + } yield m combine b + ).some + } + + dataModifiers.map { mods => + mods.map { case (token, typ) => + (token andThen streamModifier) -> (typ andThen StreamType.apply) + } ++ mods + }.prepended(List((streamModifier, StreamType.apply))).take(6) + } + + val structType = StructType("Struct", NonEmptyMap.of("field" -> ScalarType.i8)) + + "TypeResolution resolveTypeToken" should "resolve basic types" in { + val baseTypes = List( + stt(ScalarType.u32) -> ScalarType.u32, + stt(ScalarType.string) -> ScalarType.string, + ntt("Struct") -> structType + ) + + for { + base <- baseTypes + (token, expected) = base + } inside(resolve(token, Map("Struct" -> structType))) { + case Valid(TypeResolution(result, Nil)) => + result shouldEqual expected + } + } + + it should "resolve nested types" in { + val baseTypes = List( + stt(ScalarType.u32) -> ScalarType.u32, + stt(ScalarType.string) -> ScalarType.string, + ntt("Struct") -> structType + ) + + validCollectionModifiers + .take(6) + .toList + .flatten + .foreach(modifier => + for { + base <- baseTypes + (btoken, btype) = base + (mod, typ) = modifier + } inside(resolve(mod(btoken), Map("Struct" -> structType))) { + case Valid(TypeResolution(result, Nil)) => + result shouldEqual typ(btype) + } + ) + } + + it should "forbid services and abilities in collections" in { + val arrow = NonEmptyMap.of("arrow" -> ArrowType(ProductType(Nil), ProductType(Nil))) + + val serviceType = ServiceType("Srv", arrow) + val abilityType = AbilityType("Abl", arrow) + + val types = List( + ntt(serviceType.name) -> serviceType, + ntt(abilityType.name) -> abilityType + ) + + validCollectionModifiers + .take(6) + .toList + .flatten + .foreach(modifier => + for { + base <- types + (btoken, btype) = base + (mod, _) = modifier + } inside( + resolve( + mod(btoken), + Map( + serviceType.name -> serviceType, + abilityType.name -> abilityType + ) + ) + ) { case Invalid(errors) => + errors.exists(_.hint.contains("contain")) shouldBe true + } + ) + } + + it should "forbid streams inside any collection" in { + val baseTypes = List( + stt(ScalarType.u32), + stt(ScalarType.string), + ntt("Struct") + ) + + val modifiers = validCollectionModifiers + .map(_.map { case (token, _) => token }) + .take(3) + .toList + .flatten + + for { + left <- modifiers + right <- identity[BasicTypeToken[Id]] +: modifiers + base <- baseTypes + t = left(StreamTypeToken[Id]((), right(base))) + } inside( + resolve(t, Map(structType.name -> structType)) + ) { case Invalid(errors) => + errors.exists(_.hint.contains("of type *")) shouldBe true + } + } + 
+ it should "forbid stream of streams through alias" in { + val streamType = StreamType(ScalarType.u32) + + val t = StreamTypeToken[Id]((), ntt("Als")) + + inside( + resolve(t, Map("Als" -> streamType)) + ) { case Invalid(errors) => + errors.exists(_.hint.contains("of type *")) shouldBe true + } + } +} diff --git a/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala b/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala index 41309126..107d6bd1 100644 --- a/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala +++ b/semantics/src/test/scala/aqua/semantics/ValuesAlgebraSpec.scala @@ -12,13 +12,13 @@ import aqua.semantics.rules.names.{NamesAlgebra, NamesInterpreter, NamesState} import aqua.semantics.rules.report.{ReportAlgebra, ReportInterpreter} import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter} import aqua.types.* + import cats.Id import cats.data.{NonEmptyList, NonEmptyMap, State} import monocle.syntax.all.* import org.scalatest.Inside import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - import scala.collection.immutable.SortedMap class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside { @@ -137,7 +137,7 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside { val vl = variable("left") val vr = variable("right") - val ut = lt.uniteTop(rt) + val ut = lt `∪` rt val state = genState( vars = Map( diff --git a/types/src/main/scala/aqua/types/IntersectTypes.scala b/types/src/main/scala/aqua/types/IntersectTypes.scala index 7dc1041b..88d8938e 100644 --- a/types/src/main/scala/aqua/types/IntersectTypes.scala +++ b/types/src/main/scala/aqua/types/IntersectTypes.scala @@ -1,5 +1,7 @@ package aqua.types +import aqua.errors.Errors.internalError + import cats.Monoid import cats.data.NonEmptyMap @@ -19,6 +21,12 @@ case class IntersectTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type] ap.toList.zip(bp.toList).map(combine) ) + private def combineDataTypes(a: DataType, b: DataType): DataType = + (a `∩` b) match { + case d: DataType => d + case t => internalError(s"$a ∩ $b yields non-data type $t") + } + override def combine(a: Type, b: Type): Type = (a, b) match { case _ if CompareTypes(a, b) == 0.0 => a @@ -39,18 +47,20 @@ case class IntersectTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type] combineProducts(aa.codomain, bb.codomain) ) - case (ac: OptionType, bc: BoxType) => - OptionType(ac.element `∩` bc.element) + case (ac: OptionType, bc: CollectionType) => + OptionType(combineDataTypes(ac.element, bc.element)) + case (ac: CollectionType, bc: OptionType) => + OptionType(combineDataTypes(ac.element, bc.element)) - case (ac: BoxType, bc: OptionType) => - OptionType(ac.element `∩` bc.element) + case (ac: ArrayType, bc: CollectionType) => + ArrayType(combineDataTypes(ac.element, bc.element)) + case (ac: CollectionType, bc: ArrayType) => + ArrayType(combineDataTypes(ac.element, bc.element)) - case (ac: ArrayType, bc: BoxType) => - ArrayType(ac.element `∩` bc.element) - case (ac: BoxType, bc: ArrayType) => - ArrayType(ac.element `∩` bc.element) case (ac: StreamType, bc: StreamType) => - StreamType(ac.element `∩` bc.element) + StreamType(combineDataTypes(ac.element, bc.element)) + case (ac: StreamMapType, bc: StreamMapType) => + StreamMapType(combineDataTypes(ac.element, bc.element)) case (a: ScalarType, b: ScalarType) => scalarsCombine(a, b) diff --git a/types/src/main/scala/aqua/types/Type.scala b/types/src/main/scala/aqua/types/Type.scala index 7a7a1fe4..5df10c50 100644 
--- a/types/src/main/scala/aqua/types/Type.scala +++ b/types/src/main/scala/aqua/types/Type.scala @@ -1,33 +1,39 @@ package aqua.types -import cats.PartialOrder -import cats.data.NonEmptyMap +import aqua.errors.Errors.internalError +import aqua.types.Type.* + import cats.Eval -import cats.syntax.traverse.* +import cats.PartialOrder +import cats.data.NonEmptyList +import cats.data.NonEmptyMap import cats.syntax.applicative.* import cats.syntax.option.* +import cats.syntax.partialOrder.* +import cats.syntax.traverse.* sealed trait Type { - def acceptsValueOf(incoming: Type): Boolean = { - import Type.typesPartialOrder - import cats.syntax.partialOrder._ + def acceptsValueOf(incoming: Type): Boolean = this >= incoming - } def isInhabited: Boolean = true - infix def `∩`(other: Type): Type = intersectBottom(other) + infix def `∩`[T <: Type](other: T): Type = intersectBottom(other) - def intersectTop(other: Type): Type = IntersectTypes.top.combine(this, other) + private final def intersectTop(other: Type): Type = + IntersectTypes.top.combine(this, other) - def intersectBottom(other: Type): Type = IntersectTypes.bottom.combine(this, other) + private final def intersectBottom(other: Type): Type = + IntersectTypes.bottom.combine(this, other) - infix def `∪`(other: Type): Type = uniteTop(other) + infix def `∪`[T <: Type](other: T): Type = uniteTop(other) - def uniteTop(other: Type): Type = UniteTypes.top.combine(this, other) + private final def uniteTop(other: Type): Type = + UniteTypes.top.combine(this, other) - def uniteBottom(other: Type): Type = UniteTypes.bottom.combine(this, other) + private final def uniteBottom(other: Type): Type = + UniteTypes.bottom.combine(this, other) def properties: Map[String, Type] = Map.empty @@ -73,8 +79,7 @@ sealed trait ProductType extends Type { */ def toLabelledList(prefix: String = "arg", index: Int = 0): List[(String, Type)] = this match { case LabeledConsType(label, t, pt) => (label -> t) :: pt.toLabelledList(prefix, index + 1) - case UnlabeledConsType(t, pt) => - (s"$prefix$index" -> t) :: pt.toLabelledList(prefix, index + 1) + case UnlabeledConsType(t, pt) => (s"$prefix$index" -> t) :: pt.toLabelledList(prefix, index + 1) case _ => Nil } @@ -85,6 +90,15 @@ sealed trait ProductType extends Type { pt.labelledData case _ => Nil } + + lazy val labelledStreams: List[(String, StreamType)] = this match { + case LabeledConsType(label, t: StreamType, pt) => + (label -> t) :: pt.labelledStreams + case ConsType(_, pt) => + pt.labelledStreams + case _ => Nil + } + } object ProductType { @@ -247,42 +261,73 @@ object LiteralType { def forInt(n: Long): LiteralType = if (n < 0) signed else unsigned } -sealed trait BoxType extends DataType { +sealed trait CollectionType extends Type { def isStream: Boolean - def element: Type + def element: DataType - def withElement(t: Type): BoxType + def withElement(t: DataType): CollectionType override def properties: Map[String, Type] = Map("length" -> ScalarType.u32) } -case class CanonStreamType(element: Type) extends BoxType { +object CollectionType { - override def isStream: Boolean = false + def elementTypeOf(types: List[CollectibleType]): DataType = + NonEmptyList + .fromList(types) + .fold(BottomType)( + _.map { + case StreamType(el) => ArrayType(el) + case dt: DataType => dt + }.reduce[Type](_ `∩` _) match { + // In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type". 
+ // But we want to get to TopType instead: this would mean that intersection is empty, and you cannot + // make any decision about the structure of type, but can push anything inside + case BottomType => TopType + case dt: DataType => dt + case t => + internalError( + s"Expected data type from " + + s"intersection of ${types.mkString(", ")}; " + + s"got $t" + ) + } + ) +} + +case class CanonStreamType( + override val element: DataType +) extends DataType with CollectionType { + + override val isStream: Boolean = false override def toString: String = "#" + element - override def withElement(t: Type): BoxType = copy(element = t) + override def withElement(t: DataType): CollectionType = copy(element = t) } -case class ArrayType(element: Type) extends BoxType { +case class ArrayType( + override val element: DataType +) extends DataType with CollectionType { - override def isStream: Boolean = false + override val isStream: Boolean = false override def toString: String = "[]" + element - override def withElement(t: Type): BoxType = copy(element = t) + override def withElement(t: DataType): CollectionType = copy(element = t) } -case class OptionType(element: Type) extends BoxType { +case class OptionType( + override val element: DataType +) extends DataType with CollectionType { - override def isStream: Boolean = false + override val isStream: Boolean = false override def toString: String = "?" + element - override def withElement(t: Type): BoxType = copy(element = t) + override def withElement(t: DataType): CollectionType = copy(element = t) } sealed trait NamedType extends Type { @@ -374,7 +419,13 @@ case class StructType(name: String, fields: NonEmptyMap[String, Type]) s"$fullName{${fields.map(_.toString).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")}}" } -case class StreamMapType(element: Type) extends DataType { +sealed trait MutableStreamType extends Type with CollectionType + +case class StreamMapType(override val element: DataType) extends MutableStreamType { + + override val isStream: Boolean = true + + override def withElement(t: DataType): CollectionType = copy(element = t) override def toString: String = s"%$element" } @@ -383,6 +434,15 @@ object StreamMapType { def top(): StreamMapType = StreamMapType(TopType) } +case class StreamType(override val element: DataType) extends MutableStreamType { + + override val isStream: Boolean = true + + override def toString: String = s"*$element" + + override def withElement(t: DataType): CollectionType = copy(element = t) +} + case class ServiceType(name: String, fields: NonEmptyMap[String, ArrowType]) extends NamedType { override val specifier: String = "service" @@ -424,17 +484,13 @@ case class ArrowType(domain: ProductType, codomain: ProductType) extends Type { s"$domain -> $codomain" } -case class StreamType(element: Type) extends BoxType { - - override def isStream: Boolean = true - - override def toString: String = s"*$element" - - override def withElement(t: Type): BoxType = copy(element = t) -} - object Type { - implicit lazy val typesPartialOrder: PartialOrder[Type] = + /** + * `StreamType` is collectible with canonicalization + */ + type CollectibleType = DataType | StreamType + + given PartialOrder[Type] = CompareTypes.partialOrder } diff --git a/types/src/main/scala/aqua/types/UniteTypes.scala b/types/src/main/scala/aqua/types/UniteTypes.scala index edbc74f6..ec606ca6 100644 --- a/types/src/main/scala/aqua/types/UniteTypes.scala +++ b/types/src/main/scala/aqua/types/UniteTypes.scala @@ -1,8 +1,9 @@ package aqua.types 
+import aqua.errors.Errors.internalError + import cats.Monoid import cats.data.NonEmptyMap - import scala.annotation.tailrec /** @@ -28,6 +29,12 @@ case class UniteTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type]: step(a.toList, b.toList, Nil) } + def combineDataTypes(a: DataType, b: DataType): DataType = + (a `∪` b) match { + case d: DataType => d + case t => internalError(s"$a ∪ $b yields non-data type $t") + } + override def combine(a: Type, b: Type): Type = (a, b) match { case (ap: ProductType, bp: ProductType) => @@ -52,18 +59,19 @@ case class UniteTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type]: ) case (ac: OptionType, bc: ArrayType) => - ArrayType(ac.element `∪` bc.element) - + ArrayType(combineDataTypes(ac.element, bc.element)) case (ac: ArrayType, bc: OptionType) => - ArrayType(ac.element `∪` bc.element) + ArrayType(combineDataTypes(ac.element, bc.element)) case (ac: ArrayType, bc: ArrayType) => - ArrayType(ac.element `∪` bc.element) + ArrayType(combineDataTypes(ac.element, bc.element)) case (ac: OptionType, bc: OptionType) => - OptionType(ac.element `∪` bc.element) + OptionType(combineDataTypes(ac.element, bc.element)) case (ac: StreamType, bc: StreamType) => - StreamType(ac.element `∩` bc.element) + StreamType(combineDataTypes(ac.element, bc.element)) + case (ac: StreamMapType, bc: StreamMapType) => + StreamMapType(combineDataTypes(ac.element, bc.element)) case (a: ScalarType, b: ScalarType) => scalarsCombine(a, b) diff --git a/types/src/test/scala/aqua/types/TypeSpec.scala b/types/src/test/scala/aqua/types/TypeSpec.scala index 73616300..07ee71d3 100644 --- a/types/src/test/scala/aqua/types/TypeSpec.scala +++ b/types/src/test/scala/aqua/types/TypeSpec.scala @@ -1,6 +1,5 @@ package aqua.types -import aqua.types.Type.typesPartialOrder import cats.data.NonEmptyMap import cats.kernel.PartialOrder import cats.syntax.partialOrder._ @@ -15,7 +14,7 @@ class TypeSpec extends AnyFlatSpec with Matchers { def `?`(t: DataType): DataType = OptionType(t) - def `*`(t: DataType): DataType = StreamType(t) + def `*`(t: DataType): StreamType = StreamType(t) def accepts(recv: Type, incoming: Type) = recv >= incoming @@ -76,7 +75,8 @@ class TypeSpec extends AnyFlatSpec with Matchers { "structs of scalars with literals" should "be variant" in { val one: Type = StructType("one", NonEmptyMap.of("field" -> u64)) - val two: Type = StructType("two", NonEmptyMap.of("field" -> LiteralType.number, "other" -> string)) + val two: Type = + StructType("two", NonEmptyMap.of("field" -> LiteralType.number, "other" -> string)) accepts(one, two) should be(true) accepts(two, one) should be(false) diff --git a/utils/helpers/src/main/scala/aqua/syntax/optiont.scala b/utils/helpers/src/main/scala/aqua/syntax/optiont.scala index 2b499f51..d191c8d3 100644 --- a/utils/helpers/src/main/scala/aqua/syntax/optiont.scala +++ b/utils/helpers/src/main/scala/aqua/syntax/optiont.scala @@ -1,8 +1,10 @@ package aqua.helpers.syntax -import cats.{Functor, Monad} import cats.data.OptionT +import cats.syntax.apply.* +import cats.syntax.flatMap.* import cats.syntax.functor.* +import cats.{Functor, Monad} object optiont { @@ -28,4 +30,20 @@ object optiont { )(using F: Monad[F]): OptionT[F, B] = o.flatTransform(f.andThen(_.value)) } + + extension [F[_]: Monad, A, B]( + t: Tuple2[OptionT[F, A], OptionT[F, B]] + ) { + + /** + * Merges `OptionT`s into `OptionT` of a tuple, + * **executing both effects**. 
+ */ + def merged: OptionT[F, (A, B)] = OptionT( + for { + a <- t._1.value + b <- t._2.value + } yield (a, b).tupled + ) + } }
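
The `merged` helper added in optiont.scala above differs from plain applicative composition of `OptionT`s: it runs both wrapped effects even when the first one yields `None`, which is what the new PushToStreamSem relies on to report problems for both the stream and the element. A minimal standalone sketch of that difference, using `Writer` purely for logging; the `Log`/`step` names below are illustrative and not part of the patch:

import aqua.helpers.syntax.optiont.*

import cats.data.{OptionT, Writer}
import cats.syntax.apply.*

type Log[A] = Writer[List[String], A]

// Wrap an optional result together with a log entry recording
// that this effect actually ran.
def step[A](name: String, result: Option[A]): OptionT[Log, A] =
  OptionT(Writer(List(name), result))

val left = step[Int]("left", None)
val right = step[String]("right", Some("ok"))

// Plain `tupled` short-circuits: once `left` yields None, the `right`
// effect never runs and only "left" is logged.
val shortCircuited = (left, right).tupled.value.run
// => (List("left"), None)

// `merged` executes both effects before combining the results,
// so both entries end up in the log.
val bothRan = (left, right).merged.value.run
// => (List("left", "right"), None)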
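
Collection literals in ValuesAlgebra now compute their element type through `CollectionType.elementTypeOf` (Type.scala above). A small sketch of the behaviour that code implies; the expected results in the comments are read off the added code and the preserved BottomType/TopType comment, not verified compiler output:

import aqua.types.*

// A stream element is canonicalized to an array of its element type
// before intersecting (the StreamType => ArrayType case).
val fromStream = CollectionType.elementTypeOf(List(StreamType(ScalarType.string)))
// expected: ArrayType(ScalarType.string), i.e. []string

// Comparable data types intersect to a type accepted by both.
val fromScalars = CollectionType.elementTypeOf(List(ScalarType.u8, ScalarType.u16))
// expected: the common numeric type u8 ∩ u16

// Unrelated types intersect to BottomType, which is widened to TopType
// so the collection can still be constructed and accept anything.
val fromMixed = CollectionType.elementTypeOf(List(ScalarType.string, ScalarType.u32))
// expected: TopType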
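
The generic `typeTo` narrowing in TypesInterpreter is built on Scala 3's `scala.reflect.TypeTest`, which lets `case t: T` compile for a type parameter. A reduced standalone sketch of that mechanism; the `narrow` function is hypothetical and only illustrates the pattern:

import scala.reflect.TypeTest

import aqua.types.*

// The TypeTest evidence supplies the runtime check behind `case t: T`,
// so the match compiles without an unchecked-erasure warning.
def narrow[T <: Type](t: Type)(using TypeTest[Type, T]): Option[T] =
  t match {
    case t: T => Some(t)
    case _ => None
  }

val stream: Option[StreamType] = narrow[StreamType](StreamType(ScalarType.u32))
// => Some(*u32)

val notACollection: Option[CollectionType] = narrow[CollectionType](ScalarType.u32)
// => None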