Mirror of https://github.com/fluencelabs/aqua.git, synced 2024-12-04 14:40:17 +00:00
feat(compiler)!: Prohibit mutating options [LNG-277] (#960)
* Parse any nested type
* Refactor type system
* Fix restriction
* Refactor type resolution
* Return stream definition
* Update examples
* Refactor
* Refactor
* Refactor integration tests
* Export service
* Add integration test
* Fix args provider
* Add parser unit tests
* Add type resolution unit tests
* Add more unit tests
* DataTypeToken -> CompositeTypeToken
* GeneralStreamType -> MutableStreamType
* Refactor
* Refactor TypeResolution
* colType -> collectionType
* Refactor
* Fix PushToStreamSem
* BasicTypeToken -> ScalarTypeToken
* CompositeTypeToken -> BasicTypeToken
* Fix for nil
* Make stream collectible
* Refactor collectible type
* Use internalError
* Add unit tests
parent 313502ecae
commit 68425ed42a
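The change is easiest to see in the type-system refactor the commit message describes: the compiler's "box" abstraction becomes CollectionType, and streams are the only collections that stay mutable, which is what lets the compiler reject pushes into ?T option values while still allowing a locally built *T stream to be returned where an option or array is expected (see the .aqua hunks below, where ?i64 and ?string declarations become *i64 and *string). A minimal, hypothetical Scala 3 sketch of that shape, with stand-in definitions rather than the real aqua.types hierarchy:

// Stand-in sketch only: the real hierarchy lives in aqua.types and differs in detail.
sealed trait Type
sealed trait DataType extends Type // immutable, serializable values
case object StringType extends DataType

sealed trait CollectionType extends Type: // roughly the former BoxType
  def element: DataType

final case class ArrayType(element: DataType) extends CollectionType with DataType
final case class OptionType(element: DataType) extends CollectionType with DataType
final case class StreamType(element: DataType) extends CollectionType // mutable, not plain data

// Only streams may be pushed into; options and arrays are read-only collections.
def canPush(t: CollectionType): Boolean = t match
  case _: StreamType => true
  case _             => false

@main def demo(): Unit =
  println(canPush(StreamType(StringType))) // true
  println(canPush(OptionType(StringType))) // false: mutating options is prohibited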
@@ -3,16 +3,16 @@ package aqua.run
 import aqua.parser.lexer.{CallArrowToken, CollectionToken, LiteralToken, VarToken}
 import aqua.parser.lift.Span
 import aqua.raw.value.{CollectionRaw, LiteralRaw, ValueRaw, VarRaw}
-import aqua.types.{ArrayType, BottomType}
+import aqua.types.*
 
-import cats.data.{NonEmptyChain, NonEmptyList, Validated, ValidatedNec}
 import cats.data.Validated.{invalid, invalidNec, validNec}
-import cats.{~>, Id}
+import cats.data.{NonEmptyChain, NonEmptyList, Validated, ValidatedNec}
+import cats.syntax.comonad.*
+import cats.syntax.either.*
+import cats.syntax.option.*
 import cats.syntax.traverse.*
 import cats.syntax.validated.*
-import cats.syntax.either.*
-import cats.syntax.comonad.*
-import cats.syntax.option.*
+import cats.{Id, ~>}
 
 case class CliFunc(name: String, args: List[ValueRaw] = Nil)
 
@@ -52,7 +52,15 @@ object CliFunc {
         .map(
           NonEmptyList
             .fromList(_)
-            .map(l => CollectionRaw(l, ArrayType(l.head.baseType)))
+            .map(l =>
+              CollectionRaw(
+                l,
+                ArrayType(
+                  // FIXME: Type of Literal should always be a DataType
+                  l.head.baseType.asInstanceOf[DataType]
+                )
+              )
+            )
             .getOrElse(ValueRaw.Nil)
         )
         .toValidatedNec
@@ -2,6 +2,7 @@ package aqua.run
 
 import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw}
 import aqua.types.*
+
 import cats.data.Validated.{invalidNec, validNec}
 import cats.data.{Validated, ValidatedNec}
 import cats.effect.kernel.Async
@@ -10,14 +11,11 @@ import cats.syntax.flatMap.*
 import cats.syntax.partialOrder.*
 import cats.syntax.show.*
 import cats.syntax.traverse.*
 
 import scala.collection.immutable.SortedMap
 import scala.concurrent.ExecutionContext
 
 object TypeValidator {
 
-  import aqua.types.Type.typesPartialOrder
-
   /**
    * Compare and validate type from Aqua file and type generated from JSON.
    * Also, the validation will succeed if the JSON type is missing an array or an optional field.
@@ -69,7 +67,7 @@ object TypeValidator {
       case (l: OptionType, r) =>
         // if we have ?[][]string and [][][]string it must throw an error
         validateTypes(name, l.element, Some(r), Some((l, r)))
-      case (l: BoxType, r: BoxType) =>
+      case (l: CollectionType, r: CollectionType) =>
         validateTypes(name, l.element, Some(r.element), fullOptionType.orElse(Some(l, r)))
 
       case (l, r) =>
@@ -1,12 +1,12 @@
 package aqua.definitions
 
+import aqua.definitions.*
 import aqua.res.FuncRes
 import aqua.types.*
-import aqua.definitions.*
 import io.circe.*
 import io.circe.parser.*
 import io.circe.syntax.*
 
 import scala.annotation.tailrec
 
 // Represents the Aqua types
@@ -16,7 +16,7 @@ sealed trait TypeDefinition {
 
 object TypeDefinition {
 
-  implicit val encodeProdDefType: Encoder[ProductTypeDef] = {
+  given Encoder[ProductTypeDef] = {
     case d @ LabeledProductTypeDef(fields) =>
       Json.obj(
         ("tag", Json.fromString(d.tag)),
@@ -33,7 +33,7 @@ object TypeDefinition {
       )
   }
 
-  implicit val encodeDefType: Encoder[TypeDefinition] = {
+  given Encoder[TypeDefinition] = {
     case d @ ScalarTypeDef(name) =>
       Json.obj(
         ("tag", Json.fromString(d.tag)),
@@ -68,14 +68,14 @@ object TypeDefinition {
       )
   }
 
-  implicit val encodeServiceDefType: Encoder[ServiceDef] = { case ServiceDef(sId, functions, name) =>
+  given Encoder[ServiceDef] = { case ServiceDef(sId, functions, name) =>
     Json.obj(
       ("defaultServiceId", sId.asJson),
-      ("functions", encodeProdDefType(functions))
+      ("functions", (functions: ProductTypeDef).asJson)
     )
   }
 
-  implicit val encodeNamesConfig: Encoder[NamesConfig] = { case n: NamesConfig =>
+  given Encoder[NamesConfig] = { case n: NamesConfig =>
     import n.*
     Json.obj(
       ("relay", Json.fromString(relay)),
@@ -88,13 +88,12 @@ object TypeDefinition {
     )
   }
 
-  implicit val encodeFunctionDefType: Encoder[FunctionDef] = {
-    case FunctionDef(fName, arrow, names) =>
-      Json.obj(
-        ("functionName", Json.fromString(fName)),
-        ("arrow", encodeDefType(arrow)),
-        ("names", names.asJson)
-      )
+  given Encoder[FunctionDef] = { case FunctionDef(fName, arrow, names) =>
+    Json.obj(
+      ("functionName", Json.fromString(fName)),
+      ("arrow", (arrow: TypeDefinition).asJson),
+      ("names", names.asJson)
+    )
   }
 
   def apply(t: Option[Type]): TypeDefinition = t.map(apply).getOrElse(NilTypeDef)
@@ -103,7 +102,7 @@ object TypeDefinition {
     t match {
       case OptionType(t) =>
         OptionTypeDef(TypeDefinition(t))
-      case t: BoxType => ArrayTypeDef(TypeDefinition(t.element))
+      case t: CollectionType => ArrayTypeDef(TypeDefinition(t.element))
       case StructType(name, fields) =>
         StructTypeDef(name, fields.toSortedMap.view.mapValues(TypeDefinition.apply).toMap)
       case AbilityType(name, fieldAndArrows) =>
@@ -198,7 +197,11 @@ case class NamesConfig(
 )
 
 // Describes service
-case class ServiceDef(defaultServiceId: Option[String], functions: LabeledProductTypeDef, name: String)
+case class ServiceDef(
+  defaultServiceId: Option[String],
+  functions: LabeledProductTypeDef,
+  name: String
+)
 
 // Describes top-level function
 case class FunctionDef(
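The TypeDefinition hunks above migrate the circe encoders from named implicit vals to Scala 3 given instances, and call sites stop applying an encoder by name (encodeProdDefType(functions), encodeDefType(arrow)) in favour of the .asJson extension syntax with an ascribed type. A small self-contained sketch of the same pattern, using a made-up ProductTypeDef rather than the real aqua.definitions code:

import io.circe.syntax.*
import io.circe.{Encoder, Json}

// Hypothetical type standing in for aqua's ProductTypeDef.
final case class ProductTypeDef(tag: String, fields: List[String])

object ProductTypeDef:
  // Before: implicit val encodeProdDefType: Encoder[ProductTypeDef] = ...
  // After: an anonymous given, summoned at call sites via .asJson.
  given Encoder[ProductTypeDef] = Encoder.instance { d =>
    Json.obj(
      ("tag", Json.fromString(d.tag)),
      ("fields", Json.fromValues(d.fields.map(Json.fromString)))
    )
  }

@main def encodeDemo(): Unit =
  val functions = ProductTypeDef("labeledProduct", List("get_records", "modify_stream"))
  // Instead of encodeProdDefType(functions):
  println(functions.asJson.noSpaces)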
@@ -3,8 +3,11 @@ package aqua.backend
 import aqua.backend.air.FuncAirGen
 import aqua.backend.ts.TypeScriptCommon.fixupArgName
 import aqua.backend.ts.{TSFuncTypes, TypeScriptCommon}
+import aqua.definitions.*
+import aqua.definitions.TypeDefinition.given
 import aqua.res.FuncRes
 import aqua.types.*
+
 import cats.syntax.show.*
 import io.circe.*
 import io.circe.parser.*
@@ -20,8 +23,6 @@ case class OutputFunc(func: FuncRes, types: Types) {
   val funcTypes = types.funcType(func)
 
   import funcTypes.*
-  import aqua.definitions.TypeDefinition.*
-  import aqua.definitions.*
 
   def generate: (AirFunction, String) = {
     val tsAir = FuncAirGen(func).generate
@@ -1,8 +1,11 @@
 package aqua.backend
 
 import aqua.backend.ts.TypeScriptCommon
+import aqua.definitions.*
+import aqua.definitions.TypeDefinition.given
 import aqua.res.ServiceRes
 import aqua.types.ArrowType
+
 import io.circe.*
 import io.circe.parser.*
 import io.circe.syntax.*
@@ -14,8 +17,6 @@ case class OutputService(srv: ServiceRes, types: Types) {
   private val serviceTypes = types.serviceType(srv)
 
   import serviceTypes.*
-  import aqua.definitions.TypeDefinition.*
-  import aqua.definitions.*
 
   def generate: String =
     val functions = LabeledProductTypeDef(
@@ -122,6 +122,7 @@ lazy val types = crossProject(JVMPlatform, JSPlatform)
       "org.typelevel" %%% "cats-core" % catsV
     )
   )
+  .dependsOn(errors)
 
 lazy val parser = crossProject(JVMPlatform, JSPlatform)
   .withoutSuffixFor(JVMPlatform)
@@ -7,4 +7,4 @@ import aqua.raw.ConstantRaw
  *
  * @param constantsList List of known constants
  */
 case class AquaCompilerConf(constantsList: List[ConstantRaw])
@@ -7,7 +7,7 @@ service FailService("fail-srv"):
 
 func errorClearTest(node: string, relay: string) -> string, i64:
     stream: *string
-    code: ?i64
+    code: *i64
 
     on node via relay:
         try:
@@ -39,7 +39,7 @@ func ifCalc() -> u64:
     <- res!0
 
 func cmp(a: i32, b: i32, pred: i8 -> bool) -> bool:
-    result: ?bool
+    result: *bool
 
     if a < b:
         result <- pred(-1)
@@ -4,7 +4,7 @@ export handleResultError
 
 -- t = true, f = false
 func handleResultError(t: bool, f: bool) -> string:
-    opt: ?[]string
+    opt: *[]string
 
     if t == f: -- false
         opt <<- ["unreachable"]
@@ -2,7 +2,7 @@ service OptionString("opt_str"):
     checkOption(str: ?string) -> string
 
 func emptyString() -> ?string:
-    valueEmpty: ?string
+    valueEmpty: *string
     <- valueEmpty
 
 func checkEmpty() -> string:
@@ -11,7 +11,7 @@ func checkEmpty() -> string:
     <- res
 
 func stringAsOption(str: string) -> ?string:
-    valueEmpty: ?string
+    valueEmpty: *string
     valueEmpty <<- str
     <- valueEmpty
 
@@ -36,7 +36,7 @@ func returnNilLength() -> u32:
     <- arr.length
 
 func stringNone() -> ?string:
-    valueNone: ?string
+    valueNone: *string
     <- valueNone
 
 func returnNone() -> ?string:
@@ -1,3 +1,7 @@
+aqua StreamArgs
+
+export retrieve_records, modify_stream, TestService
+
 service TestService("test-service"):
     get_records(key: string) -> []string
 
@@ -7,4 +11,8 @@ func append_records(peer: string, srum: *[]string):
 func retrieve_records(peer: string) -> [][]string:
     records: *[]string
     append_records(peer, records)
     <- records
+
+func modify_stream(stream: *string) -> []string:
+    stream <<- "appended value"
+    <- stream
@@ -1,3 +1,7 @@
+aqua Via
+
+export viaArr, viaStream, viaOpt
+
 import "@fluencelabs/aqua-lib/builtin.aqua"
 
 func viaArr(node_id: string, viaAr: []string) -> Info:
@@ -5,13 +9,12 @@ func viaArr(node_id: string, viaAr: []string) -> Info:
         p <- Peer.identify()
     <- p
 
-
 func viaStream(node_id: string, viaStr: *string) -> Info:
     on node_id via viaStr:
         p <- Peer.identify()
     <- p
 
-func viaOpt(relay: string, node_id: string, viaOpt: ?string) -> Info:
+func viaOpt(node_id: string, viaOpt: ?string) -> Info:
     on node_id via viaOpt:
         p <- Peer.identify()
     <- p
@@ -33,7 +33,11 @@ import {
 import {
   abilityCall,
   complexAbilityCall,
-  checkAbCallsCall, bugLNG258Call1, bugLNG258Call2, bugLNG258Call3, multipleAbilityWithClosureCall,
+  checkAbCallsCall,
+  bugLNG258Call1,
+  bugLNG258Call2,
+  bugLNG258Call3,
+  multipleAbilityWithClosureCall,
 } from "../examples/abilityCall.js";
 import {
   nilLengthCall,
@@ -78,7 +82,10 @@ import { tryCatchCall } from "../examples/tryCatchCall.js";
 import { tryOtherwiseCall } from "../examples/tryOtherwiseCall.js";
 import { coCall } from "../examples/coCall.js";
 import { bugLNG60Call, passArgsCall } from "../examples/passArgsCall.js";
-import { streamArgsCall } from "../examples/streamArgsCall.js";
+import {
+  streamArgsCall,
+  modifyStreamCall,
+} from "../examples/streamArgsCall.js";
 import { streamResultsCall } from "../examples/streamResultsCall.js";
 import { structuralTypingCall } from "../examples/structuralTypingCall";
 import {
@@ -104,7 +111,10 @@ import { multiReturnCall } from "../examples/multiReturnCall.js";
 import { declareCall } from "../examples/declareCall.js";
 import { genOptions, genOptionsEmptyString } from "../examples/optionsCall.js";
 import { lng193BugCall } from "../examples/closureReturnRename.js";
-import {closuresCall, multipleClosuresLNG262BugCall} from "../examples/closures.js";
+import {
+  closuresCall,
+  multipleClosuresLNG262BugCall,
+} from "../examples/closures.js";
 import { closureArrowCaptureCall } from "../examples/closureArrowCapture.js";
 import {
   bugLNG63_2Call,
@@ -589,6 +599,18 @@ describe("Testing examples", () => {
     expect(streamArgsResult).toEqual([["peer_id", "peer_id"]]);
   });
 
+  it("streamArgs.aqua modify stream", async () => {
+    let streamArgsResult = await modifyStreamCall([
+      "passed value 1",
+      "passed value 2",
+    ]);
+    expect(streamArgsResult).toEqual([
+      "passed value 1",
+      "passed value 2",
+      "appended value",
+    ]);
+  });
+
   it("streamResults.aqua", async () => {
     let streamResultsResult = await streamResultsCall();
     expect(streamResultsResult).toEqual(["new_name", "new_name", "new_name"]);
@@ -934,9 +956,10 @@ describe("Testing examples", () => {
 
   it("via.aqua", async () => {
     let res1 = await viaArrCall();
-    let res2 = await viaOptCall(relayPeerId1);
-    let res3 = await viaOptNullCall(relayPeerId1);
-    let res4 = await viaStreamCall(relayPeerId1);
+    let res2 = await viaOptCall();
+    let res3 = await viaOptNullCall();
+    let res4 = await viaStreamCall();
+    expect(res1).not.toHaveLength(0);
     expect(res1).toEqual(res2);
     expect(res2).toEqual(res3);
     expect(res3).toEqual(res4);
@@ -1,5 +1,6 @@
 import {
   retrieve_records,
+  modify_stream,
   registerTestService,
 } from "../compiled/examples/streamArgs.js";
 
@@ -12,3 +13,7 @@ export async function streamArgsCall() {
 
   return await retrieve_records("peer_id");
 }
+
+export async function modifyStreamCall(arg: string[]) {
+  return await modify_stream(arg);
+}
@@ -13,26 +13,21 @@ export async function viaArrCall(): Promise<string[]> {
   return res.external_addresses;
 }
 
-export async function viaOptCall(relayPeerId: string): Promise<string[]> {
-  let res2 = await viaOpt(relayPeerId, relays[4].peerId, relays[2].peerId, {
-    ttl: 30000,
-  });
+export async function viaOptCall(): Promise<string[]> {
+  let res2 = await viaOpt(relays[4].peerId, relays[2].peerId, { ttl: 30000 });
 
   return res2.external_addresses;
 }
 
-export async function viaOptNullCall(relayPeerId: string): Promise<string[]> {
-  let res3 = await viaOpt(
-    relayPeerId,
-    relays[4].peerId,
-    relays[2].peerId || null,
-    { ttl: 30000 },
-  );
+export async function viaOptNullCall(): Promise<string[]> {
+  let res3 = await viaOpt(relays[4].peerId, null, {
+    ttl: 30000,
+  });
 
   return res3.external_addresses;
 }
 
-export async function viaStreamCall(relayPeerId: string): Promise<string[]> {
+export async function viaStreamCall(): Promise<string[]> {
   let res4 = await viaStream(
     relays[4].peerId,
     [relays[2].peerId, relays[1].peerId],
@@ -2,11 +2,11 @@ package aqua.js
 
 import aqua.*
 import aqua.backend.*
-import aqua.definitions.{ArrayTypeDef, ArrowTypeDef, BottomTypeDef, FunctionDef, LabeledProductTypeDef, NamesConfig, NilTypeDef, OptionTypeDef, ScalarTypeDef, ServiceDef, StructTypeDef, TopTypeDef, TypeDefinition, UnlabeledProductTypeDef}
+import aqua.definitions.*
 import aqua.res.FuncRes
-import aqua.types.{ArrowType, BottomType, BoxType, LabeledConsType, LiteralType, NilType, OptionType, ProductType, ScalarType, StructType, TopType, Type, UnlabeledConsType}
-import io.circe.{Encoder, Json}
+import aqua.types.*
 
+import io.circe.{Encoder, Json}
 import scala.scalajs.js
 import scala.scalajs.js.JSConverters.*
 import scala.scalajs.js.annotation.{JSExportAll, JSImport}
@@ -1,6 +1,7 @@
 package aqua.js
 
 import aqua.types.*
+
 import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
 import cats.data.{NonEmptyMap, Validated, ValidatedNec}
 import cats.syntax.applicative.*
@@ -9,7 +10,6 @@ import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.semigroup.*
 import cats.syntax.traverse.*
-
 import scala.collection.immutable.SortedMap
 import scala.scalajs.js
 
@@ -17,18 +17,18 @@ object JsonEncoder {
 
   /* Get widest possible type from JSON arrays. For example:
   JSON: {
    field1: [
      {
        a: "a",
        b: [1,2,3],
        c: 4
      },
      {
        c: 3
      }
    ]
   }
-  There type in array must be { a: ?string, b: []number, c: number
+  Type in array must be { a: ?string, b: []number, c: number }
   */
   private def compareAndGetWidestType(
     name: String,
@@ -43,8 +43,8 @@ object JsonEncoder {
       case (la @ ArrayType(_), BottomType) => validNec(la)
       case (lo @ OptionType(lel), rtt) if lel == rtt => validNec(lo)
       case (ltt, ro @ OptionType(rel)) if ltt == rel => validNec(ro)
-      case (BottomType, rb) => validNec(OptionType(rb))
-      case (lb, BottomType) => validNec(OptionType(lb))
+      case (BottomType, rb: DataType) => validNec(OptionType(rb))
+      case (lb: DataType, BottomType) => validNec(OptionType(lb))
       case (lst: StructType, rst: StructType) =>
         val lFieldsSM: SortedMap[String, Type] = lst.fields.toSortedMap
         val rFieldsSM: SortedMap[String, Type] = rst.fields.toSortedMap
|
|||||||
.reduce[ValidatedNec[String, Type]] { case (l, t) =>
|
.reduce[ValidatedNec[String, Type]] { case (l, t) =>
|
||||||
compareAndGetWidestType(name, l, t)
|
compareAndGetWidestType(name, l, t)
|
||||||
}
|
}
|
||||||
.map(t => ArrayType(t))
|
.andThen {
|
||||||
|
case dt: DataType => validNec(ArrayType(dt))
|
||||||
|
case t => invalidNec(s"Unexpected type $t")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case a if t == "object" && !js.isUndefined(arg) && arg != null =>
|
case a if t == "object" && !js.isUndefined(arg) && arg != null =>
|
||||||
|
@@ -6,17 +6,17 @@ import aqua.model.*
 import aqua.model.inline.state.{Arrows, Exports, Mangler}
 import aqua.raw.ops.RawTag
 import aqua.raw.value.{ValueRaw, VarRaw}
-import aqua.types.{AbilityType, ArrowType, BoxType, NamedType, StreamType, Type}
+import aqua.types.{AbilityType, ArrowType, CollectionType, NamedType, StreamType, Type}
 
 import cats.data.StateT
 import cats.data.{Chain, IndexedStateT, State}
-import cats.syntax.functor.*
 import cats.syntax.applicative.*
 import cats.syntax.bifunctor.*
 import cats.syntax.foldable.*
-import cats.syntax.traverse.*
+import cats.syntax.functor.*
 import cats.syntax.option.*
 import cats.syntax.show.*
+import cats.syntax.traverse.*
 import cats.{Eval, Monoid}
 import scribe.Logging
 
@@ -8,7 +8,7 @@ import aqua.model.inline.state.{Arrows, Exports, Mangler}
 import aqua.model.inline.tag.IfTagInliner
 import aqua.raw.ops.*
 import aqua.raw.value.*
-import aqua.types.{BoxType, CanonStreamType, StreamType}
+import aqua.types.{CanonStreamType, CollectionType, StreamType}
 
 import cats.data.{Chain, State, StateT}
 import cats.instances.list.*
@@ -31,9 +31,10 @@ import scribe.Logging
  */
 object TagInliner extends Logging {
 
-  import RawValueInliner.{valueListToModel, valueToModel}
   import aqua.model.inline.Inline.parDesugarPrefix
 
+  import RawValueInliner.{valueListToModel, valueToModel}
+
   /**
    * Result of [[RawTag]] inlining
    *
@@ -242,7 +243,7 @@ object TagInliner extends Logging {
         (v, p) = flattened
         n <- Mangler[S].findAndForbidName(item)
         elementType = iterable.`type` match {
-          case b: BoxType => b.element
+          case b: CollectionType => b.element
           case _ =>
             internalError(
               s"non-box type variable '$iterable' in 'for' expression."
@@ -1,5 +1,9 @@
 package aqua.model.inline.raw
 
+import aqua.model.inline.Inline
+import aqua.model.inline.Inline.MergeMode.*
+import aqua.model.inline.RawValueInliner.unfold
+import aqua.model.inline.state.{Arrows, Exports, Mangler}
 import aqua.model.{
   CallModel,
   CanonicalizeModel,
@@ -10,14 +14,11 @@ import aqua.model.{
   ValueModel,
   VarModel
 }
-import aqua.model.inline.Inline.MergeMode.*
-import aqua.model.inline.Inline
-import aqua.model.inline.state.{Arrows, Exports, Mangler}
 import aqua.raw.value.{FunctorRaw, ValueRaw}
-import cats.data.State
+import aqua.types.{ArrayType, CanonStreamType, CollectionType, StreamType}
+
 import cats.data.Chain
-import aqua.model.inline.RawValueInliner.unfold
-import aqua.types.{ArrayType, BoxType, CanonStreamType, StreamType}
+import cats.data.State
 import cats.syntax.monoid.*
 import scribe.Logging
 
@@ -1,11 +1,13 @@
 package aqua.model.inline.raw
 
-import aqua.model.{CallModel, CanonicalizeModel, NullModel, PushToStreamModel, RestrictionModel, SeqModel, ValueModel, VarModel, XorModel}
+import aqua.model.*
 import aqua.model.inline.Inline
 import aqua.model.inline.RawValueInliner.valueToModel
 import aqua.model.inline.state.{Arrows, Exports, Mangler}
 import aqua.raw.value.CollectionRaw
+import aqua.types.StreamMapType
 import aqua.types.{ArrayType, CanonStreamType, OptionType, StreamType}
+
 import cats.data.{Chain, State}
 
 object CollectionRawInliner extends RawInliner[CollectionRaw] {
@@ -20,11 +22,15 @@ object CollectionRawInliner extends RawInliner[CollectionRaw] {
     assignToName: Option[String] = None
   ): State[S, (ValueModel, Inline)] =
     for {
-      streamName <- raw.boxType match {
+      streamName <- raw.collectionType match {
         case _: StreamType =>
-          assignToName
-            .map(s => State.pure(s))
-            .getOrElse(Mangler[S].findAndForbidName("stream-inline"))
+          assignToName.fold(
+            Mangler[S].findAndForbidName("stream-inline")
+          )(State.pure)
+        case _: StreamMapType =>
+          assignToName.fold(
+            Mangler[S].findAndForbidName("stream_map-inline")
+          )(State.pure)
         case _: CanonStreamType => Mangler[S].findAndForbidName("canon_stream-inline")
         case _: ArrayType => Mangler[S].findAndForbidName("array-inline")
         case _: OptionType => Mangler[S].findAndForbidName("option-inline")
@@ -50,15 +56,15 @@ object CollectionRawInliner extends RawInliner[CollectionRaw] {
       }
 
       canonName <-
-        if (raw.boxType.isStream) State.pure(streamName)
+        if (raw.collectionType.isStream) State.pure(streamName)
         else Mangler[S].findAndForbidName(streamName)
-      canonType = raw.boxType match {
-        case StreamType(_) => raw.boxType
-        case _ => CanonStreamType(raw.boxType.element)
+      canonType = raw.collectionType match {
+        case StreamType(_) => raw.collectionType
+        case _ => CanonStreamType(raw.collectionType.element)
       }
       canon = CallModel.Export(canonName, canonType)
     } yield VarModel(canonName, canon.`type`) -> Inline.tree(
-      raw.boxType match {
+      raw.collectionType match {
        case ArrayType(_) =>
          RestrictionModel(streamName, streamType).wrap(
            SeqModel.wrap(inlines ++ vals :+ CanonicalizeModel(stream, canon).leaf)
@@ -55,7 +55,7 @@ object ConstantRaw {
       false
     )
 
-  def defaultConstants(relayVarName: Option[String]): List[ConstantRaw] =
+  def defaultConstants(relayVarName: Option[String] = None): List[ConstantRaw] =
     hostPeerId(
       relayVarName
     ) :: initPeerId :: particleTtl :: particleTimestamp :: nil :: lastError :: Nil
@@ -4,12 +4,12 @@ import aqua.raw.arrow.FuncRaw
 import aqua.raw.ops.RawTag.Tree
 import aqua.raw.value.{CallArrowRaw, CallServiceRaw, ValueRaw}
 import aqua.tree.{TreeNode, TreeNodeCompanion}
-import aqua.types.{ArrowType, DataType, ServiceType}
+import aqua.types.*
 
 import cats.Show
 import cats.data.{Chain, NonEmptyList}
-import cats.syntax.foldable.*
 import cats.free.Cofree
+import cats.syntax.foldable.*
 
 sealed trait RawTag extends TreeNode[RawTag] {
 
@@ -160,7 +160,7 @@ case class NextTag(item: String) extends RawTag {
   override def mapValues(f: ValueRaw => ValueRaw): RawTag = this
 }
 
-case class RestrictionTag(name: String, `type`: DataType) extends SeqGroupTag {
+case class RestrictionTag(name: String, `type`: Type) extends SeqGroupTag {
 
   override def restrictsVarNames: Set[String] = Set(name)
 
@@ -1,9 +1,11 @@
 package aqua.raw.value
 
+import aqua.errors.Errors.internalError
 import aqua.types.*
+import aqua.types.Type.*
 
-import cats.data.{Chain, NonEmptyList, NonEmptyMap}
 import cats.Eq
+import cats.data.{Chain, NonEmptyList, NonEmptyMap}
 import cats.syntax.option.*
 
 sealed trait ValueRaw {
@@ -157,16 +159,27 @@ object LiteralRaw {
   }
 }
 
-case class CollectionRaw(values: NonEmptyList[ValueRaw], boxType: BoxType) extends ValueRaw {
+case class CollectionRaw(
+  values: NonEmptyList[ValueRaw],
+  collectionType: CollectionType
+) extends ValueRaw {
 
-  lazy val elementType: Type = boxType.element
+  lazy val elementType: DataType = collectionType.element
 
-  override lazy val baseType: Type = boxType
+  override lazy val baseType: Type = collectionType
 
   override def mapValues(f: ValueRaw => ValueRaw): ValueRaw = {
     val vals = values.map(f)
-    val el = vals.map(_.`type`).reduceLeft(_ `∩` _)
-    copy(vals, boxType.withElement(el))
+    val types = vals.map(_.`type` match {
+      case ct: CollectibleType => ct
+      case t => internalError(s"Non-collection type in collection: ${t}")
+    })
+    val element = CollectionType.elementTypeOf(types.toList)
+
+    copy(
+      values = vals,
+      collectionType = collectionType.withElement(element)
+    )
   }
 
   override def varNames: Set[String] = values.toList.flatMap(_.varNames).toSet
@@ -3,10 +3,11 @@ package aqua.res
 import aqua.model.{CallModel, ForModel, LiteralModel, ValueModel, VarModel}
 import aqua.raw.ops.Call
 import aqua.tree.{TreeNode, TreeNodeCompanion}
-import aqua.types.DataType
+import aqua.types.*
 
+import cats.Show
 import cats.data.Chain
 import cats.free.Cofree
-import cats.Show
 
 // TODO docs to all traits and objects
 sealed trait ResolvedOp extends TreeNode[ResolvedOp]
@@ -46,7 +47,7 @@ object FoldRes {
     FoldRes(item, iterable, Mode.Never)
 }
 
-case class RestrictionRes(item: String, `type`: DataType) extends ResolvedOp {
+case class RestrictionRes(item: String, `type`: Type) extends ResolvedOp {
   override def toString: String = s"(new ${`type`.airPrefix}$item "
 }
 
@@ -4,13 +4,12 @@ import aqua.model.OpModel.Tree
 import aqua.tree.{TreeNode, TreeNodeCompanion}
 import aqua.types.*
 
-import cats.data.Chain
-import cats.free.Cofree
-import cats.Show
 import cats.Eval
+import cats.Show
+import cats.data.Chain
 import cats.data.NonEmptyList
+import cats.free.Cofree
 import cats.syntax.functor.*
 
 import scala.annotation.tailrec
 
 sealed trait OpModel extends TreeNode[OpModel] {
@@ -129,7 +128,7 @@ case class NextModel(item: String) extends OpModel {
 
 // TODO: Refactor out `name` and `type` to
 // something like VarModel without properties
-case class RestrictionModel(name: String, `type`: DataType) extends SeqGroupModel {
+case class RestrictionModel(name: String, `type`: Type) extends SeqGroupModel {
   override def usesVarNames: Set[String] = Set.empty
 
   override def restrictsVarNames: Set[String] = Set(name)
@@ -18,7 +18,7 @@ object ArgsProvider {
     // Variable name to store the value of the argument
     varName: String,
     // Type of the argument
-    t: DataType
+    t: DataType | StreamType
   )
 }
 
@@ -46,18 +46,21 @@ case class ArgsFromService(dataServiceId: ValueRaw) extends ArgsProvider {
     )
   }
 
+  private def getDataOp(name: String, varName: String, t: DataType): RawTag.Tree =
+    CallArrowRawTag
+      .service(
+        dataServiceId,
+        name,
+        Call(Nil, Call.Export(varName, t) :: Nil)
+      )
+      .leaf
+
   def getDataOp(arg: ArgsProvider.Arg): RawTag.Tree =
     arg.t match {
       case st: StreamType =>
        getStreamDataOp(arg.name, arg.varName, st)
-      case _ =>
-        CallArrowRawTag
-          .service(
-            dataServiceId,
-            arg.name,
-            Call(Nil, Call.Export(arg.varName, arg.t) :: Nil)
-          )
-          .leaf
+      case dt: DataType =>
+        getDataOp(arg.name, arg.varName, dt)
     }
 
   override def provideArgs(args: List[ArgsProvider.Arg]): List[RawTag.Tree] =
@@ -81,7 +81,7 @@ case class FuncPreTransformer(
       (name, s"-$name-arg-", typ)
     }
 
-    val dataArgs = args.collect { case (name, varName, t: DataType) =>
+    val nonArrowArgs = args.collect { case (name, varName, t: (DataType | StreamType)) =>
       ArgsProvider.Arg(name, varName, t)
     }
 
@@ -95,7 +95,7 @@ case class FuncPreTransformer(
     )
 
     val provideArgs = argsProvider.provideArgs(
-      relayArg.toList ::: dataArgs
+      relayArg.toList ::: nonArrowArgs
     )
 
     val handleResults = resultsHandler.handleResults(
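In the ArgsProvider and FuncPreTransformer hunks above, a non-arrow argument is now typed as the Scala 3 union DataType | StreamType, and the provider branches on which member it got instead of treating streams as ordinary data. A stand-alone sketch of that pattern with simplified types (not aqua's own):

// Simplified stand-ins for aqua.types; StreamType is deliberately not a DataType.
sealed trait DataType
case object ScalarType extends DataType
final case class StreamType(element: DataType)

final case class Arg(name: String, t: DataType | StreamType)

// Covers both members of the union: streams and plain data take different code paths.
def provide(arg: Arg): String = arg.t match
  case st: StreamType => s"${arg.name}: stream of ${st.element}"
  case dt: DataType   => s"${arg.name}: data $dt"

@main def argsDemo(): Unit =
  println(provide(Arg("records", StreamType(ScalarType)))) // stream path
  println(provide(Arg("key", ScalarType)))                 // data path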
@@ -1,28 +1,28 @@
 package aqua.model.transform.topology
 
 import aqua.errors.Errors.internalError
-import aqua.model.transform.topology.TopologyPath
-import aqua.model.transform.cursor.ChainZipper
-import aqua.model.transform.topology.strategy.*
 import aqua.model.*
+import aqua.model.transform.cursor.ChainZipper
+import aqua.model.transform.topology.TopologyPath
+import aqua.model.transform.topology.strategy.*
 import aqua.raw.value.{LiteralRaw, ValueRaw}
 import aqua.res.{ApRes, CanonRes, FoldRes, MakeRes, NextRes, ResolvedOp, SeqRes}
-import aqua.types.{ArrayType, BoxType, CanonStreamType, ScalarType, StreamType}
+import aqua.types.{ArrayType, CanonStreamType, CollectionType, ScalarType, StreamType}
 
 import cats.Eval
 import cats.data.Chain.{==:, nil}
 import cats.data.OptionT
 import cats.data.{Chain, NonEmptyChain, NonEmptyList, OptionT}
 import cats.free.Cofree
-import cats.syntax.traverse.*
-import cats.syntax.show.*
-import cats.syntax.apply.*
-import cats.syntax.option.*
-import cats.syntax.flatMap.*
-import cats.syntax.foldable.*
-import cats.syntax.applicative.*
 import cats.instances.map.*
 import cats.kernel.Monoid
+import cats.syntax.applicative.*
+import cats.syntax.apply.*
+import cats.syntax.flatMap.*
+import cats.syntax.foldable.*
+import cats.syntax.option.*
+import cats.syntax.show.*
+import cats.syntax.traverse.*
 import scribe.Logging
 
 /**
@@ -370,7 +370,7 @@ object Topology extends Logging {
     reversed: Boolean = false
   ): Chain[Res] = peerIds.map { v =>
     v.`type` match {
-      case _: BoxType =>
+      case _: CollectionType =>
         val itemName = "-via-peer-"
         val steps = Chain(
           MakeRes.hop(VarModel(itemName, ScalarType.string, Chain.empty)),
@@ -2,13 +2,14 @@ package aqua.parser.expr
 
 import aqua.parser.Expr
 import aqua.parser.lexer.Token.*
-import aqua.parser.lexer.{ArrowTypeToken, DataTypeToken, Name}
+import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, Name}
 import aqua.parser.lift.LiftParser
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+
 import cats.Comonad
 import cats.parse.Parser
 import cats.~>
-import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
 
 case class ArrowTypeExpr[F[_]](name: Name[F], `type`: ArrowTypeToken[F])
     extends Expr[F](ArrowTypeExpr, name) {
@@ -19,8 +20,9 @@ object ArrowTypeExpr extends Expr.Leaf {
 
   override val p: Parser[ArrowTypeExpr[Span.S]] =
     (Name.p ~ ((` : ` *> ArrowTypeToken.`arrowdef`(
-      DataTypeToken.`datatypedef`
-    )) | ArrowTypeToken.`arrowWithNames`(DataTypeToken.`datatypedef`))).map { case (name, t) =>
-      ArrowTypeExpr(name, t)
+      BasicTypeToken.`compositetypedef`
+    )) | ArrowTypeToken.`arrowWithNames`(BasicTypeToken.`compositetypedef`))).map {
+      case (name, t) =>
+        ArrowTypeExpr(name, t)
     }
 }
@@ -2,15 +2,16 @@ package aqua.parser.expr
 
 import aqua.parser.Expr
 import aqua.parser.lexer.Token.*
-import aqua.parser.lexer.{DataTypeToken, Name, StreamTypeToken}
+import aqua.parser.lexer.{BasicTypeToken, Name, StreamTypeToken}
 import aqua.parser.lift.LiftParser
-import cats.Comonad
-import cats.parse.Parser
-import cats.~>
 import aqua.parser.lift.Span
 import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
 
-case class FieldTypeExpr[F[_]](name: Name[F], `type`: DataTypeToken[F])
+import cats.Comonad
+import cats.parse.Parser
+import cats.~>
+
+case class FieldTypeExpr[F[_]](name: Name[F], `type`: BasicTypeToken[F])
     extends Expr[F](FieldTypeExpr, name) {
 
   override def mapK[K[_]: Comonad](fk: F ~> K): FieldTypeExpr[K] =
@@ -20,7 +21,7 @@ case class FieldTypeExpr[F[_]](name: Name[F], `type`: DataTypeToken[F])
 object FieldTypeExpr extends Expr.Leaf {
 
   override val p: Parser[FieldTypeExpr[Span.S]] =
-    ((Name.p <* ` : `) ~ DataTypeToken.`datatypedef`).map { case (name, t) =>
+    ((Name.p <* ` : `) ~ BasicTypeToken.`compositetypedef`).map { case (name, t) =>
       FieldTypeExpr(name, t)
     }
 }
@@ -1,13 +1,14 @@
 package aqua.parser.expr.func
 
-import aqua.parser.{ArrowReturnError, Ast, Expr, ParserError}
-import aqua.parser.lexer.{ArrowTypeToken, DataTypeToken, TypeToken, ValueToken}
+import aqua.parser.lexer.{ArrowTypeToken, BasicTypeToken, TypeToken, ValueToken}
 import aqua.parser.lift.LiftParser
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
+import aqua.parser.{ArrowReturnError, Ast, Expr, ParserError}
+
 import cats.Comonad
 import cats.parse.Parser
 import cats.~>
-import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
 
 case class ArrowExpr[F[_]](arrowTypeExpr: ArrowTypeToken[F])
     extends Expr[F](ArrowExpr, arrowTypeExpr) {
@@ -3,14 +3,15 @@ package aqua.parser.expr.func
 import aqua.parser.Expr
 import aqua.parser.expr.func.DeclareStreamExpr
 import aqua.parser.lexer.Token.*
-import aqua.parser.lexer.{DataTypeToken, Name, Token, TypeToken}
+import aqua.parser.lexer.{BasicTypeToken, Name, Token, TypeToken}
 import aqua.parser.lift.LiftParser
-import cats.parse.Parser as P
-import cats.{Comonad, ~>}
 import aqua.parser.lift.Span
 import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
 
-case class DeclareStreamExpr[F[_]](name: Name[F], `type`: DataTypeToken[F])
+import cats.parse.Parser as P
+import cats.{Comonad, ~>}
+
+case class DeclareStreamExpr[F[_]](name: Name[F], `type`: BasicTypeToken[F])
     extends Expr[F](DeclareStreamExpr, name) {
 
   override def mapK[K[_]: Comonad](fk: F ~> K): DeclareStreamExpr[K] =
@@ -20,7 +21,7 @@ case class DeclareStreamExpr[F[_]](name: Name[F], `type`: DataTypeToken[F])
 object DeclareStreamExpr extends Expr.Leaf {
 
   override val p: P[DeclareStreamExpr[Span.S]] =
-    ((Name.p <* ` : `) ~ DataTypeToken.`datatypedef`).map { case (name, t) =>
+    ((Name.p <* ` : `) ~ BasicTypeToken.`compositetypedef`).map { case (name, t) =>
       DeclareStreamExpr(name, t)
     }
 
@@ -3,39 +3,46 @@ package aqua.parser.lexer
 import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
 import aqua.parser.lift.LiftParser.*
+import aqua.parser.lift.Span
+import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
 import aqua.types.ScalarType
 
 import cats.Comonad
+import cats.data.NonEmptyList
 import cats.parse.{Accumulator0, Parser as P, Parser0 as P0}
 import cats.syntax.comonad.*
 import cats.syntax.functor.*
 import cats.~>
-import aqua.parser.lift.Span
-import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
-import cats.data.NonEmptyList
 
 sealed trait TypeToken[S[_]] extends Token[S] {
   def mapK[K[_]: Comonad](fk: S ~> K): TypeToken[K]
 }
 
-sealed trait DataTypeToken[S[_]] extends TypeToken[S] {
-  override def mapK[K[_]: Comonad](fk: S ~> K): DataTypeToken[K]
+sealed trait BasicTypeToken[S[_]] extends TypeToken[S] {
+  override def mapK[K[_]: Comonad](fk: S ~> K): BasicTypeToken[K]
 }
 
 case class TopBottomToken[S[_]: Comonad](override val unit: S[Unit], isTop: Boolean)
-    extends DataTypeToken[S] {
+    extends BasicTypeToken[S] {
   override def as[T](v: T): S[T] = unit.as(v)
   def isBottom: Boolean = !isTop
   override def mapK[K[_]: Comonad](fk: S ~> K): TopBottomToken[K] = copy(fk(unit), isTop)
 }
 
-case class ArrayTypeToken[S[_]: Comonad](override val unit: S[Unit], data: DataTypeToken[S])
-    extends DataTypeToken[S] {
+case class ArrayTypeToken[S[_]: Comonad](override val unit: S[Unit], data: BasicTypeToken[S])
+    extends BasicTypeToken[S] {
   override def as[T](v: T): S[T] = unit.as(v)
   override def mapK[K[_]: Comonad](fk: S ~> K): ArrayTypeToken[K] = copy(fk(unit), data.mapK(fk))
 }
 
-case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: DataTypeToken[S])
-    extends DataTypeToken[S] {
+object ArrayTypeToken {
+
+  val `arraytypedef`: P[ArrayTypeToken[Span.S]] =
+    (`[]`.lift ~ BasicTypeToken.`compositetypedef`).map(ud => ArrayTypeToken(ud._1, ud._2))
+}
+
+case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: BasicTypeToken[S])
+    extends BasicTypeToken[S] {
   override def as[T](v: T): S[T] = unit.as(v)
   override def mapK[K[_]: Comonad](fk: S ~> K): StreamTypeToken[K] = copy(fk(unit), data.mapK(fk))
 }
@ -43,14 +50,12 @@ case class StreamTypeToken[S[_]: Comonad](override val unit: S[Unit], data: Data
|
|||||||
object StreamTypeToken {
|
object StreamTypeToken {
|
||||||
|
|
||||||
val `streamtypedef`: P[StreamTypeToken[Span.S]] =
|
val `streamtypedef`: P[StreamTypeToken[Span.S]] =
|
||||||
((`*`.lift <* P.not(`*`).withContext("Nested streams '**type' are prohibited"))
|
(`*`.lift ~ BasicTypeToken.`compositetypedef`).map(ud => StreamTypeToken(ud._1, ud._2))
|
||||||
~ DataTypeToken.`withoutstreamdatatypedef`)
|
|
||||||
.map(ud => StreamTypeToken(ud._1, ud._2))
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
case class OptionTypeToken[F[_]: Comonad](override val unit: F[Unit], data: DataTypeToken[F])
|
case class OptionTypeToken[F[_]: Comonad](override val unit: F[Unit], data: BasicTypeToken[F])
|
||||||
extends DataTypeToken[F] {
|
extends BasicTypeToken[F] {
|
||||||
override def as[T](v: T): F[T] = unit.as(v)
|
override def as[T](v: T): F[T] = unit.as(v)
|
||||||
|
|
||||||
override def mapK[K[_]: Comonad](fk: F ~> K): OptionTypeToken[K] =
|
override def mapK[K[_]: Comonad](fk: F ~> K): OptionTypeToken[K] =
|
||||||
@ -60,11 +65,11 @@ case class OptionTypeToken[F[_]: Comonad](override val unit: F[Unit], data: Data
|
|||||||
object OptionTypeToken {
|
object OptionTypeToken {
|
||||||
|
|
||||||
val `optiontypedef`: P[OptionTypeToken[Span.S]] =
|
val `optiontypedef`: P[OptionTypeToken[Span.S]] =
|
||||||
(`?`.lift ~ DataTypeToken.`withoutstreamdatatypedef`).map(ud => OptionTypeToken(ud._1, ud._2))
|
(`?`.lift ~ BasicTypeToken.`compositetypedef`).map(ud => OptionTypeToken(ud._1, ud._2))
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
case class NamedTypeToken[F[_]: Comonad](name: F[String]) extends DataTypeToken[F] {
|
case class NamedTypeToken[F[_]: Comonad](name: F[String]) extends BasicTypeToken[F] {
|
||||||
override def as[T](v: T): F[T] = name.as(v)
|
override def as[T](v: T): F[T] = name.as(v)
|
||||||
def asName: Name[F] = Name[F](name)
|
def asName: Name[F] = Name[F](name)
|
||||||
|
|
||||||
@ -84,21 +89,22 @@ object NamedTypeToken {
|
|||||||
`Class`.repSep(`.`).string.lift.map(NamedTypeToken(_))
|
`Class`.repSep(`.`).string.lift.map(NamedTypeToken(_))
|
||||||
}
|
}
|
||||||
|
|
||||||
case class BasicTypeToken[F[_]: Comonad](scalarType: F[ScalarType]) extends DataTypeToken[F] {
|
case class ScalarTypeToken[F[_]: Comonad](scalarType: F[ScalarType]) extends BasicTypeToken[F] {
|
||||||
override def as[T](v: T): F[T] = scalarType.as(v)
|
override def as[T](v: T): F[T] = scalarType.as(v)
|
||||||
|
|
||||||
override def mapK[K[_]: Comonad](fk: F ~> K): BasicTypeToken[K] =
|
override def mapK[K[_]: Comonad](fk: F ~> K): ScalarTypeToken[K] =
|
||||||
copy(fk(scalarType))
|
copy(fk(scalarType))
|
||||||
|
|
||||||
def value: ScalarType = scalarType.extract
|
def value: ScalarType = scalarType.extract
|
||||||
}
|
}
|
||||||
|
|
||||||
object BasicTypeToken {
|
object ScalarTypeToken {
|
||||||
|
|
||||||
val `basictypedef`: P[BasicTypeToken[Span.S]] =
|
val scalartypedef: P[ScalarTypeToken[Span.S]] =
|
||||||
P.oneOf(
|
P.oneOf(
|
||||||
ScalarType.all.map(n ⇒ P.string(n.name).as(n)).toList
|
ScalarType.all.map(n ⇒ P.string(n.name).as(n)).toList
|
||||||
).lift
|
).lift
|
||||||
.map(BasicTypeToken(_))
|
.map(ScalarTypeToken.apply)
|
||||||
}
|
}
|
||||||
|
|
||||||
case class ArrowTypeToken[S[_]: Comonad](
|
case class ArrowTypeToken[S[_]: Comonad](
|
||||||
@ -153,29 +159,20 @@ object ArrowTypeToken {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
object DataTypeToken {
|
object BasicTypeToken {
|
||||||
|
|
||||||
val `arraytypedef`: P[ArrayTypeToken[Span.S]] =
|
|
||||||
(`[]`.lift ~ `withoutstreamdatatypedef`).map(ud => ArrayTypeToken(ud._1, ud._2))
|
|
||||||
|
|
||||||
val `topbottomdef`: P[TopBottomToken[Span.S]] =
|
val `topbottomdef`: P[TopBottomToken[Span.S]] =
|
||||||
`⊥`.lift.map(TopBottomToken(_, isTop = false)) |
|
`⊥`.lift.map(TopBottomToken(_, isTop = false)) |
|
||||||
`⊤`.lift.map(TopBottomToken(_, isTop = true))
|
`⊤`.lift.map(TopBottomToken(_, isTop = true))
|
||||||
|
|
||||||
def `withoutstreamdatatypedef`: P[DataTypeToken[Span.S]] =
|
def `compositetypedef`: P[BasicTypeToken[Span.S]] =
|
||||||
P.oneOf(
|
P.oneOf(
|
||||||
P.defer(`topbottomdef`) :: P.defer(`arraytypedef`) :: P.defer(
|
P.defer(`topbottomdef`) ::
|
||||||
OptionTypeToken.`optiontypedef`
|
P.defer(ArrayTypeToken.`arraytypedef`) ::
|
||||||
) :: BasicTypeToken.`basictypedef` :: NamedTypeToken.dotted :: Nil
|
P.defer(StreamTypeToken.`streamtypedef`) ::
|
||||||
)
|
P.defer(OptionTypeToken.`optiontypedef`) ::
|
||||||
|
ScalarTypeToken.`scalartypedef` ::
|
||||||
def `datatypedef`: P[DataTypeToken[Span.S]] =
|
NamedTypeToken.dotted :: Nil
|
||||||
P.oneOf(
|
|
||||||
P.defer(`topbottomdef`) :: P.defer(`arraytypedef`) :: P.defer(
|
|
||||||
StreamTypeToken.`streamtypedef`
|
|
||||||
) :: P.defer(
|
|
||||||
OptionTypeToken.`optiontypedef`
|
|
||||||
) :: BasicTypeToken.`basictypedef` :: NamedTypeToken.dotted :: Nil
|
|
||||||
)
|
)
|
||||||
|
|
||||||
}
|
}
|
||||||
@ -184,9 +181,8 @@ object TypeToken {
|
|||||||
|
|
||||||
val `typedef`: P[TypeToken[Span.S]] =
|
val `typedef`: P[TypeToken[Span.S]] =
|
||||||
P.oneOf(
|
P.oneOf(
|
||||||
ArrowTypeToken
|
ArrowTypeToken.`arrowdef`(BasicTypeToken.`compositetypedef`).backtrack ::
|
||||||
.`arrowdef`(DataTypeToken.`datatypedef`)
|
BasicTypeToken.`compositetypedef` :: Nil
|
||||||
.backtrack :: DataTypeToken.`datatypedef` :: Nil
|
|
||||||
)
|
)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@@ -3,25 +3,25 @@ package aqua
 import aqua.AquaSpec.spanToId
 import aqua.parser.expr.*
 import aqua.parser.expr.func.*
-import aqua.parser.lexer.InfixToken.Op as InfixOp
-import aqua.parser.lexer.PrefixToken.Op as PrefixOp
-import aqua.parser.lexer.InfixToken.Op.*
-import aqua.parser.lexer.PrefixToken.Op.*
 import aqua.parser.head.FromExpr.NameOrAbAs
 import aqua.parser.head.{FromExpr, UseFromExpr}
 import aqua.parser.lexer.*
+import aqua.parser.lexer.InfixToken.Op.*
+import aqua.parser.lexer.InfixToken.Op as InfixOp
+import aqua.parser.lexer.PrefixToken.Op.*
+import aqua.parser.lexer.PrefixToken.Op as PrefixOp
 import aqua.parser.lexer.Token.LiftToken
 import aqua.parser.lift.LiftParser.Implicits.idLiftParser
-import aqua.types.LiteralType.{bool, number, signed, string, unsigned}
-import aqua.types.{LiteralType, ScalarType}
-import cats.{~>, Id}
-import org.scalatest.EitherValues
 import aqua.parser.lift.Span
 import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
-import cats.~>
-import cats.syntax.bifunctor.*
-import cats.data.NonEmptyList
+import aqua.types.LiteralType.{bool, number, signed, string, unsigned}
+import aqua.types.{LiteralType, ScalarType}
 
+import cats.data.NonEmptyList
+import cats.syntax.bifunctor.*
+import cats.{Id, ~>}
+import cats.~>
+import org.scalatest.EitherValues
 import scala.collection.mutable
 import scala.language.implicitConversions
 
@@ -71,14 +71,14 @@ object AquaSpec {
   def toArrayType(str: String): ArrayTypeToken[Id] = ArrayTypeToken[Id]((), str)
 
   def toArrowType(
-    args: List[DataTypeToken[Id]],
-    res: Option[DataTypeToken[Id]]
+    args: List[BasicTypeToken[Id]],
+    res: Option[BasicTypeToken[Id]]
   ): ArrowTypeToken[Id] =
     ArrowTypeToken[Id]((), args.map(None -> _), res.toList)
 
   def toNamedArrow(
     args: List[(String, TypeToken[Id])],
-    res: List[DataTypeToken[Id]]
+    res: List[BasicTypeToken[Id]]
   ): ArrowTypeToken[Id] =
     ArrowTypeToken[Id]((), args.map(ab => Some(Name[Id](ab._1)) -> ab._2), res)
 
@@ -90,15 +90,15 @@ object AquaSpec {
   def toArgSc(str: String, scalarType: ScalarType): Arg[Id] =
     Arg[Id](str, scToBt(scalarType))
 
-  def scToBt(sc: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](sc)
+  def scToBt(sc: ScalarType): ScalarTypeToken[Id] = ScalarTypeToken[Id](sc)
 
-  val boolSc: BasicTypeToken[Id] = BasicTypeToken[Id](ScalarType.bool)
-  val stringSc: BasicTypeToken[Id] = BasicTypeToken[Id](ScalarType.string)
+  val boolSc: ScalarTypeToken[Id] = ScalarTypeToken[Id](ScalarType.bool)
+  val stringSc: ScalarTypeToken[Id] = ScalarTypeToken[Id](ScalarType.string)
 
   given Conversion[String, Name[Id]] = toName
   given Conversion[String, NamedTypeToken[Id]] = toNamedType
   given Conversion[Int, LiteralToken[Id]] = toNumber
-  given Conversion[ScalarType, BasicTypeToken[Id]] = scToBt
+  given Conversion[ScalarType, ScalarTypeToken[Id]] = scToBt
 }
 
 trait AquaSpec extends EitherValues {
 
@@ -7,21 +7,20 @@ import aqua.parser.lexer.*
 import aqua.parser.lift.Span
 import aqua.types.ScalarType.*
 
-import cats.{Eval, Id}
 import cats.data.Chain.*
 import cats.data.Validated.{Invalid, Valid}
 import cats.data.{Chain, NonEmptyList}
 import cats.free.Cofree
 import cats.syntax.foldable.*
+import cats.{Eval, Id}
 import org.scalatest.flatspec.AnyFlatSpec
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.{Inside, Inspectors}
 
 import scala.collection.mutable
 import scala.language.implicitConversions
 
 class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors with AquaSpec {
-  import AquaSpec.{*, given}
+  import AquaSpec.{given, *}
 
   private val parser = Parser.spanParser
 
@@ -31,7 +30,7 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors
     )
 
     val arrowToken =
-      ArrowTypeToken[Id]((), List(None -> BasicTypeToken[Id](u8)), List(BasicTypeToken[Id](bool)))
+      ArrowTypeToken[Id]((), List(None -> ScalarTypeToken[Id](u8)), List(ScalarTypeToken[Id](bool)))
     arrowExpr("(peer: PeerId, other: u8 -> bool)") should be(
       ArrowExpr[Id](
         toNamedArrow(("peer" -> toNamedType("PeerId")) :: ("other" -> arrowToken) :: Nil, Nil)
@@ -41,8 +40,8 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors
     val arrowToken2 =
       ArrowTypeToken[Id](
         (),
-        List(None -> BasicTypeToken[Id](u32), None -> BasicTypeToken[Id](u64)),
-        List(BasicTypeToken[Id](bool))
+        List(None -> ScalarTypeToken[Id](u32), None -> ScalarTypeToken[Id](u64)),
+        List(ScalarTypeToken[Id](bool))
       )
     arrowExpr("(peer: PeerId, other: u32, u64 -> bool)") should be(
       ArrowExpr[Id](
@@ -50,12 +49,12 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors
       )
     )
 
-    val arrowToken3 = ArrowTypeToken[Id]((), List(None -> BasicTypeToken[Id](u32)), Nil)
+    val arrowToken3 = ArrowTypeToken[Id]((), List(None -> ScalarTypeToken[Id](u32)), Nil)
     arrowExpr("(peer: PeerId, ret: u32 -> ()) -> string, u32") should be(
       ArrowExpr[Id](
         toNamedArrow(
           ("peer" -> toNamedType("PeerId")) :: ("ret" -> arrowToken3) :: Nil,
-          BasicTypeToken[Id](string) :: BasicTypeToken[Id](u32) :: Nil
+          ScalarTypeToken[Id](string) :: ScalarTypeToken[Id](u32) :: Nil
         )
       )
     )
 
|
|||||||
|
|
||||||
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
|
||||||
import aqua.types.ScalarType
|
import aqua.types.ScalarType
|
||||||
import aqua.types.ScalarType.u32
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.parse.Parser
|
import cats.parse.Parser
|
||||||
|
import cats.syntax.option.*
|
||||||
import org.scalatest.EitherValues
|
import org.scalatest.EitherValues
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
import org.scalatest.matchers.should.Matchers
|
import org.scalatest.matchers.should.Matchers
|
||||||
|
|
||||||
import scala.language.implicitConversions
|
|
||||||
|
|
||||||
class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
||||||
|
|
||||||
import aqua.AquaSpec._
|
import aqua.AquaSpec._
|
||||||
|
|
||||||
implicit def strToBt(st: ScalarType): BasicTypeToken[Id] = BasicTypeToken[Id](st)
|
def stToStt(st: ScalarType): ScalarTypeToken[Id] = ScalarTypeToken(st)
|
||||||
|
|
||||||
"Basic type" should "parse" in {
|
"basic type token" should "parse scalar types" in {
|
||||||
BasicTypeToken.`basictypedef`.parseAll("u32").value.mapK(spanToId) should be(strToBt(u32))
|
ScalarType.all.foreach(st =>
|
||||||
BasicTypeToken.`basictypedef`.parseAll("()").isLeft should be(true)
|
ScalarTypeToken.`scalartypedef`
|
||||||
|
.parseAll(st.name)
|
||||||
|
.value
|
||||||
|
.mapK(spanToId) should be(stToStt(st))
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
"Return type" should "parse" in {
|
it should "not parse empty brackets" in {
|
||||||
|
ScalarTypeToken.`scalartypedef`
|
||||||
|
.parseAll("()")
|
||||||
|
.isLeft should be(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
"arrow type token" should "parse type def" in {
|
||||||
|
|
||||||
def typedef(str: String) =
|
def typedef(str: String) =
|
||||||
ArrowTypeToken.typeDef().parseAll(str).value.mapK(spanToId)
|
ArrowTypeToken.typeDef().parseAll(str).value.mapK(spanToId)
|
||||||
|
|
||||||
def returndef(str: String) =
|
|
||||||
ArrowTypeToken.returnDef().parseAll(str).value.map(_.mapK(spanToId))
|
|
||||||
|
|
||||||
typedef("(A -> ())") should be(
|
typedef("(A -> ())") should be(
|
||||||
ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), Nil)
|
ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), Nil)
|
||||||
)
|
)
|
||||||
|
|
||||||
typedef("(A -> B)") should be(
|
typedef("(A -> B)") should be(
|
||||||
ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), List(NamedTypeToken[Id]("B")))
|
ArrowTypeToken[Id]((), List((None, NamedTypeToken[Id]("A"))), List(NamedTypeToken[Id]("B")))
|
||||||
)
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
it should "parse return def" in {
|
||||||
|
def returndef(str: String) =
|
||||||
|
ArrowTypeToken.returnDef().parseAll(str).value.map(_.mapK(spanToId))
|
||||||
|
|
||||||
returndef("(A -> B), (C -> D)") should be(
|
returndef("(A -> B), (C -> D)") should be(
|
||||||
List(
|
List(
|
||||||
@ -69,11 +81,16 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
"Arrow type" should "parse" in {
|
it should "parse arrow def" in {
|
||||||
def arrowdef(str: String) =
|
def arrowdef(str: String) =
|
||||||
ArrowTypeToken.`arrowdef`(DataTypeToken.`datatypedef`).parseAll(str).value.mapK(spanToId)
|
ArrowTypeToken
|
||||||
|
.`arrowdef`(BasicTypeToken.`compositetypedef`)
|
||||||
|
.parseAll(str)
|
||||||
|
.value
|
||||||
|
.mapK(spanToId)
|
||||||
|
|
||||||
def arrowWithNames(str: String) = ArrowTypeToken
|
def arrowWithNames(str: String) = ArrowTypeToken
|
||||||
.`arrowWithNames`(DataTypeToken.`datatypedef`)
|
.`arrowWithNames`(BasicTypeToken.`compositetypedef`)
|
||||||
.parseAll(str)
|
.parseAll(str)
|
||||||
.value
|
.value
|
||||||
.mapK(spanToId)
|
.mapK(spanToId)
|
||||||
@ -81,6 +98,7 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
|||||||
arrowdef("-> B") should be(
|
arrowdef("-> B") should be(
|
||||||
ArrowTypeToken[Id]((), Nil, List(NamedTypeToken[Id]("B")))
|
ArrowTypeToken[Id]((), Nil, List(NamedTypeToken[Id]("B")))
|
||||||
)
|
)
|
||||||
|
|
||||||
arrowdef("A -> B") should be(
|
arrowdef("A -> B") should be(
|
||||||
ArrowTypeToken[Id](
|
ArrowTypeToken[Id](
|
||||||
(),
|
(),
|
||||||
@ -147,9 +165,11 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
|||||||
arrowWithNames("{SomeAb, SecondAb}(a: A) -> B") should be(
|
arrowWithNames("{SomeAb, SecondAb}(a: A) -> B") should be(
|
||||||
ArrowTypeToken[Id](
|
ArrowTypeToken[Id](
|
||||||
(),
|
(),
|
||||||
(Some(Name[Id]("SomeAb")) -> NamedTypeToken[Id]("SomeAb")) :: (Some(Name[Id](
|
(Some(Name[Id]("SomeAb")) -> NamedTypeToken[Id]("SomeAb")) :: (Some(
|
||||||
"SecondAb"
|
Name[Id](
|
||||||
)) -> NamedTypeToken[Id]("SecondAb")) :: (
|
"SecondAb"
|
||||||
|
)
|
||||||
|
) -> NamedTypeToken[Id]("SecondAb")) :: (
|
||||||
Some(Name[Id]("a")) -> NamedTypeToken[Id]("A")
|
Some(Name[Id]("a")) -> NamedTypeToken[Id]("A")
|
||||||
) :: Nil,
|
) :: Nil,
|
||||||
List(NamedTypeToken[Id]("B"))
|
List(NamedTypeToken[Id]("B"))
|
||||||
@ -159,25 +179,28 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
|||||||
arrowdef("u32 -> Boo") should be(
|
arrowdef("u32 -> Boo") should be(
|
||||||
ArrowTypeToken[Id](
|
ArrowTypeToken[Id](
|
||||||
(),
|
(),
|
||||||
(None -> strToBt(u32)) :: Nil,
|
(None -> stToStt(ScalarType.u32)) :: Nil,
|
||||||
List(NamedTypeToken[Id]("Boo"))
|
List(NamedTypeToken[Id]("Boo"))
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
TypeToken.`typedef`.parseAll("u32 -> ()").value.mapK(spanToId) should be(
|
TypeToken.`typedef`.parseAll("u32 -> ()").value.mapK(spanToId) should be(
|
||||||
ArrowTypeToken[Id]((), (None -> strToBt(u32)) :: Nil, Nil)
|
ArrowTypeToken[Id]((), (None -> stToStt(ScalarType.u32)) :: Nil, Nil)
|
||||||
)
|
)
|
||||||
|
|
||||||
arrowdef("A, u32 -> B") should be(
|
arrowdef("A, u32 -> B") should be(
|
||||||
ArrowTypeToken[Id](
|
ArrowTypeToken[Id](
|
||||||
(),
|
(),
|
||||||
(None -> NamedTypeToken[Id]("A")) :: (None -> strToBt(u32)) :: Nil,
|
(None -> NamedTypeToken[Id]("A")) :: (None -> stToStt(ScalarType.u32)) :: Nil,
|
||||||
List(NamedTypeToken[Id]("B"))
|
List(NamedTypeToken[Id]("B"))
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
arrowdef("[]Absolutely, u32 -> B, C") should be(
|
arrowdef("[]Absolutely, u32 -> B, C") should be(
|
||||||
ArrowTypeToken[Id](
|
ArrowTypeToken[Id](
|
||||||
(),
|
(),
|
||||||
(Option.empty[Name[Id]] -> ArrayTypeToken[Id]((), NamedTypeToken[Id]("Absolutely"))) ::
|
(Option.empty[Name[Id]] -> ArrayTypeToken[Id]((), NamedTypeToken[Id]("Absolutely"))) ::
|
||||||
(Option.empty[Name[Id]] -> strToBt(u32)) :: Nil,
|
(Option.empty[Name[Id]] -> stToStt(ScalarType.u32)) :: Nil,
|
||||||
NamedTypeToken[Id]("B") ::
|
NamedTypeToken[Id]("B") ::
|
||||||
NamedTypeToken[Id]("C") :: Nil
|
NamedTypeToken[Id]("C") :: Nil
|
||||||
)
|
)
|
||||||
@ -185,18 +208,46 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
"Array type" should "parse" in {
|
"data type token" should "parse nested types" in {
|
||||||
def typedef(str: String) = TypeToken.`typedef`.parseAll(str).value.mapK(spanToId)
|
def typedef(str: String): BasicTypeToken[Id] =
|
||||||
|
BasicTypeToken.`compositetypedef`.parseAll(str).value.mapK(spanToId)
|
||||||
|
|
||||||
typedef("[]Something") should be(
|
val baseTypes: List[(String, BasicTypeToken[Id])] = List(
|
||||||
ArrayTypeToken[Id]((), NamedTypeToken[Id]("Something"))
|
"u32" -> stToStt(ScalarType.u32),
|
||||||
|
"string" -> stToStt(ScalarType.string),
|
||||||
|
"Named" -> NamedTypeToken[Id]("Named")
|
||||||
)
|
)
|
||||||
typedef("[]u32") should be(
|
|
||||||
ArrayTypeToken[Id]((), strToBt(u32))
|
val modifiers: List[(String, BasicTypeToken[Id] => BasicTypeToken[Id])] = List(
|
||||||
)
|
"[]" -> ((t: BasicTypeToken[Id]) => ArrayTypeToken[Id]((), t)),
|
||||||
typedef("[][]u32") should be(
|
"?" -> ((t: BasicTypeToken[Id]) => OptionTypeToken[Id]((), t)),
|
||||||
ArrayTypeToken[Id]((), ArrayTypeToken[Id]((), strToBt(u32)))
|
"*" -> ((t: BasicTypeToken[Id]) => StreamTypeToken[Id]((), t))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
LazyList
|
||||||
|
// Generate all cartesian products of modifiers
|
||||||
|
.unfold(modifiers)(prod =>
|
||||||
|
(
|
||||||
|
prod,
|
||||||
|
for {
|
||||||
|
m <- modifiers
|
||||||
|
(sm, mt) = m
|
||||||
|
p <- prod
|
||||||
|
(sp, pt) = p
|
||||||
|
} yield (sm + sp, mt.compose(pt))
|
||||||
|
).some
|
||||||
|
)
|
||||||
|
.take(6)
|
||||||
|
.foreach { mods =>
|
||||||
|
for {
|
||||||
|
base <- baseTypes
|
||||||
|
(bs, bt) = base
|
||||||
|
mod <- mods
|
||||||
|
(ms, mt) = mod
|
||||||
|
// Apply modifiers to base type
|
||||||
|
(st, t) = (ms + bs, mt(bt))
|
||||||
|
} typedef(st) should be(t)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@@ -4,14 +4,15 @@ import aqua.parser.expr.AliasExpr
 import aqua.raw.{Raw, TypeRaw}
 import aqua.semantics.Prog
 import aqua.semantics.rules.types.TypesAlgebra
-import cats.syntax.functor.*
-import cats.Monad
 import cats.Applicative
+import cats.Monad
 import cats.syntax.flatMap.*
+import cats.syntax.functor.*
 
 class AliasSem[S[_]](val expr: AliasExpr[S]) extends AnyVal {
 
-  def program[Alg[_]: Monad](implicit T: TypesAlgebra[S, Alg]): Prog[Alg, Raw] =
+  def program[Alg[_]: Monad](using T: TypesAlgebra[S, Alg]): Prog[Alg, Raw] =
     T.resolveType(expr.target).flatMap {
       case Some(t) => T.defineAlias(expr.name, t) as (TypeRaw(expr.name.value, t): Raw)
       case None => Applicative[Alg].pure(Raw.error("Alias type unresolved"))
 
@@ -58,13 +58,13 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal {
       // TODO: wrap with local on...via...
       val retsAndArgs = retValues zip funcArrow.codomain.toList
 
-      val dataArgsNames = funcArrow.domain.labelledData.map { case (name, _) => name }
+      val streamArgNames = funcArrow.domain.labelledStreams.map { case (name, _) => name }
       val streamsThatReturnAsStreams = retsAndArgs.collect {
         case (VarRaw(n, StreamType(_)), StreamType(_)) => n
       }.toSet
 
       // Remove arguments, and values returned as streams
-      val localStreams = streamsInScope -- dataArgsNames -- streamsThatReturnAsStreams
+      val localStreams = streamsInScope -- streamArgNames -- streamsThatReturnAsStreams
 
       // process stream that returns as not streams and all Apply*Raw
       retsAndArgs.traverse {
 
@@ -1,48 +1,39 @@
 package aqua.semantics.expr.func
 
-import aqua.raw.ops.DeclareStreamTag
+import aqua.helpers.syntax.optiont.*
 import aqua.parser.expr.func.DeclareStreamExpr
 import aqua.raw.Raw
+import aqua.raw.ops.DeclareStreamTag
 import aqua.raw.value.VarRaw
 import aqua.semantics.Prog
 import aqua.semantics.rules.names.NamesAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
-import aqua.types.{ArrayType, OptionType, StreamType}
+import aqua.types.*
 
 import cats.Monad
 import cats.data.Chain
+import cats.data.OptionT
 import cats.syntax.applicative.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 
 class DeclareStreamSem[S[_]](val expr: DeclareStreamExpr[S]) {
 
-  def program[Alg[_]: Monad](implicit
+  def program[Alg[_]: Monad](using
     N: NamesAlgebra[S, Alg],
     T: TypesAlgebra[S, Alg]
-  ): Prog[Alg, Raw] =
-    Prog.leaf(
-      T.resolveType(expr.`type`)
-        .flatMap {
-          case Some(t: StreamType) =>
-            N.define(expr.name, t).map(b => Option.when(b)(t))
-          case Some(t: OptionType) =>
-            val streamType = StreamType(t.element)
-            N.define(expr.name, streamType).map(b => Option.when(b)(streamType))
-          case Some(at @ ArrayType(t)) =>
-            val streamType = StreamType(t)
-            T.ensureTypeMatches(expr.`type`, streamType, at).map(b => Option.when(b)(streamType))
-          case Some(t) =>
-            val streamType = StreamType(t)
-            T.ensureTypeMatches(expr.`type`, streamType, t).map(b => Option.when(b)(streamType))
-          case None =>
-            None.pure[Alg]
-        }
-        .map {
-          case Some(streamType) =>
-            val valueModel = VarRaw(expr.name.value, streamType)
-            DeclareStreamTag(valueModel).funcOpLeaf: Raw
-          case None => Raw.error(s"Name `${expr.name.value}` not defined")
-        }
-    )
+  ): Prog[Alg, Raw] = Prog.leaf {
+    val sem = for {
+      streamType <- OptionT(
+        T.resolveStreamType(expr.`type`)
+      )
+      _ <- OptionT.withFilterF(
+        N.define(expr.name, streamType)
+      )
+      valueModel = VarRaw(expr.name.value, streamType)
+    } yield DeclareStreamTag(valueModel).funcOpLeaf: Raw
+
+    sem.getOrElse(Raw.error(s"Name `${expr.name.value}` not defined"))
+  }
 
 }
 
@@ -1,27 +1,27 @@
 package aqua.semantics.expr.func
 
-import aqua.raw.Raw
 import aqua.parser.expr.func.ForExpr
+import aqua.parser.expr.func.ForExpr.Mode
 import aqua.parser.lexer.{Name, ValueToken}
-import aqua.raw.value.ValueRaw
+import aqua.raw.Raw
 import aqua.raw.ops.*
 import aqua.raw.ops.ForTag
+import aqua.raw.value.ValueRaw
 import aqua.semantics.Prog
+import aqua.semantics.expr.func.FuncOpSem
 import aqua.semantics.rules.ValuesAlgebra
 import aqua.semantics.rules.abilities.AbilitiesAlgebra
 import aqua.semantics.rules.names.NamesAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
-import aqua.types.{ArrayType, BoxType, StreamType}
-import aqua.semantics.expr.func.FuncOpSem
+import aqua.types.*
 
 import cats.Monad
-import cats.data.Chain
+import cats.data.{Chain, OptionT}
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
 import cats.syntax.option.*
-import aqua.parser.expr.func.ForExpr.Mode
 
 class ForSem[S[_]](val expr: ForExpr[S]) extends AnyVal {
 
@@ -74,19 +74,18 @@ class ForSem[S[_]](val expr: ForExpr[S]) extends AnyVal {
 
 object ForSem {
 
-  def beforeFor[S[_], F[_]: Monad](item: Name[S], iterable: ValueToken[S])(implicit
+  def beforeFor[S[_], F[_]: Monad](
+    item: Name[S],
+    iterable: ValueToken[S]
+  )(using
     V: ValuesAlgebra[S, F],
     N: NamesAlgebra[S, F],
     T: TypesAlgebra[S, F]
-  ): F[Option[ValueRaw]] =
-    V.valueToRaw(iterable).flatMap {
-      case Some(vm) =>
-        vm.`type` match {
-          case t: BoxType =>
-            N.define(item, t.element).as(vm.some)
-          case dt =>
-            T.ensureTypeMatches(iterable, ArrayType(dt), dt).as(none)
-        }
-      case _ => none.pure
-    }
+  ): F[Option[ValueRaw]] = (for {
+    value <- V.valueToIterable(iterable)
+    (raw, typ) = value
+    _ <- OptionT.liftF(
+      N.define(item, typ.element)
+    )
+  } yield raw).value
 }
 
@@ -1,17 +1,17 @@
 package aqua.semantics.expr.func
 
+import aqua.raw.Raw
+import aqua.raw.ops.{RawTag, RestrictionTag}
+import aqua.semantics.rules.names.NamesAlgebra
+
 import cats.Monad
 import cats.syntax.functor.*
 
-import aqua.semantics.rules.names.NamesAlgebra
-import aqua.raw.Raw
-import aqua.raw.ops.{RawTag, RestrictionTag}
-
 object FuncOpSem {
 
-  def restrictStreamsInScope[S[_], Alg[_]: Monad](tree: RawTag.Tree)(using
-    N: NamesAlgebra[S, Alg]
-  ): Alg[RawTag.Tree] = N
+  def restrictStreamsInScope[S[_], Alg[_]: Monad](
+    tree: RawTag.Tree
+  )(using N: NamesAlgebra[S, Alg]): Alg[RawTag.Tree] = N
     .streamsDefinedWithinScope()
     .map(streams =>
      streams.toList
 
@@ -1,23 +1,23 @@
 package aqua.semantics.expr.func
 
-import aqua.raw.ops.{FuncOp, OnTag}
 import aqua.parser.expr.func.OnExpr
 import aqua.parser.lexer.ValueToken
 import aqua.raw.Raw
+import aqua.raw.ops.{FuncOp, OnTag}
 import aqua.raw.value.ValueRaw
 import aqua.semantics.Prog
 import aqua.semantics.rules.ValuesAlgebra
 import aqua.semantics.rules.abilities.AbilitiesAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
-import aqua.types.{BoxType, OptionType, ScalarType}
+import aqua.types.{CollectionType, OptionType, ScalarType}
 
 import cats.data.Chain
 import cats.data.OptionT
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
-import cats.syntax.traverse.*
 import cats.syntax.functor.*
+import cats.syntax.traverse.*
 import cats.{Monad, Traverse}
 
 class OnSem[S[_]](val expr: OnExpr[S]) extends AnyVal {
@@ -64,7 +64,7 @@ object OnSem {
       .traverse(v =>
         OptionT(V.valueToRaw(v)).filterF { vm =>
           val expectedType = vm.`type` match {
-            case _: BoxType => OptionType(ScalarType.string)
+            case _: CollectionType => OptionType(ScalarType.string)
             case _ => ScalarType.string
           }
 
@@ -1,24 +1,24 @@
 package aqua.semantics.expr.func
 
-import aqua.raw.Raw
 import aqua.parser.expr.func.ParSeqExpr
-import aqua.raw.value.ValueRaw
+import aqua.raw.Raw
 import aqua.raw.ops.*
 import aqua.raw.ops.ForTag
+import aqua.raw.value.ValueRaw
 import aqua.semantics.Prog
 import aqua.semantics.rules.ValuesAlgebra
 import aqua.semantics.rules.abilities.AbilitiesAlgebra
 import aqua.semantics.rules.names.NamesAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
-import aqua.types.{ArrayType, BoxType, StreamType}
+import aqua.types.{ArrayType, CollectionType, StreamType}
 
 import cats.Monad
 import cats.data.Chain
-import cats.syntax.option.*
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
+import cats.syntax.option.*
 
 class ParSeqSem[S[_]](val expr: ParSeqExpr[S]) extends AnyVal {
 
@@ -1,15 +1,19 @@
 package aqua.semantics.expr.func
 
-import aqua.raw.ops.{Call, PushToStreamTag}
+import aqua.helpers.syntax.optiont.*
 import aqua.parser.expr.func.PushToStreamExpr
 import aqua.parser.lexer.Token
 import aqua.raw.Raw
+import aqua.raw.ops.{Call, PushToStreamTag}
 import aqua.semantics.Prog
 import aqua.semantics.rules.ValuesAlgebra
 import aqua.semantics.rules.names.NamesAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
-import aqua.types.{ArrayType, StreamType, Type}
+import aqua.types.*
+import aqua.types.TopType
 
 import cats.Monad
+import cats.data.OptionT
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
@@ -22,24 +26,14 @@ class PushToStreamSem[S[_]](val expr: PushToStreamExpr[S]) extends AnyVal {
     elementToken: Token[S],
     stream: Type,
     element: Type
-  )(implicit
-    T: TypesAlgebra[S, Alg]
-  ): Alg[Boolean] =
-    stream match {
-      case StreamType(st) =>
-        T.ensureTypeMatches(elementToken, st, element)
-      case _ =>
-        T.ensureTypeMatches(
-          streamToken,
-          StreamType(element match {
-            case StreamType(e) => ArrayType(e)
-            case _ => element
-          }),
-          stream
-        )
-    }
+  )(using T: TypesAlgebra[S, Alg]): Alg[Boolean] = (
+    T.typeToStream(streamToken, stream),
+    T.typeToCollectible(elementToken, element)
+  ).merged.semiflatMap { case (st, et) =>
+    T.ensureTypeMatches(elementToken, st.element, et)
+  }.getOrElse(false)
 
-  def program[Alg[_]: Monad](implicit
+  def program[Alg[_]: Monad](using
     N: NamesAlgebra[S, Alg],
     T: TypesAlgebra[S, Alg],
     V: ValuesAlgebra[S, Alg]
 
@@ -1,6 +1,9 @@
 package aqua.semantics.rules
 
+import aqua.errors.Errors.internalError
+import aqua.helpers.syntax.optiont.*
 import aqua.parser.lexer.*
+import aqua.parser.lexer.InfixToken.value
 import aqua.parser.lexer.InfixToken.{BoolOp, CmpOp, EqOp, MathOp, Op as InfOp}
 import aqua.parser.lexer.PrefixToken.Op as PrefOp
 import aqua.raw.value.*
@@ -9,7 +12,6 @@ import aqua.semantics.rules.names.NamesAlgebra
 import aqua.semantics.rules.report.ReportAlgebra
 import aqua.semantics.rules.types.TypesAlgebra
 import aqua.types.*
-import aqua.helpers.syntax.optiont.*
 
 import cats.Monad
 import cats.data.{NonEmptyList, OptionT}
@@ -151,26 +153,22 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
             raws
               .zip(values)
               .traverse { case (raw, token) =>
-                T.ensureTypeIsCollectible(token, raw.`type`)
-                  .map(Option.when(_)(raw))
+                T.typeToCollectible(token, raw.`type`).map(raw -> _)
               }
-              .map(_.sequence)
+              .value
           )
           raw = valuesRawChecked.map(raws =>
            NonEmptyList
              .fromList(raws)
              .fold(ValueRaw.Nil) { nonEmpty =>
-                val element = raws.map(_.`type`).reduceLeft(_ `∩` _)
-                // In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
-                // But we want to get to TopType instead: this would mean that intersection is empty, and you cannot
-                // make any decision about the structure of type, but can push anything inside
-                val elementNotBottom = if (element == BottomType) TopType else element
+                val (values, types) = nonEmpty.unzip
+                val element = CollectionType.elementTypeOf(types.toList)
                 CollectionRaw(
-                  nonEmpty,
+                  values,
                   ct.mode match {
-                    case CollectionToken.Mode.StreamMode => StreamType(elementNotBottom)
-                    case CollectionToken.Mode.ArrayMode => ArrayType(elementNotBottom)
-                    case CollectionToken.Mode.OptionMode => OptionType(elementNotBottom)
+                    case CollectionToken.Mode.StreamMode => StreamType(element)
+                    case CollectionToken.Mode.ArrayMode => ArrayType(element)
+                    case CollectionToken.Mode.OptionMode => OptionType(element)
                   }
                 )
              }
@@ -323,14 +321,19 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
         }
       )
 
+  def valueToIterable(v: ValueToken[S]): OptionT[Alg, (ValueRaw, CollectionType)] =
+    for {
+      raw <- OptionT(valueToRaw(v))
+      typ <- T.typeToIterable(v, raw.`type`)
+    } yield raw -> typ
+
   def valueToTypedRaw(v: ValueToken[S], expectedType: Type): Alg[Option[ValueRaw]] =
-    OptionT(valueToRaw(v))
-      .flatMap(raw =>
-        OptionT.whenM(
-          T.ensureTypeMatches(v, expectedType, raw.`type`)
-        )(raw.pure)
-      )
-      .value
+    (for {
+      raw <- OptionT(valueToRaw(v))
+      _ <- OptionT.withFilterF(
+        T.ensureTypeMatches(v, expectedType, raw.`type`)
+      )
+    } yield raw).value
 
   def valueToStringRaw(v: ValueToken[S]): Alg[Option[ValueRaw]] =
     valueToTypedRaw(v, LiteralType.string)
 
@@ -0,0 +1,112 @@
+package aqua.semantics.rules.types
+
+import aqua.parser.lexer.*
+import aqua.types.*
+
+import cats.data.ValidatedNec
+import cats.syntax.apply.*
+import cats.syntax.bifunctor.*
+import cats.syntax.either.*
+import cats.syntax.functor.*
+import cats.syntax.option.*
+import cats.syntax.traverse.*
+import cats.syntax.validated.*
+
+final case class TypeResolution[S[_], +T](
+  `type`: T,
+  definitions: List[(Token[S], NamedTypeToken[S])]
+)
+
+object TypeResolution {
+
+  final case class TypeResolutionError[S[_]](
+    token: Token[S],
+    hint: String
+  )
+
+  type Res[S[_], A] = ValidatedNec[
+    TypeResolutionError[S],
+    TypeResolution[S, A]
+  ]
+
+  private def resolveCollection[S[_]](
+    tt: TypeToken[S],
+    collectionName: String,
+    collectionType: DataType => Type
+  )(state: TypesState[S]): Res[S, Type] =
+    resolveTypeToken(tt)(state).andThen {
+      case TypeResolution(it: DataType, t) =>
+        TypeResolution(collectionType(it), t).validNec
+      case TypeResolution(it, _) =>
+        TypeResolutionError(
+          tt,
+          s"$collectionName could not contain values of type $it"
+        ).invalidNec
+    }
+
+  def resolveTypeToken[S[_]](
+    tt: TypeToken[S]
+  )(state: TypesState[S]): Res[S, Type] =
+    tt match {
+      case TopBottomToken(_, isTop) =>
+        val `type` = if (isTop) TopType else BottomType
+
+        TypeResolution(`type`, Nil).validNec
+      case ArrayTypeToken(_, dtt) =>
+        resolveCollection(dtt, "Array", ArrayType.apply)(state)
+      case StreamTypeToken(_, dtt) =>
+        resolveCollection(dtt, "Stream", StreamType.apply)(state)
+      case OptionTypeToken(_, dtt) =>
+        resolveCollection(dtt, "Option", OptionType.apply)(state)
+      case ntt: NamedTypeToken[S] =>
+        val defs = state
+          .getTypeDefinition(ntt.value)
+          .toList
+          .map(ntt -> _)
+
+        state
+          .getType(ntt.value)
+          .map(typ => TypeResolution(typ, defs))
+          .toValidNec(
+            TypeResolutionError(
+              ntt,
+              s"Type ${ntt.value} is not defined"
+            )
+          )
+      case stt: ScalarTypeToken[S] =>
+        TypeResolution(stt.value, Nil).validNec
+      case att: ArrowTypeToken[S] =>
+        resolveArrowDef(att)(state)
+    }
+
+  def resolveArrowDef[S[_]](
+    arrowTypeToken: ArrowTypeToken[S]
+  )(state: TypesState[S]): Res[S, ArrowType] = {
+    val res = arrowTypeToken.res
+      .traverse(typeToken => resolveTypeToken(typeToken)(state).toEither)
+    val args = arrowTypeToken.args.traverse { case (argName, typeToken) =>
+      resolveTypeToken(typeToken)(state)
+        .map(argName.map(_.value) -> _)
+        .toEither
+    }
+
+    (args, res).mapN { (args, res) =>
+      val (argsLabeledTypes, argsTokens) =
+        args.map { case lbl -> TypeResolution(typ, tkn) =>
+          (lbl, typ) -> tkn
+        }.unzip.map(_.flatten)
+      val (resTypes, resTokens) =
+        res.map { case TypeResolution(typ, tkn) =>
+          typ -> tkn
+        }.unzip.map(_.flatten)
+
+      val typ = ArrowType(
+        ProductType.maybeLabelled(argsLabeledTypes),
+        ProductType(resTypes)
+      )
+      val defs = (argsTokens ++ resTokens)
+
+      TypeResolution(typ, defs)
+    }.toValidated
+  }
+}
 
@@ -3,14 +3,18 @@ package aqua.semantics.rules.types
 import aqua.parser.lexer.*
 import aqua.raw.value.{PropertyRaw, ValueRaw}
 import aqua.types.*
+import aqua.types.Type.*
 
-import cats.data.NonEmptyMap
 import cats.data.NonEmptyList
+import cats.data.NonEmptyMap
+import cats.data.OptionT
 
 trait TypesAlgebra[S[_], Alg[_]] {
 
   def resolveType(token: TypeToken[S]): Alg[Option[Type]]
 
+  def resolveStreamType(token: TypeToken[S]): Alg[Option[StreamType]]
+
   def resolveNamedType(token: TypeToken[S]): Alg[Option[AbilityType | StructType]]
 
   def getType(name: String): Alg[Option[Type]]
@@ -56,7 +60,11 @@ trait TypesAlgebra[S[_], Alg[_]] {
 
   def ensureTypeMatches(token: Token[S], expected: Type, givenType: Type): Alg[Boolean]
 
-  def ensureTypeIsCollectible(token: Token[S], givenType: Type): Alg[Boolean]
+  def typeToCollectible(token: Token[S], givenType: Type): OptionT[Alg, CollectibleType]
+
+  def typeToStream(token: Token[S], givenType: Type): OptionT[Alg, StreamType]
+
+  def typeToIterable(token: Token[S], givenType: Type): OptionT[Alg, CollectionType]
 
   def ensureTypeOneOf[T <: Type](
     token: Token[S],
 
@ -1,35 +1,28 @@
|
|||||||
package aqua.semantics.rules.types
|
package aqua.semantics.rules.types
|
||||||
|
|
||||||
import aqua.parser.lexer.*
|
import aqua.parser.lexer.*
|
||||||
import aqua.raw.value.{
|
import aqua.raw.value.*
|
||||||
FunctorRaw,
|
|
||||||
IntoArrowRaw,
|
|
||||||
IntoCopyRaw,
|
|
||||||
IntoFieldRaw,
|
|
||||||
IntoIndexRaw,
|
|
||||||
PropertyRaw,
|
|
||||||
ValueRaw
|
|
||||||
}
|
|
||||||
import aqua.semantics.rules.locations.LocationsAlgebra
|
|
||||||
import aqua.semantics.rules.StackInterpreter
|
import aqua.semantics.rules.StackInterpreter
|
||||||
|
import aqua.semantics.rules.locations.LocationsAlgebra
|
||||||
import aqua.semantics.rules.report.ReportAlgebra
|
import aqua.semantics.rules.report.ReportAlgebra
|
||||||
import aqua.semantics.rules.types.TypesStateHelper.{TypeResolution, TypeResolutionError}
|
import aqua.semantics.rules.types.TypeResolution.TypeResolutionError
|
||||||
import aqua.types.*
|
import aqua.types.*
|
||||||
|
import aqua.types.Type.*
|
||||||
|
|
||||||
import cats.data.Validated.{Invalid, Valid}
|
import cats.data.Validated.{Invalid, Valid}
|
||||||
import cats.data.{Chain, NonEmptyList, NonEmptyMap, OptionT, State}
|
import cats.data.{Chain, NonEmptyList, NonEmptyMap, OptionT, State}
|
||||||
import cats.syntax.applicative.*
|
import cats.syntax.applicative.*
|
||||||
import cats.syntax.apply.*
|
import cats.syntax.apply.*
|
||||||
import cats.syntax.flatMap.*
|
import cats.syntax.flatMap.*
|
||||||
import cats.syntax.functor.*
|
|
||||||
import cats.syntax.traverse.*
|
|
||||||
import cats.syntax.foldable.*
|
import cats.syntax.foldable.*
|
||||||
import cats.{~>, Applicative}
|
import cats.syntax.functor.*
|
||||||
import cats.syntax.option.*
|
import cats.syntax.option.*
|
||||||
|
import cats.syntax.traverse.*
|
||||||
|
import cats.{Applicative, ~>}
|
||||||
import monocle.Lens
|
import monocle.Lens
|
||||||
import monocle.macros.GenLens
|
import monocle.macros.GenLens
|
||||||
|
|
||||||
import scala.collection.immutable.SortedMap
|
import scala.collection.immutable.SortedMap
|
||||||
|
import scala.reflect.TypeTest
|
||||||
|
|
||||||
class TypesInterpreter[S[_], X](using
|
class TypesInterpreter[S[_], X](using
|
||||||
lens: Lens[X, TypesState[S]],
|
lens: Lens[X, TypesState[S]],
|
||||||
@ -49,15 +42,22 @@ class TypesInterpreter[S[_], X](using
|
|||||||
getState.map(st => st.strict.get(name))
|
getState.map(st => st.strict.get(name))
|
||||||
|
|
||||||
override def resolveType(token: TypeToken[S]): State[X, Option[Type]] =
|
override def resolveType(token: TypeToken[S]): State[X, Option[Type]] =
|
||||||
getState.map(TypesStateHelper.resolveTypeToken(token)).flatMap {
|
getState.map(TypeResolution.resolveTypeToken(token)).flatMap {
|
||||||
case Some(TypeResolution(typ, tokens)) =>
|
case Valid(TypeResolution(typ, tokens)) =>
|
||||||
val tokensLocs = tokens.map { case (t, n) => n.value -> t }
|
val tokensLocs = tokens.map { case (t, n) => n.value -> t }
|
||||||
locations.pointLocations(tokensLocs).as(typ.some)
|
locations.pointLocations(tokensLocs).as(typ.some)
|
||||||
case None =>
|
case Invalid(errors) =>
|
||||||
// TODO: Give more specific error message
|
errors.traverse_ { case TypeResolutionError(token, hint) =>
|
||||||
report.error(token, s"Unresolved type").as(None)
|
report.error(token, hint)
|
||||||
|
}.as(none)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
override def resolveStreamType(token: TypeToken[S]): State[X, Option[StreamType]] =
|
||||||
|
OptionT(resolveType(token)).flatMapF {
|
||||||
|
case st: StreamType => st.some.pure[ST]
|
||||||
|
case t => report.error(token, s"Expected stream type, got $t").as(none)
|
||||||
|
}.value
|
||||||
|
|
||||||
def resolveNamedType(token: TypeToken[S]): State[X, Option[AbilityType | StructType]] =
|
def resolveNamedType(token: TypeToken[S]): State[X, Option[AbilityType | StructType]] =
|
||||||
resolveType(token).flatMap(_.flatTraverse {
|
resolveType(token).flatMap(_.flatTraverse {
|
||||||
case t: (AbilityType | StructType) => Option(t).pure
|
case t: (AbilityType | StructType) => Option(t).pure
|
||||||
@ -65,7 +65,7 @@ class TypesInterpreter[S[_], X](using
|
|||||||
})
|
})
|
||||||
|
|
||||||
override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] =
|
override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] =
|
||||||
getState.map(TypesStateHelper.resolveArrowDef(arrowDef)).flatMap {
|
getState.map(TypeResolution.resolveArrowDef(arrowDef)).flatMap {
|
||||||
case Valid(TypeResolution(tt, tokens)) =>
|
case Valid(TypeResolution(tt, tokens)) =>
|
||||||
val tokensLocs = tokens.map { case (t, n) => n.value -> t }
|
val tokensLocs = tokens.map { case (t, n) => n.value -> t }
|
||||||
locations.pointLocations(tokensLocs).as(tt.some)
|
locations.pointLocations(tokensLocs).as(tt.some)
|
||||||
@ -142,11 +142,7 @@ class TypesInterpreter[S[_], X](using
|
|||||||
ensureNameNotDefined(name.value, name, ifDefined = none)(
|
ensureNameNotDefined(name.value, name, ifDefined = none)(
|
||||||
fields.toList.traverse {
|
fields.toList.traverse {
|
||||||
case (field, (fieldName, t: DataType)) =>
|
case (field, (fieldName, t: DataType)) =>
|
||||||
t match {
|
(field -> t).some.pure[ST]
|
||||||
case _: StreamType =>
|
|
||||||
report.error(fieldName, s"Field '$field' has stream type").as(none)
|
|
||||||
case _ => (field -> t).some.pure[ST]
|
|
||||||
}
|
|
||||||
case (field, (fieldName, t)) =>
|
case (field, (fieldName, t)) =>
|
||||||
report
|
report
|
||||||
.error(
|
.error(
|
||||||
@ -294,7 +290,7 @@ class TypesInterpreter[S[_], X](using
|
|||||||
op.idx.fold(
|
op.idx.fold(
|
||||||
State.pure(Some(IntoIndexRaw(idx, ot.element)))
|
State.pure(Some(IntoIndexRaw(idx, ot.element)))
|
||||||
)(v => report.error(v, s"Options might have only one element, use ! to get it").as(None))
|
)(v => report.error(v, s"Options might have only one element, use ! to get it").as(None))
|
||||||
case rt: BoxType =>
|
case rt: CollectionType =>
|
||||||
State.pure(Some(IntoIndexRaw(idx, rt.element)))
|
State.pure(Some(IntoIndexRaw(idx, rt.element)))
|
||||||
case _ =>
|
case _ =>
|
||||||
report.error(op, s"Expected $rootT to be a collection type").as(None)
|
report.error(op, s"Expected $rootT to be a collection type").as(None)
|
||||||
@ -318,7 +314,7 @@ class TypesInterpreter[S[_], X](using
|
|||||||
true
|
true
|
||||||
case (LiteralType.signed, rst: ScalarType) if ScalarType.number(rst) =>
|
case (LiteralType.signed, rst: ScalarType) if ScalarType.number(rst) =>
|
||||||
true
|
true
|
||||||
case (lbt: BoxType, rbt: BoxType) =>
|
case (lbt: CollectionType, rbt: CollectionType) =>
|
||||||
isComparable(lbt.element, rbt.element)
|
isComparable(lbt.element, rbt.element)
|
||||||
// Prohibit comparing abilities
|
// Prohibit comparing abilities
|
||||||
case (_: AbilityType, _: AbilityType) =>
|
case (_: AbilityType, _: AbilityType) =>
|
||||||
@ -329,7 +325,7 @@ class TypesInterpreter[S[_], X](using
|
|||||||
case (LiteralType(xs, _), LiteralType(ys, _)) =>
|
case (LiteralType(xs, _), LiteralType(ys, _)) =>
|
||||||
xs.intersect(ys).nonEmpty
|
xs.intersect(ys).nonEmpty
|
||||||
case _ =>
|
case _ =>
|
||||||
lt.uniteTop(rt) != TopType
|
lt `∪` rt != TopType
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isComparable(left, right)) State.pure(true)
|
if (isComparable(left, right)) State.pure(true)
|
||||||
@ -379,35 +375,71 @@ class TypesInterpreter[S[_], X](using
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
case _ =>
|
case _ =>
|
||||||
val notes =
|
val notes = (expected, givenType) match {
|
||||||
if (expected.acceptsValueOf(OptionType(givenType)))
|
case (_, dt: DataType) if expected.acceptsValueOf(OptionType(dt)) =>
|
||||||
"note: Try converting value to optional" :: Nil
|
"note: Try converting value to optional" :: Nil
|
||||||
else if (givenType.acceptsValueOf(OptionType(expected)))
|
case (dt: DataType, _) if givenType.acceptsValueOf(OptionType(dt)) =>
|
||||||
"note: You're providing an optional value where normal value is expected." ::
|
"note: You're providing an optional value where normal value is expected." ::
|
||||||
"You can extract value with `!`, but be aware it may trigger join behaviour." ::
|
"You can extract value with `!`, but be aware it may trigger join behaviour." ::
|
||||||
Nil
|
Nil
|
||||||
else Nil
|
case _ => Nil
|
||||||
|
}
|
||||||
|
|
||||||
report
|
report
|
||||||
.error(
|
.error(
|
||||||
token,
|
token,
|
||||||
"Types mismatch." :: s"expected: $expected" :: s"given: $givenType" :: Nil ++ notes
|
"Types mismatch." +:
|
||||||
|
s"expected: $expected" +:
|
||||||
|
s"given: $givenType" +:
|
||||||
|
notes
|
||||||
)
|
)
|
||||||
.as(false)
|
.as(false)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
override def ensureTypeIsCollectible(token: Token[S], givenType: Type): State[X, Boolean] =
|
private def typeTo[T <: Type](
|
||||||
|
token: Token[S],
|
||||||
|
givenType: Type,
|
||||||
|
error: String
|
||||||
|
)(using tt: TypeTest[Type, T]): OptionT[State[X, *], T] =
|
||||||
givenType match {
|
givenType match {
|
||||||
case _: DataType => true.pure
|
case t: T => OptionT.pure(t)
|
||||||
case _ =>
|
case _ =>
|
||||||
report
|
OptionT.liftF(
|
||||||
.error(
|
report.error(token, error)
|
||||||
token,
|
) *> OptionT.none
|
||||||
s"Value of type '$givenType' could not be put into a collection"
|
|
||||||
)
|
|
||||||
.as(false)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
override def typeToCollectible(
|
||||||
|
token: Token[S],
|
||||||
|
givenType: Type
|
||||||
|
): OptionT[State[X, *], CollectibleType] =
|
||||||
|
typeTo[CollectibleType](
|
||||||
|
token,
|
||||||
|
givenType,
|
||||||
|
s"Value of type '$givenType' could not be put into a collection"
|
||||||
|
)
|
||||||
|
|
||||||
|
override def typeToStream(
|
||||||
|
token: Token[S],
|
||||||
|
givenType: Type
|
||||||
|
): OptionT[State[X, *], StreamType] =
|
||||||
|
typeTo[StreamType](
|
||||||
|
token,
|
||||||
|
givenType,
|
||||||
|
s"Expected stream value, got value of type '$givenType'"
|
||||||
|
)
|
||||||
|
|
||||||
|
override def typeToIterable(
|
||||||
|
token: Token[S],
|
||||||
|
givenType: Type
|
||||||
|
): OptionT[State[X, *], CollectionType] =
|
||||||
|
typeTo[CollectionType](
|
||||||
|
token,
|
||||||
|
givenType,
|
||||||
|
s"Value of type '$givenType' could not be iterated over"
|
||||||
|
)
|
||||||
|
|
||||||
override def ensureTypeOneOf[T <: Type](
|
override def ensureTypeOneOf[T <: Type](
|
||||||
token: Token[S],
|
token: Token[S],
|
||||||
expected: Set[T],
|
expected: Set[T],
|
||||||
|
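For reference, a minimal standalone sketch of the technique the new `typeTo` helper relies on: Scala 3's `scala.reflect.TypeTest` lets a pattern match narrow a value of a sealed hierarchy to a requested subtype `T`, reporting an error (here an `Either`, instead of the interpreter's `OptionT[State[X, *], *]`) when the value is something else. `Ty`, `Data` and `Stream` are hypothetical stand-ins, not project types.

import scala.reflect.TypeTest

sealed trait Ty
final case class Data(name: String) extends Ty
final case class Stream(element: Data) extends Ty

// Narrow `value` to T if it is one; otherwise return `error`.
def typeTo[T <: Ty](value: Ty, error: String)(using TypeTest[Ty, T]): Either[String, T] =
  value match {
    case t: T => Right(t)
    case _    => Left(error)
  }

@main def typeToDemo(): Unit = {
  println(typeTo[Stream](Stream(Data("u32")), "expected a stream")) // Right(Stream(Data(u32)))
  println(typeTo[Stream](Data("u32"), "expected a stream"))         // Left(expected a stream)
}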
@ -1,20 +1,19 @@
|
|||||||
package aqua.semantics.rules.types
|
package aqua.semantics.rules.types
|
||||||
|
|
||||||
import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw}
|
|
||||||
import aqua.parser.lexer.*
|
import aqua.parser.lexer.*
|
||||||
import aqua.types.*
|
|
||||||
import aqua.raw.RawContext
|
import aqua.raw.RawContext
|
||||||
|
import aqua.raw.value.{FunctorRaw, IntoIndexRaw, LiteralRaw, PropertyRaw, ValueRaw}
|
||||||
|
import aqua.types.*
|
||||||
|
|
||||||
import cats.data.Validated.{Invalid, Valid}
|
import cats.data.Validated.{Invalid, Valid}
|
||||||
import cats.data.{Chain, NonEmptyChain, ValidatedNec}
|
import cats.data.{Chain, NonEmptyChain, ValidatedNec}
|
||||||
import cats.kernel.Monoid
|
import cats.kernel.Monoid
|
||||||
import cats.syntax.option.*
|
|
||||||
import cats.syntax.traverse.*
|
|
||||||
import cats.syntax.validated.*
|
|
||||||
import cats.syntax.apply.*
|
import cats.syntax.apply.*
|
||||||
import cats.syntax.bifunctor.*
|
import cats.syntax.bifunctor.*
|
||||||
import cats.syntax.functor.*
|
import cats.syntax.functor.*
|
||||||
import cats.syntax.apply.*
|
import cats.syntax.option.*
|
||||||
|
import cats.syntax.traverse.*
|
||||||
|
import cats.syntax.validated.*
|
||||||
|
|
||||||
case class TypesState[S[_]](
|
case class TypesState[S[_]](
|
||||||
fields: Map[String, (Name[S], Type)] = Map(),
|
fields: Map[String, (Name[S], Type)] = Map(),
|
||||||
@ -37,97 +36,6 @@ case class TypesState[S[_]](
|
|||||||
definitions.get(name)
|
definitions.get(name)
|
||||||
}
|
}
|
||||||
|
|
||||||
object TypesStateHelper {
|
|
||||||
|
|
||||||
final case class TypeResolution[S[_], +T](
|
|
||||||
`type`: T,
|
|
||||||
definitions: List[(Token[S], NamedTypeToken[S])]
|
|
||||||
)
|
|
||||||
|
|
||||||
final case class TypeResolutionError[S[_]](
|
|
||||||
token: Token[S],
|
|
||||||
hint: String
|
|
||||||
)
|
|
||||||
|
|
||||||
def resolveTypeToken[S[_]](tt: TypeToken[S])(
|
|
||||||
state: TypesState[S]
|
|
||||||
): Option[TypeResolution[S, Type]] =
|
|
||||||
tt match {
|
|
||||||
case TopBottomToken(_, isTop) =>
|
|
||||||
val `type` = if (isTop) TopType else BottomType
|
|
||||||
|
|
||||||
TypeResolution(`type`, Nil).some
|
|
||||||
case ArrayTypeToken(_, dtt) =>
|
|
||||||
resolveTypeToken(dtt)(state).collect { case TypeResolution(it: DataType, t) =>
|
|
||||||
TypeResolution(ArrayType(it), t)
|
|
||||||
}
|
|
||||||
case StreamTypeToken(_, dtt) =>
|
|
||||||
resolveTypeToken(dtt)(state).collect { case TypeResolution(it: DataType, t) =>
|
|
||||||
TypeResolution(StreamType(it), t)
|
|
||||||
}
|
|
||||||
case OptionTypeToken(_, dtt) =>
|
|
||||||
resolveTypeToken(dtt)(state).collect { case TypeResolution(it: DataType, t) =>
|
|
||||||
TypeResolution(OptionType(it), t)
|
|
||||||
}
|
|
||||||
case ntt: NamedTypeToken[S] =>
|
|
||||||
val defs = state
|
|
||||||
.getTypeDefinition(ntt.value)
|
|
||||||
.toList
|
|
||||||
.map(ntt -> _)
|
|
||||||
|
|
||||||
state
|
|
||||||
.getType(ntt.value)
|
|
||||||
.map(typ => TypeResolution(typ, defs))
|
|
||||||
case btt: BasicTypeToken[S] =>
|
|
||||||
TypeResolution(btt.value, Nil).some
|
|
||||||
case att: ArrowTypeToken[S] =>
|
|
||||||
resolveArrowDef(att)(state).toOption
|
|
||||||
}
|
|
||||||
|
|
||||||
def resolveArrowDef[S[_]](arrowTypeToken: ArrowTypeToken[S])(
|
|
||||||
state: TypesState[S]
|
|
||||||
): ValidatedNec[TypeResolutionError[S], TypeResolution[S, ArrowType]] = {
|
|
||||||
val res = arrowTypeToken.res.traverse(typeToken =>
|
|
||||||
resolveTypeToken(typeToken)(state)
|
|
||||||
.toValidNec(
|
|
||||||
TypeResolutionError(
|
|
||||||
typeToken,
|
|
||||||
"Can not resolve the result type"
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
val args = arrowTypeToken.args.traverse { case (argName, typeToken) =>
|
|
||||||
resolveTypeToken(typeToken)(state)
|
|
||||||
.toValidNec(
|
|
||||||
TypeResolutionError(
|
|
||||||
typeToken,
|
|
||||||
"Can not resolve the argument type"
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.map(argName.map(_.value) -> _)
|
|
||||||
}
|
|
||||||
|
|
||||||
(args, res).mapN { (args, res) =>
|
|
||||||
val (argsLabeledTypes, argsTokens) =
|
|
||||||
args.map { case lbl -> TypeResolution(typ, tkn) =>
|
|
||||||
(lbl, typ) -> tkn
|
|
||||||
}.unzip.map(_.flatten)
|
|
||||||
val (resTypes, resTokens) =
|
|
||||||
res.map { case TypeResolution(typ, tkn) =>
|
|
||||||
typ -> tkn
|
|
||||||
}.unzip.map(_.flatten)
|
|
||||||
|
|
||||||
val typ = ArrowType(
|
|
||||||
ProductType.maybeLabelled(argsLabeledTypes),
|
|
||||||
ProductType(resTypes)
|
|
||||||
)
|
|
||||||
val defs = (argsTokens ++ resTokens)
|
|
||||||
|
|
||||||
TypeResolution(typ, defs)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
object TypesState {
|
object TypesState {
|
||||||
|
|
||||||
final case class TypeDefinition[S[_]](
|
final case class TypeDefinition[S[_]](
|
||||||
|
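The arrow-resolution logic removed from this file now lives in `TypeResolution` (see the import change in the interpreter above). Its shape is worth spelling out: every argument and result type is validated independently and the pieces are combined with `mapN`, so all resolution failures surface at once instead of stopping at the first one. A minimal standalone sketch of that `ValidatedNec` accumulation pattern, with a hypothetical `resolve` lookup standing in for real token resolution:

import cats.data.ValidatedNec
import cats.syntax.apply.*
import cats.syntax.traverse.*
import cats.syntax.validated.*

// Hypothetical stand-ins for the real token/type machinery.
final case class Arrow(args: List[(String, String)], res: List[String])

def resolve(name: String, known: Set[String]): ValidatedNec[String, String] =
  if (known(name)) name.validNec else s"Can not resolve type '$name'".invalidNec

def resolveArrow(
  args: List[(String, String)],
  res: List[String],
  known: Set[String]
): ValidatedNec[String, Arrow] = {
  val argsV = args.traverse { case (label, tpe) => resolve(tpe, known).map(label -> _) }
  val resV  = res.traverse(resolve(_, known))
  (argsV, resV).mapN(Arrow.apply) // errors from both sides are accumulated
}

@main def resolveArrowDemo(): Unit =
  // Both the unknown argument type and the unknown result type are reported.
  println(resolveArrow(List("a" -> "u32", "b" -> "Oops"), List("Nope"), Set("u32")))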
@ -1,7 +1,7 @@
|
|||||||
package aqua.semantics
|
package aqua.semantics
|
||||||
|
|
||||||
import aqua.parser.expr.func.ArrowExpr
|
import aqua.parser.expr.func.ArrowExpr
|
||||||
import aqua.parser.lexer.{BasicTypeToken, Name}
|
import aqua.parser.lexer.{Name, ScalarTypeToken}
|
||||||
import aqua.raw.Raw
|
import aqua.raw.Raw
|
||||||
import aqua.raw.arrow.ArrowRaw
|
import aqua.raw.arrow.ArrowRaw
|
||||||
import aqua.raw.ops.*
|
import aqua.raw.ops.*
|
||||||
@ -12,12 +12,12 @@ import aqua.types.*
|
|||||||
import aqua.types.ScalarType.*
|
import aqua.types.ScalarType.*
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.syntax.applicative.*
|
|
||||||
import cats.data.{NonEmptyList, NonEmptyMap, State}
|
import cats.data.{NonEmptyList, NonEmptyMap, State}
|
||||||
|
import cats.syntax.applicative.*
|
||||||
import org.scalatest.EitherValues
|
import org.scalatest.EitherValues
|
||||||
|
import org.scalatest.Inside
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
import org.scalatest.matchers.should.Matchers
|
import org.scalatest.matchers.should.Matchers
|
||||||
import org.scalatest.Inside
|
|
||||||
|
|
||||||
class ArrowSemSpec extends AnyFlatSpec with Matchers with EitherValues with Inside {
|
class ArrowSemSpec extends AnyFlatSpec with Matchers with EitherValues with Inside {
|
||||||
|
|
||||||
@ -84,7 +84,7 @@ class ArrowSemSpec extends AnyFlatSpec with Matchers with EitherValues with Insi
|
|||||||
val state = getState(seq)(program("(a: string, b: u32) -> u32"))
|
val state = getState(seq)(program("(a: string, b: u32) -> u32"))
|
||||||
|
|
||||||
state.errors.headOption.get shouldBe RulesViolated[Id](
|
state.errors.headOption.get shouldBe RulesViolated[Id](
|
||||||
BasicTypeToken[Id](u32),
|
ScalarTypeToken[Id](u32),
|
||||||
"Types mismatch, expected: u32, given: string" :: Nil
|
"Types mismatch, expected: u32, given: string" :: Nil
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -112,7 +112,7 @@ class ArrowSemSpec extends AnyFlatSpec with Matchers with EitherValues with Insi
|
|||||||
|
|
||||||
state.errors shouldBe empty
|
state.errors shouldBe empty
|
||||||
inside(raw) { case ArrowRaw(_, Nil, bodyRes) =>
|
inside(raw) { case ArrowRaw(_, Nil, bodyRes) =>
|
||||||
bodyRes shouldBe body
|
bodyRes.equalsOrShowDiff(body) shouldBe true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,26 +1,27 @@
|
|||||||
package aqua.semantics
|
package aqua.semantics
|
||||||
|
|
||||||
import aqua.raw.RawContext
|
|
||||||
import aqua.parser.Ast
|
import aqua.parser.Ast
|
||||||
import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp, OnTag, ParTag, RawTag, SeqGroupTag, SeqTag}
|
|
||||||
import aqua.parser.Parser
|
import aqua.parser.Parser
|
||||||
import aqua.parser.lift.{LiftParser, Span}
|
import aqua.parser.lift.{LiftParser, Span}
|
||||||
|
import aqua.raw.ConstantRaw
|
||||||
|
import aqua.raw.RawContext
|
||||||
|
import aqua.raw.ops.*
|
||||||
|
import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp, OnTag, ParTag, RawTag, SeqGroupTag, SeqTag}
|
||||||
import aqua.raw.value.*
|
import aqua.raw.value.*
|
||||||
import aqua.types.*
|
import aqua.types.*
|
||||||
import aqua.raw.ops.*
|
|
||||||
|
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
import cats.Eval
|
||||||
import org.scalatest.matchers.should.Matchers
|
import cats.data.State
|
||||||
import org.scalatest.Inside
|
import cats.data.Validated
|
||||||
import cats.~>
|
|
||||||
import cats.data.{Chain, EitherNec, NonEmptyChain}
|
import cats.data.{Chain, EitherNec, NonEmptyChain}
|
||||||
|
import cats.free.Cofree
|
||||||
|
import cats.syntax.foldable.*
|
||||||
import cats.syntax.show.*
|
import cats.syntax.show.*
|
||||||
import cats.syntax.traverse.*
|
import cats.syntax.traverse.*
|
||||||
import cats.syntax.foldable.*
|
import cats.~>
|
||||||
import cats.data.Validated
|
import org.scalatest.Inside
|
||||||
import cats.free.Cofree
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
import cats.data.State
|
import org.scalatest.matchers.should.Matchers
|
||||||
import cats.Eval
|
|
||||||
|
|
||||||
class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
|
class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
|
||||||
|
|
||||||
@ -40,7 +41,11 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
|
|||||||
Any
|
Any
|
||||||
]
|
]
|
||||||
): Unit = inside(parser(script)) { case Validated.Valid(ast) =>
|
): Unit = inside(parser(script)) { case Validated.Valid(ast) =>
|
||||||
val init = RawContext.blank
|
val init = RawContext.blank.copy(
|
||||||
|
parts = Chain
|
||||||
|
.fromSeq(ConstantRaw.defaultConstants())
|
||||||
|
.map(const => RawContext.blank -> const)
|
||||||
|
)
|
||||||
inside(semantics.process(ast, init).value.run)(test)
|
inside(semantics.process(ast, init).value.run)(test)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -839,4 +844,52 @@ class SemanticsSpec extends AnyFlatSpec with Matchers with Inside {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
it should "allow pushing `nil` to stream" in {
|
||||||
|
def test(quantifier: String) = {
|
||||||
|
val script = s"""
|
||||||
|
|func test() -> []${quantifier}string:
|
||||||
|
| stream: *${quantifier}string
|
||||||
|
| stream <<- nil
|
||||||
|
| <- stream
|
||||||
|
|""".stripMargin
|
||||||
|
|
||||||
|
insideBody(script) { body =>
|
||||||
|
matchSubtree(body) { case (PushToStreamTag(VarRaw(name, _), _), _) =>
|
||||||
|
name shouldEqual "nil"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test("?")
|
||||||
|
test("[]")
|
||||||
|
}
|
||||||
|
|
||||||
|
it should "allow putting stream into collection" in {
|
||||||
|
def test(t: String, p: String) = {
|
||||||
|
val script = s"""
|
||||||
|
|service Srv("test-srv"):
|
||||||
|
| consume(value: ${t}[]string)
|
||||||
|
|
|
||||||
|
|func test():
|
||||||
|
| stream: *string
|
||||||
|
| Srv.consume(${p}[stream])
|
||||||
|
|""".stripMargin
|
||||||
|
|
||||||
|
insideBody(script) { body =>
|
||||||
|
println(body.show)
|
||||||
|
matchSubtree(body) { case (CallArrowRawTag(_, ca: CallArrowRaw), _) =>
|
||||||
|
inside(ca.arguments) { case (c: CollectionRaw) :: Nil =>
|
||||||
|
c.values.exists {
|
||||||
|
case VarRaw(name, _) => name == "stream"
|
||||||
|
case _ => false
|
||||||
|
} should be(true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
test("[]", "")
|
||||||
|
test("?", "?")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
179
semantics/src/test/scala/aqua/semantics/TypeResolutionSpec.scala
Normal file
@ -0,0 +1,179 @@
|
|||||||
|
package aqua.semantics
|
||||||
|
|
||||||
|
import aqua.parser.lexer.*
|
||||||
|
import aqua.semantics.rules.types.TypeResolution.TypeResolutionError
|
||||||
|
import aqua.semantics.rules.types.{TypeResolution, TypesState}
|
||||||
|
import aqua.types.*
|
||||||
|
|
||||||
|
import cats.Endo
|
||||||
|
import cats.Id
|
||||||
|
import cats.SemigroupK
|
||||||
|
import cats.data.NonEmptyMap
|
||||||
|
import cats.data.Validated.*
|
||||||
|
import cats.kernel.Semigroup
|
||||||
|
import cats.syntax.foldable.*
|
||||||
|
import cats.syntax.option.*
|
||||||
|
import cats.syntax.semigroup.*
|
||||||
|
import org.scalatest.Inside
|
||||||
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
|
import org.scalatest.matchers.should.Matchers
|
||||||
|
|
||||||
|
class TypeResolutionSpec extends AnyFlatSpec with Matchers with Inside {
|
||||||
|
|
||||||
|
given [A, B]: Semigroup[(Endo[A], Endo[B])] with {
|
||||||
|
private val algebraA = SemigroupK[Endo].algebra[A]
|
||||||
|
private val algebraB = SemigroupK[Endo].algebra[B]
|
||||||
|
|
||||||
|
override def combine(x: (Endo[A], Endo[B]), y: (Endo[A], Endo[B])): (Endo[A], Endo[B]) =
|
||||||
|
(algebraA.combine(x._1, y._1), algebraB.combine(x._2, y._2))
|
||||||
|
}
|
||||||
|
|
||||||
|
def stt(st: ScalarType): ScalarTypeToken[Id] = ScalarTypeToken(st)
|
||||||
|
|
||||||
|
def ntt(name: String): NamedTypeToken[Id] = NamedTypeToken(name)
|
||||||
|
|
||||||
|
def resolve(
|
||||||
|
token: TypeToken[Id],
|
||||||
|
types: Map[String, Type]
|
||||||
|
): TypeResolution.Res[Id, Type] =
|
||||||
|
TypeResolution.resolveTypeToken(token)(TypesState(strict = types))
|
||||||
|
|
||||||
|
val validCollectionModifiers: LazyList[
|
||||||
|
List[(Endo[BasicTypeToken[Id]], DataType => Type)]
|
||||||
|
] = {
|
||||||
|
val baseModifiers: List[(Endo[BasicTypeToken[Id]], Endo[DataType])] = List(
|
||||||
|
(ArrayTypeToken[Id]((), _)) -> (ArrayType.apply),
|
||||||
|
(OptionTypeToken[Id]((), _)) -> (OptionType.apply)
|
||||||
|
)
|
||||||
|
|
||||||
|
val streamModifier = (dt: BasicTypeToken[Id]) => StreamTypeToken[Id]((), dt)
|
||||||
|
|
||||||
|
val dataModifiers = LazyList.unfold(baseModifiers) { mods =>
|
||||||
|
(
|
||||||
|
mods,
|
||||||
|
for {
|
||||||
|
m <- mods
|
||||||
|
b <- baseModifiers
|
||||||
|
} yield m combine b
|
||||||
|
).some
|
||||||
|
}
|
||||||
|
|
||||||
|
dataModifiers.map { mods =>
|
||||||
|
mods.map { case (token, typ) =>
|
||||||
|
(token andThen streamModifier) -> (typ andThen StreamType.apply)
|
||||||
|
} ++ mods
|
||||||
|
}.prepended(List((streamModifier, StreamType.apply))).take(6)
|
||||||
|
}
|
||||||
|
|
||||||
|
val structType = StructType("Struct", NonEmptyMap.of("field" -> ScalarType.i8))
|
||||||
|
|
||||||
|
"TypeResolution resolveTypeToken" should "resolve basic types" in {
|
||||||
|
val baseTypes = List(
|
||||||
|
stt(ScalarType.u32) -> ScalarType.u32,
|
||||||
|
stt(ScalarType.string) -> ScalarType.string,
|
||||||
|
ntt("Struct") -> structType
|
||||||
|
)
|
||||||
|
|
||||||
|
for {
|
||||||
|
base <- baseTypes
|
||||||
|
(token, expected) = base
|
||||||
|
} inside(resolve(token, Map("Struct" -> structType))) {
|
||||||
|
case Valid(TypeResolution(result, Nil)) =>
|
||||||
|
result shouldEqual expected
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
it should "resolve nested types" in {
|
||||||
|
val baseTypes = List(
|
||||||
|
stt(ScalarType.u32) -> ScalarType.u32,
|
||||||
|
stt(ScalarType.string) -> ScalarType.string,
|
||||||
|
ntt("Struct") -> structType
|
||||||
|
)
|
||||||
|
|
||||||
|
validCollectionModifiers
|
||||||
|
.take(6)
|
||||||
|
.toList
|
||||||
|
.flatten
|
||||||
|
.foreach(modifier =>
|
||||||
|
for {
|
||||||
|
base <- baseTypes
|
||||||
|
(btoken, btype) = base
|
||||||
|
(mod, typ) = modifier
|
||||||
|
} inside(resolve(mod(btoken), Map("Struct" -> structType))) {
|
||||||
|
case Valid(TypeResolution(result, Nil)) =>
|
||||||
|
result shouldEqual typ(btype)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
it should "forbid services and abilities in collections" in {
|
||||||
|
val arrow = NonEmptyMap.of("arrow" -> ArrowType(ProductType(Nil), ProductType(Nil)))
|
||||||
|
|
||||||
|
val serviceType = ServiceType("Srv", arrow)
|
||||||
|
val abilityType = AbilityType("Abl", arrow)
|
||||||
|
|
||||||
|
val types = List(
|
||||||
|
ntt(serviceType.name) -> serviceType,
|
||||||
|
ntt(abilityType.name) -> abilityType
|
||||||
|
)
|
||||||
|
|
||||||
|
validCollectionModifiers
|
||||||
|
.take(6)
|
||||||
|
.toList
|
||||||
|
.flatten
|
||||||
|
.foreach(modifier =>
|
||||||
|
for {
|
||||||
|
base <- types
|
||||||
|
(btoken, btype) = base
|
||||||
|
(mod, _) = modifier
|
||||||
|
} inside(
|
||||||
|
resolve(
|
||||||
|
mod(btoken),
|
||||||
|
Map(
|
||||||
|
serviceType.name -> serviceType,
|
||||||
|
abilityType.name -> abilityType
|
||||||
|
)
|
||||||
|
)
|
||||||
|
) { case Invalid(errors) =>
|
||||||
|
errors.exists(_.hint.contains("contain")) shouldBe true
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
it should "forbid streams inside any collection" in {
|
||||||
|
val baseTypes = List(
|
||||||
|
stt(ScalarType.u32),
|
||||||
|
stt(ScalarType.string),
|
||||||
|
ntt("Struct")
|
||||||
|
)
|
||||||
|
|
||||||
|
val modifiers = validCollectionModifiers
|
||||||
|
.map(_.map { case (token, _) => token })
|
||||||
|
.take(3)
|
||||||
|
.toList
|
||||||
|
.flatten
|
||||||
|
|
||||||
|
for {
|
||||||
|
left <- modifiers
|
||||||
|
right <- identity[BasicTypeToken[Id]] +: modifiers
|
||||||
|
base <- baseTypes
|
||||||
|
t = left(StreamTypeToken[Id]((), right(base)))
|
||||||
|
} inside(
|
||||||
|
resolve(t, Map(structType.name -> structType))
|
||||||
|
) { case Invalid(errors) =>
|
||||||
|
errors.exists(_.hint.contains("of type *")) shouldBe true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
it should "forbid stream of streams through alias" in {
|
||||||
|
val streamType = StreamType(ScalarType.u32)
|
||||||
|
|
||||||
|
val t = StreamTypeToken[Id]((), ntt("Als"))
|
||||||
|
|
||||||
|
inside(
|
||||||
|
resolve(t, Map("Als" -> streamType))
|
||||||
|
) { case Invalid(errors) =>
|
||||||
|
errors.exists(_.hint.contains("of type *")) shouldBe true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -12,13 +12,13 @@ import aqua.semantics.rules.names.{NamesAlgebra, NamesInterpreter, NamesState}
|
|||||||
import aqua.semantics.rules.report.{ReportAlgebra, ReportInterpreter}
|
import aqua.semantics.rules.report.{ReportAlgebra, ReportInterpreter}
|
||||||
import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter}
|
import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter}
|
||||||
import aqua.types.*
|
import aqua.types.*
|
||||||
|
|
||||||
import cats.Id
|
import cats.Id
|
||||||
import cats.data.{NonEmptyList, NonEmptyMap, State}
|
import cats.data.{NonEmptyList, NonEmptyMap, State}
|
||||||
import monocle.syntax.all.*
|
import monocle.syntax.all.*
|
||||||
import org.scalatest.Inside
|
import org.scalatest.Inside
|
||||||
import org.scalatest.flatspec.AnyFlatSpec
|
import org.scalatest.flatspec.AnyFlatSpec
|
||||||
import org.scalatest.matchers.should.Matchers
|
import org.scalatest.matchers.should.Matchers
|
||||||
|
|
||||||
import scala.collection.immutable.SortedMap
|
import scala.collection.immutable.SortedMap
|
||||||
|
|
||||||
class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
|
class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
|
||||||
@ -137,7 +137,7 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
|
|||||||
val vl = variable("left")
|
val vl = variable("left")
|
||||||
val vr = variable("right")
|
val vr = variable("right")
|
||||||
|
|
||||||
val ut = lt.uniteTop(rt)
|
val ut = lt `∪` rt
|
||||||
|
|
||||||
val state = genState(
|
val state = genState(
|
||||||
vars = Map(
|
vars = Map(
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
package aqua.types
|
package aqua.types
|
||||||
|
|
||||||
|
import aqua.errors.Errors.internalError
|
||||||
|
|
||||||
import cats.Monoid
|
import cats.Monoid
|
||||||
import cats.data.NonEmptyMap
|
import cats.data.NonEmptyMap
|
||||||
|
|
||||||
@ -19,6 +21,12 @@ case class IntersectTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type]
|
|||||||
ap.toList.zip(bp.toList).map(combine)
|
ap.toList.zip(bp.toList).map(combine)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
private def combineDataTypes(a: DataType, b: DataType): DataType =
|
||||||
|
(a `∩` b) match {
|
||||||
|
case d: DataType => d
|
||||||
|
case t => internalError(s"$a ∩ $b yields non-data type $t")
|
||||||
|
}
|
||||||
|
|
||||||
override def combine(a: Type, b: Type): Type =
|
override def combine(a: Type, b: Type): Type =
|
||||||
(a, b) match {
|
(a, b) match {
|
||||||
case _ if CompareTypes(a, b) == 0.0 => a
|
case _ if CompareTypes(a, b) == 0.0 => a
|
||||||
@ -39,18 +47,20 @@ case class IntersectTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type]
|
|||||||
combineProducts(aa.codomain, bb.codomain)
|
combineProducts(aa.codomain, bb.codomain)
|
||||||
)
|
)
|
||||||
|
|
||||||
case (ac: OptionType, bc: BoxType) =>
|
case (ac: OptionType, bc: CollectionType) =>
|
||||||
OptionType(ac.element `∩` bc.element)
|
OptionType(combineDataTypes(ac.element, bc.element))
|
||||||
|
case (ac: CollectionType, bc: OptionType) =>
|
||||||
|
OptionType(combineDataTypes(ac.element, bc.element))
|
||||||
|
|
||||||
case (ac: BoxType, bc: OptionType) =>
|
case (ac: ArrayType, bc: CollectionType) =>
|
||||||
OptionType(ac.element `∩` bc.element)
|
ArrayType(combineDataTypes(ac.element, bc.element))
|
||||||
|
case (ac: CollectionType, bc: ArrayType) =>
|
||||||
|
ArrayType(combineDataTypes(ac.element, bc.element))
|
||||||
|
|
||||||
case (ac: ArrayType, bc: BoxType) =>
|
|
||||||
ArrayType(ac.element `∩` bc.element)
|
|
||||||
case (ac: BoxType, bc: ArrayType) =>
|
|
||||||
ArrayType(ac.element `∩` bc.element)
|
|
||||||
case (ac: StreamType, bc: StreamType) =>
|
case (ac: StreamType, bc: StreamType) =>
|
||||||
StreamType(ac.element `∩` bc.element)
|
StreamType(combineDataTypes(ac.element, bc.element))
|
||||||
|
case (ac: StreamMapType, bc: StreamMapType) =>
|
||||||
|
StreamMapType(combineDataTypes(ac.element, bc.element))
|
||||||
|
|
||||||
case (a: ScalarType, b: ScalarType) =>
|
case (a: ScalarType, b: ScalarType) =>
|
||||||
scalarsCombine(a, b)
|
scalarsCombine(a, b)
|
||||||
|
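A hedged usage sketch of the tightened intersection (it only compiles against the aqua `types` module; the expected results follow from the cases above, where `combineDataTypes` keeps collection elements inside `DataType`):

import aqua.types.*

@main def intersectDemo(): Unit = {
  // Option ∩ collection keeps the option shape, with the elements intersected.
  println(OptionType(ScalarType.u32) `∩` ArrayType(ScalarType.u32))  // expected: ?u32
  // Stream ∩ stream stays a stream.
  println(StreamType(ScalarType.u32) `∩` StreamType(ScalarType.u32)) // expected: *u32
}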
@ -1,33 +1,39 @@
|
|||||||
package aqua.types
|
package aqua.types
|
||||||
|
|
||||||
import cats.PartialOrder
|
import aqua.errors.Errors.internalError
|
||||||
import cats.data.NonEmptyMap
|
import aqua.types.Type.*
|
||||||
|
|
||||||
import cats.Eval
|
import cats.Eval
|
||||||
import cats.syntax.traverse.*
|
import cats.PartialOrder
|
||||||
|
import cats.data.NonEmptyList
|
||||||
|
import cats.data.NonEmptyMap
|
||||||
import cats.syntax.applicative.*
|
import cats.syntax.applicative.*
|
||||||
import cats.syntax.option.*
|
import cats.syntax.option.*
|
||||||
|
import cats.syntax.partialOrder.*
|
||||||
|
import cats.syntax.traverse.*
|
||||||
|
|
||||||
sealed trait Type {
|
sealed trait Type {
|
||||||
|
|
||||||
def acceptsValueOf(incoming: Type): Boolean = {
|
def acceptsValueOf(incoming: Type): Boolean =
|
||||||
import Type.typesPartialOrder
|
|
||||||
import cats.syntax.partialOrder._
|
|
||||||
this >= incoming
|
this >= incoming
|
||||||
}
|
|
||||||
|
|
||||||
def isInhabited: Boolean = true
|
def isInhabited: Boolean = true
|
||||||
|
|
||||||
infix def `∩`(other: Type): Type = intersectBottom(other)
|
infix def `∩`[T <: Type](other: T): Type = intersectBottom(other)
|
||||||
|
|
||||||
def intersectTop(other: Type): Type = IntersectTypes.top.combine(this, other)
|
private final def intersectTop(other: Type): Type =
|
||||||
|
IntersectTypes.top.combine(this, other)
|
||||||
|
|
||||||
def intersectBottom(other: Type): Type = IntersectTypes.bottom.combine(this, other)
|
private final def intersectBottom(other: Type): Type =
|
||||||
|
IntersectTypes.bottom.combine(this, other)
|
||||||
|
|
||||||
infix def `∪`(other: Type): Type = uniteTop(other)
|
infix def `∪`[T <: Type](other: T): Type = uniteTop(other)
|
||||||
|
|
||||||
def uniteTop(other: Type): Type = UniteTypes.top.combine(this, other)
|
private final def uniteTop(other: Type): Type =
|
||||||
|
UniteTypes.top.combine(this, other)
|
||||||
|
|
||||||
def uniteBottom(other: Type): Type = UniteTypes.bottom.combine(this, other)
|
private final def uniteBottom(other: Type): Type =
|
||||||
|
UniteTypes.bottom.combine(this, other)
|
||||||
|
|
||||||
def properties: Map[String, Type] = Map.empty
|
def properties: Map[String, Type] = Map.empty
|
||||||
|
|
||||||
@ -73,8 +79,7 @@ sealed trait ProductType extends Type {
|
|||||||
*/
|
*/
|
||||||
def toLabelledList(prefix: String = "arg", index: Int = 0): List[(String, Type)] = this match {
|
def toLabelledList(prefix: String = "arg", index: Int = 0): List[(String, Type)] = this match {
|
||||||
case LabeledConsType(label, t, pt) => (label -> t) :: pt.toLabelledList(prefix, index + 1)
|
case LabeledConsType(label, t, pt) => (label -> t) :: pt.toLabelledList(prefix, index + 1)
|
||||||
case UnlabeledConsType(t, pt) =>
|
case UnlabeledConsType(t, pt) => (s"$prefix$index" -> t) :: pt.toLabelledList(prefix, index + 1)
|
||||||
(s"$prefix$index" -> t) :: pt.toLabelledList(prefix, index + 1)
|
|
||||||
case _ => Nil
|
case _ => Nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -85,6 +90,15 @@ sealed trait ProductType extends Type {
|
|||||||
pt.labelledData
|
pt.labelledData
|
||||||
case _ => Nil
|
case _ => Nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
lazy val labelledStreams: List[(String, StreamType)] = this match {
|
||||||
|
case LabeledConsType(label, t: StreamType, pt) =>
|
||||||
|
(label -> t) :: pt.labelledStreams
|
||||||
|
case ConsType(_, pt) =>
|
||||||
|
pt.labelledStreams
|
||||||
|
case _ => Nil
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
object ProductType {
|
object ProductType {
|
||||||
@ -247,42 +261,73 @@ object LiteralType {
|
|||||||
def forInt(n: Long): LiteralType = if (n < 0) signed else unsigned
|
def forInt(n: Long): LiteralType = if (n < 0) signed else unsigned
|
||||||
}
|
}
|
||||||
|
|
||||||
sealed trait BoxType extends DataType {
|
sealed trait CollectionType extends Type {
|
||||||
def isStream: Boolean
|
def isStream: Boolean
|
||||||
|
|
||||||
def element: Type
|
def element: DataType
|
||||||
|
|
||||||
def withElement(t: Type): BoxType
|
def withElement(t: DataType): CollectionType
|
||||||
|
|
||||||
override def properties: Map[String, Type] =
|
override def properties: Map[String, Type] =
|
||||||
Map("length" -> ScalarType.u32)
|
Map("length" -> ScalarType.u32)
|
||||||
}
|
}
|
||||||
|
|
||||||
case class CanonStreamType(element: Type) extends BoxType {
|
object CollectionType {
|
||||||
|
|
||||||
override def isStream: Boolean = false
|
def elementTypeOf(types: List[CollectibleType]): DataType =
|
||||||
|
NonEmptyList
|
||||||
|
.fromList(types)
|
||||||
|
.fold(BottomType)(
|
||||||
|
_.map {
|
||||||
|
case StreamType(el) => ArrayType(el)
|
||||||
|
case dt: DataType => dt
|
||||||
|
}.reduce[Type](_ `∩` _) match {
|
||||||
|
// In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
|
||||||
|
// But we want to get to TopType instead: this would mean that intersection is empty, and you cannot
|
||||||
|
// make any decision about the structure of type, but can push anything inside
|
||||||
|
case BottomType => TopType
|
||||||
|
case dt: DataType => dt
|
||||||
|
case t =>
|
||||||
|
internalError(
|
||||||
|
s"Expected data type from " +
|
||||||
|
s"intersection of ${types.mkString(", ")}; " +
|
||||||
|
s"got $t"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
case class CanonStreamType(
|
||||||
|
override val element: DataType
|
||||||
|
) extends DataType with CollectionType {
|
||||||
|
|
||||||
|
override val isStream: Boolean = false
|
||||||
|
|
||||||
override def toString: String = "#" + element
|
override def toString: String = "#" + element
|
||||||
|
|
||||||
override def withElement(t: Type): BoxType = copy(element = t)
|
override def withElement(t: DataType): CollectionType = copy(element = t)
|
||||||
}
|
}
|
||||||
|
|
||||||
case class ArrayType(element: Type) extends BoxType {
|
case class ArrayType(
|
||||||
|
override val element: DataType
|
||||||
|
) extends DataType with CollectionType {
|
||||||
|
|
||||||
override def isStream: Boolean = false
|
override val isStream: Boolean = false
|
||||||
|
|
||||||
override def toString: String = "[]" + element
|
override def toString: String = "[]" + element
|
||||||
|
|
||||||
override def withElement(t: Type): BoxType = copy(element = t)
|
override def withElement(t: DataType): CollectionType = copy(element = t)
|
||||||
}
|
}
|
||||||
|
|
||||||
case class OptionType(element: Type) extends BoxType {
|
case class OptionType(
|
||||||
|
override val element: DataType
|
||||||
|
) extends DataType with CollectionType {
|
||||||
|
|
||||||
override def isStream: Boolean = false
|
override val isStream: Boolean = false
|
||||||
|
|
||||||
override def toString: String = "?" + element
|
override def toString: String = "?" + element
|
||||||
|
|
||||||
override def withElement(t: Type): BoxType = copy(element = t)
|
override def withElement(t: DataType): CollectionType = copy(element = t)
|
||||||
}
|
}
|
||||||
|
|
||||||
sealed trait NamedType extends Type {
|
sealed trait NamedType extends Type {
|
||||||
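A hedged sketch of how the `CollectionType.elementTypeOf` helper defined above behaves (assuming the aqua `types` module on the classpath; the expected results follow from the code itself):

import aqua.types.*

@main def elementTypeDemo(): Unit = {
  // Streams are widened to arrays before the intersection, so mixing
  // *u32 and []u32 yields the element type []u32.
  println(CollectionType.elementTypeOf(List(StreamType(ScalarType.u32), ArrayType(ScalarType.u32))))
  // expected: []u32

  // Incomparable element types intersect to Bottom, which is mapped to TopType,
  // i.e. no constraint on the collection's element type.
  println(CollectionType.elementTypeOf(List(ScalarType.u32, ScalarType.string)))
  // expected: TopType
}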
@ -374,7 +419,13 @@ case class StructType(name: String, fields: NonEmptyMap[String, Type])
|
|||||||
s"$fullName{${fields.map(_.toString).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")}}"
|
s"$fullName{${fields.map(_.toString).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")}}"
|
||||||
}
|
}
|
||||||
|
|
||||||
case class StreamMapType(element: Type) extends DataType {
|
sealed trait MutableStreamType extends Type with CollectionType
|
||||||
|
|
||||||
|
case class StreamMapType(override val element: DataType) extends MutableStreamType {
|
||||||
|
|
||||||
|
override val isStream: Boolean = true
|
||||||
|
|
||||||
|
override def withElement(t: DataType): CollectionType = copy(element = t)
|
||||||
|
|
||||||
override def toString: String = s"%$element"
|
override def toString: String = s"%$element"
|
||||||
}
|
}
|
||||||
@ -383,6 +434,15 @@ object StreamMapType {
|
|||||||
def top(): StreamMapType = StreamMapType(TopType)
|
def top(): StreamMapType = StreamMapType(TopType)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
case class StreamType(override val element: DataType) extends MutableStreamType {
|
||||||
|
|
||||||
|
override val isStream: Boolean = true
|
||||||
|
|
||||||
|
override def toString: String = s"*$element"
|
||||||
|
|
||||||
|
override def withElement(t: DataType): CollectionType = copy(element = t)
|
||||||
|
}
|
||||||
|
|
||||||
case class ServiceType(name: String, fields: NonEmptyMap[String, ArrowType]) extends NamedType {
|
case class ServiceType(name: String, fields: NonEmptyMap[String, ArrowType]) extends NamedType {
|
||||||
|
|
||||||
override val specifier: String = "service"
|
override val specifier: String = "service"
|
||||||
@ -424,17 +484,13 @@ case class ArrowType(domain: ProductType, codomain: ProductType) extends Type {
|
|||||||
s"$domain -> $codomain"
|
s"$domain -> $codomain"
|
||||||
}
|
}
|
||||||
|
|
||||||
case class StreamType(element: Type) extends BoxType {
|
|
||||||
|
|
||||||
override def isStream: Boolean = true
|
|
||||||
|
|
||||||
override def toString: String = s"*$element"
|
|
||||||
|
|
||||||
override def withElement(t: Type): BoxType = copy(element = t)
|
|
||||||
}
|
|
||||||
|
|
||||||
object Type {
|
object Type {
|
||||||
|
|
||||||
implicit lazy val typesPartialOrder: PartialOrder[Type] =
|
/**
|
||||||
|
* `StreamType` is collectible with canonicalization
|
||||||
|
*/
|
||||||
|
type CollectibleType = DataType | StreamType
|
||||||
|
|
||||||
|
given PartialOrder[Type] =
|
||||||
CompareTypes.partialOrder
|
CompareTypes.partialOrder
|
||||||
}
|
}
|
||||||
|
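A hedged sketch of the `∪` join that `isComparable` in the interpreter now relies on (assuming the aqua `types` module; two types are considered comparable exactly when their join is not `TopType`):

import aqua.types.*

@main def uniteDemo(): Unit = {
  // ?u32 and []u32 have a proper join, so values of these types are comparable.
  println(ArrayType(ScalarType.u32) `∪` OptionType(ScalarType.u32)) // expected: []u32
  // u32 and string only join at the top, so they are not comparable.
  println(ScalarType.u32 `∪` ScalarType.string)                     // expected: TopType
}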
@ -1,8 +1,9 @@
|
|||||||
package aqua.types
|
package aqua.types
|
||||||
|
|
||||||
|
import aqua.errors.Errors.internalError
|
||||||
|
|
||||||
import cats.Monoid
|
import cats.Monoid
|
||||||
import cats.data.NonEmptyMap
|
import cats.data.NonEmptyMap
|
||||||
|
|
||||||
import scala.annotation.tailrec
|
import scala.annotation.tailrec
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -28,6 +29,12 @@ case class UniteTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type]:
|
|||||||
step(a.toList, b.toList, Nil)
|
step(a.toList, b.toList, Nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def combineDataTypes(a: DataType, b: DataType): DataType =
|
||||||
|
(a `∪` b) match {
|
||||||
|
case d: DataType => d
|
||||||
|
case t => internalError(s"$a ∪ $b yields non-data type $t")
|
||||||
|
}
|
||||||
|
|
||||||
override def combine(a: Type, b: Type): Type =
|
override def combine(a: Type, b: Type): Type =
|
||||||
(a, b) match {
|
(a, b) match {
|
||||||
case (ap: ProductType, bp: ProductType) =>
|
case (ap: ProductType, bp: ProductType) =>
|
||||||
@ -52,18 +59,19 @@ case class UniteTypes(scalarsCombine: ScalarsCombine.T) extends Monoid[Type]:
|
|||||||
)
|
)
|
||||||
|
|
||||||
case (ac: OptionType, bc: ArrayType) =>
|
case (ac: OptionType, bc: ArrayType) =>
|
||||||
ArrayType(ac.element `∪` bc.element)
|
ArrayType(combineDataTypes(ac.element, bc.element))
|
||||||
|
|
||||||
case (ac: ArrayType, bc: OptionType) =>
|
case (ac: ArrayType, bc: OptionType) =>
|
||||||
ArrayType(ac.element `∪` bc.element)
|
ArrayType(combineDataTypes(ac.element, bc.element))
|
||||||
|
|
||||||
case (ac: ArrayType, bc: ArrayType) =>
|
case (ac: ArrayType, bc: ArrayType) =>
|
||||||
ArrayType(ac.element `∪` bc.element)
|
ArrayType(combineDataTypes(ac.element, bc.element))
|
||||||
case (ac: OptionType, bc: OptionType) =>
|
case (ac: OptionType, bc: OptionType) =>
|
||||||
OptionType(ac.element `∪` bc.element)
|
OptionType(combineDataTypes(ac.element, bc.element))
|
||||||
|
|
||||||
case (ac: StreamType, bc: StreamType) =>
|
case (ac: StreamType, bc: StreamType) =>
|
||||||
StreamType(ac.element `∩` bc.element)
|
StreamType(combineDataTypes(ac.element, bc.element))
|
||||||
|
case (ac: StreamMapType, bc: StreamMapType) =>
|
||||||
|
StreamMapType(combineDataTypes(ac.element, bc.element))
|
||||||
|
|
||||||
case (a: ScalarType, b: ScalarType) =>
|
case (a: ScalarType, b: ScalarType) =>
|
||||||
scalarsCombine(a, b)
|
scalarsCombine(a, b)
|
||||||
|
@ -1,6 +1,5 @@
|
|||||||
package aqua.types
|
package aqua.types
|
||||||
|
|
||||||
import aqua.types.Type.typesPartialOrder
|
|
||||||
import cats.data.NonEmptyMap
|
import cats.data.NonEmptyMap
|
||||||
import cats.kernel.PartialOrder
|
import cats.kernel.PartialOrder
|
||||||
import cats.syntax.partialOrder._
|
import cats.syntax.partialOrder._
|
||||||
@ -15,7 +14,7 @@ class TypeSpec extends AnyFlatSpec with Matchers {
|
|||||||
|
|
||||||
def `?`(t: DataType): DataType = OptionType(t)
|
def `?`(t: DataType): DataType = OptionType(t)
|
||||||
|
|
||||||
def `*`(t: DataType): DataType = StreamType(t)
|
def `*`(t: DataType): StreamType = StreamType(t)
|
||||||
|
|
||||||
def accepts(recv: Type, incoming: Type) =
|
def accepts(recv: Type, incoming: Type) =
|
||||||
recv >= incoming
|
recv >= incoming
|
||||||
@ -76,7 +75,8 @@ class TypeSpec extends AnyFlatSpec with Matchers {
|
|||||||
|
|
||||||
"structs of scalars with literals" should "be variant" in {
|
"structs of scalars with literals" should "be variant" in {
|
||||||
val one: Type = StructType("one", NonEmptyMap.of("field" -> u64))
|
val one: Type = StructType("one", NonEmptyMap.of("field" -> u64))
|
||||||
val two: Type = StructType("two", NonEmptyMap.of("field" -> LiteralType.number, "other" -> string))
|
val two: Type =
|
||||||
|
StructType("two", NonEmptyMap.of("field" -> LiteralType.number, "other" -> string))
|
||||||
|
|
||||||
accepts(one, two) should be(true)
|
accepts(one, two) should be(true)
|
||||||
accepts(two, one) should be(false)
|
accepts(two, one) should be(false)
|
||||||
|
@ -1,8 +1,10 @@
|
|||||||
package aqua.helpers.syntax
|
package aqua.helpers.syntax
|
||||||
|
|
||||||
import cats.{Functor, Monad}
|
|
||||||
import cats.data.OptionT
|
import cats.data.OptionT
|
||||||
|
import cats.syntax.apply.*
|
||||||
|
import cats.syntax.flatMap.*
|
||||||
import cats.syntax.functor.*
|
import cats.syntax.functor.*
|
||||||
|
import cats.{Functor, Monad}
|
||||||
|
|
||||||
object optiont {
|
object optiont {
|
||||||
|
|
||||||
@ -28,4 +30,20 @@ object optiont {
|
|||||||
)(using F: Monad[F]): OptionT[F, B] =
|
)(using F: Monad[F]): OptionT[F, B] =
|
||||||
o.flatTransform(f.andThen(_.value))
|
o.flatTransform(f.andThen(_.value))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
extension [F[_]: Monad, A, B](
|
||||||
|
t: Tuple2[OptionT[F, A], OptionT[F, B]]
|
||||||
|
) {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Merges `OptionT`s into `OptionT` of a tuple,
|
||||||
|
* **executing both effects**.
|
||||||
|
*/
|
||||||
|
def merged: OptionT[F, (A, B)] = OptionT(
|
||||||
|
for {
|
||||||
|
a <- t._1.value
|
||||||
|
b <- t._2.value
|
||||||
|
} yield (a, b).tupled
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
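The `merged` helper above exists because combining two `OptionT` values with the usual `mapN` goes through `flatMap` and short-circuits: if the first value is `None`, the second effect never runs. A standalone sketch of the difference, using `Writer` as the effect; all names here are illustrative, not part of the commit:

import cats.data.{OptionT, Writer}
import cats.syntax.apply.*

type Log[A] = Writer[List[String], A]

def step(name: String, result: Option[Int]): OptionT[Log, Int] =
  OptionT(Writer(List(s"ran $name"), result))

// mapN short-circuits: when step "a" yields None, step "b"'s log never appears.
val shortCircuit: OptionT[Log, Int] =
  (step("a", None), step("b", Some(2))).mapN(_ + _)

// A `merged`-style combination runs both effects before combining the options.
val runBoth: OptionT[Log, (Int, Int)] = OptionT(
  for {
    a <- step("a", None).value
    b <- step("b", Some(2)).value
  } yield (a, b).tupled
)

@main def mergedDemo(): Unit = {
  println(shortCircuit.value.run) // (List(ran a), None)
  println(runBoth.value.run)      // (List(ran a, ran b), None)
}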