fix(compiler): Use correct empty stream as argument [LNG-280, LNG-292] (#967)

Dima 2023-11-28 17:45:12 +07:00 committed by GitHub
parent 6329e42f56
commit 4cecab1a26
30 changed files with 618 additions and 188 deletions

View File

@ -9,7 +9,7 @@ import cats.data.Chain
import cats.data.Validated.{Invalid, Valid}
import cats.effect.{IO, IOApp}
import fs2.io.file.{Files, Path}
import fs2.{text, Stream}
import fs2.{Stream, text}
object Test extends IOApp.Simple {

View File

@ -1,2 +1,20 @@
func arr(strs: []string) -> []string
<- strs
aqua StreamArgs
export lng280BugWithForEmptyStreamFunc
service StreamService("test-service"):
store(numbers: []u32, n: u32)
func callService(stream: *u32, n: u32):
stream <<- 1
StreamService.store(stream, n)
func returnEmptyStream() -> *u32:
<- *[]
func lng280BugWithForEmptyStreamFunc():
arr = [1,2,3,4,5]
for a <- arr:
str <- returnEmptyStream()
-- passing the function directly won't work, see LNG-290
callService(str, a)

View File

@ -1,10 +0,0 @@
service TestService("test-service"):
get_records(key: string) -> []string
func append_records(peer: string, srum: *[]string):
srum <- TestService.get_records(peer)
func retrieve_records(peer: string) -> [][]string:
records: *[]string
append_records(peer, records)
<- records

View File

@ -38,9 +38,7 @@ func returnEmpty() -> *string:
<- relayNil
func returnEmptyLiteral() -> *string:
empty: *string
-- TODO: return *[] here after LNG-280
<- empty
<- *[]
func returnNilLength() -> u32:
arr = nil

View File

@ -1,6 +1,8 @@
aqua StreamArgs
export retrieve_records, modify_stream, TestService
export retrieve_records, modify_stream, TestService, StreamService
export lng280Bug, lng280BugWithFor, lng280BugWithForAnonStream
export returnDerivedStream, lng280BugWithForEmptyStreamFunc
service TestService("test-service"):
get_records(key: string) -> []string
@ -15,4 +17,60 @@ func retrieve_records(peer: string) -> [][]string:
func modify_stream(stream: *string) -> []string:
stream <<- "appended value"
<- stream
<- stream
func useStream(stream: *string) -> *string:
stream <<- "valueUseStream"
<- stream
func returnStream() -> *string:
r <- useStream(*[])
r <<- "valueReturnStream"
<- r
func lng280Bug() -> []string:
stream <- returnStream()
stream <<- "valueTop"
<- stream
func lng280BugWithFor() -> []string:
results: *string
arr = [1,2,3]
for a <- arr:
stream <- returnStream()
for s <- stream:
results <<- s
<- results
service StreamService("test-service"):
store(numbers: []u32, n: u32)
func callService(stream: *u32, n: u32):
stream <<- 1
StreamService.store(stream, n)
func lng280BugWithForAnonStream():
arr = [1,2,3,4,5]
for a <- arr:
callService(*[], a)
func returnEmptyStream() -> *u32:
<- *[]
func lng280BugWithForEmptyStreamFunc():
arr = [1,2,3,4,5]
for a <- arr:
str <- returnEmptyStream()
-- passing the function directly won't work, see LNG-290
callService(str, a)
func getStream(str: *u32) -> *u32:
nums = str
nums <<- 1
<- nums
func returnDerivedStream() -> *u32:
nums <- getStream(*[])
<- nums

View File

@ -84,8 +84,13 @@ import { tryOtherwiseCall } from "../examples/tryOtherwiseCall.js";
import { coCall } from "../examples/coCall.js";
import { bugLNG60Call, passArgsCall } from "../examples/passArgsCall.js";
import {
lng280BugCall,
lng280BugWithForAnonStreamCall,
lng280BugWithForCall,
streamArgsCall,
modifyStreamCall,
returnDerivedStreamCall,
lng280BugWithForEmptyStreamFuncCall
} from "../examples/streamArgsCall.js";
import { streamResultsCall } from "../examples/streamResultsCall.js";
import { structuralTypingCall } from "../examples/structuralTypingCall.js";
@ -617,6 +622,38 @@ describe("Testing examples", () => {
]);
});
it.skip("streamArgs.aqua LNG-280", async () => {
let result = await lng280BugCall();
expect(result).toEqual(["valueUseStream", "valueReturnStream", "valueTop"]);
});
it.skip("streamArgs.aqua LNG-280 with for", async () => {
let result = await lng280BugWithForCall();
expect(result).toEqual([
"valueUseStream",
"valueReturnStream",
"valueUseStream",
"valueReturnStream",
"valueUseStream",
"valueReturnStream"
]);
});
it("streamArgs.aqua LNG-280 with for and anonymous stream", async () => {
let result = await lng280BugWithForAnonStreamCall();
expect(result).toEqual([[1, 1], [1, 2], [1, 3], [1, 4], [1, 5]]);
});
it("streamArgs.aqua LNG-280 with for and anonymous stream from function", async () => {
let result = await lng280BugWithForEmptyStreamFuncCall();
expect(result).toEqual([[1, 1], [1, 2], [1, 3], [1, 4], [1, 5]]);
});
it.skip("streamArgs.aqua return derived stream", async () => {
let result = await returnDerivedStreamCall();
expect(result).toEqual([1]);
});
it("streamResults.aqua", async () => {
let streamResultsResult = await streamResultsCall();
expect(streamResultsResult).toEqual(["new_name", "new_name", "new_name"]);

View File

@ -2,6 +2,12 @@ import {
retrieve_records,
modify_stream,
registerTestService,
registerStreamService,
lng280Bug,
lng280BugWithFor,
lng280BugWithForAnonStream,
returnDerivedStream,
lng280BugWithForEmptyStreamFunc
} from "../compiled/examples/streamArgs.js";
export async function streamArgsCall() {
@ -17,3 +23,41 @@ export async function streamArgsCall() {
export async function modifyStreamCall(arg: string[]) {
return await modify_stream(arg);
}
export async function lng280BugCall(): Promise<string[]> {
return lng280Bug()
}
export async function lng280BugWithForCall(): Promise<string[]> {
return lng280BugWithFor()
}
export async function lng280BugWithForAnonStreamCall(): Promise<number[][]> {
let storage: number[][] = []
registerStreamService({
store: (numbers, n) => {
numbers.push(n)
storage.push(numbers)
},
});
await lng280BugWithForAnonStream()
return storage
}
export async function lng280BugWithForEmptyStreamFuncCall(): Promise<number[][]> {
let storage: number[][] = []
registerStreamService({
store: (numbers, n) => {
numbers.push(n)
storage.push(numbers)
},
});
await lng280BugWithForEmptyStreamFunc()
return storage
}
export async function returnDerivedStreamCall(): Promise<number[]> {
return returnDerivedStream()
}

View File

@ -10,22 +10,21 @@ import aqua.parser.lift.FileSpan.F
import aqua.parser.lift.{FileSpan, Span}
import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
import aqua.raw.ConstantRaw
import aqua.semantics.{HeaderError, RulesViolated, SemanticWarning, WrongAST}
import aqua.semantics.*
import aqua.{AquaIO, SpanParser}
import cats.data.Validated.{invalidNec, validNec, Invalid, Valid}
import cats.data.{NonEmptyChain, Validated}
import cats.data.Validated
import cats.data.Validated.{Invalid, Valid}
import cats.effect.IO
import cats.syntax.option.*
import cats.effect.unsafe.implicits.global
import cats.syntax.option.*
import fs2.io.file.{Files, Path}
import scribe.Logging
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.scalajs.js
import scala.scalajs.js.JSConverters.*
import scala.scalajs.js.annotation.*
import scala.scalajs.js.{undefined, UndefOr}
import scala.scalajs.js.{UndefOr, undefined}
import scribe.Logging
@JSExportAll
case class CompilationResult(

View File

@ -8,16 +8,13 @@ import aqua.raw.ops.RawTag
import aqua.raw.value.{ValueRaw, VarRaw}
import aqua.types.*
import cats.data.StateT
import cats.data.{Chain, IndexedStateT, State}
import cats.data.{Chain, IndexedStateT, State, StateT}
import cats.kernel.Semigroup
import cats.syntax.applicative.*
import cats.syntax.bifunctor.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.option.*
import cats.syntax.semigroup.*
import cats.syntax.show.*
import cats.syntax.traverse.*
import cats.{Eval, Monoid}
import scribe.Logging
@ -373,6 +370,41 @@ object ArrowInliner extends Logging {
} yield values -> arrows
}
// change all collection-typed arguments that receive streams to canonicalized streams
private def collectionsToCanons(
tree: RawTag.Tree,
streamArgs: Map[String, VarModel]
): RawTag.Tree = {
// collect arguments with stream type
// to exclude them from resolving and rename them to the higher-level stream passed as an argument
val streamsToRename = streamArgs.view.mapValues(_.name).toMap
if (streamsToRename.isEmpty) tree
else
tree
.map(_.mapValues(_.map {
// if an argument is a BoxType (Array or Option) but a stream is passed,
// change the type to a canon stream so that the `$` sign is not missed in the AIR
case v @ VarRaw(name, baseType: CollectionType) if streamsToRename.contains(name) =>
v.copy(baseType = CanonStreamType(baseType.element))
case v => v
}))
.renameExports(streamsToRename)
}
// Change the type of collection arguments that receive streams
private def canonStreamVariables[S: Mangler](
args: ArgsCall
): State[S, (Map[String, String], List[OpModel.Tree])] =
args.streamToImmutableArgsWithTypes.toList.traverse { case (argName, (vm, StreamType(t))) =>
Mangler[S].findAndForbidName(vm.name + "_canon").map { canonName =>
(
(argName, canonName),
CanonicalizeModel(vm, CallModel.Export(canonName, CanonStreamType(t))).leaf
)
}
}.map(_.unzip.leftMap(_.toMap))
/**
* Prepare the function and the context for inlining
*
@ -387,7 +419,7 @@ object ArrowInliner extends Logging {
call: CallModel,
exports: Map[String, ValueModel],
arrows: Map[String, FuncArrow]
): State[S, FuncArrow] = for {
): State[S, (FuncArrow, OpModel.Tree)] = for {
args <- ArgsCall(fn.arrowType.domain, call.args).pure[State[S, *]]
argNames = args.argNames
@ -421,15 +453,18 @@ object ArrowInliner extends Logging {
)
)
defineRenames <- Mangler[S].findAndForbidNames(defineNames)
canonStreamsWithNames <- canonStreamVariables(args)
(renamedCanonStreams, canons) = canonStreamsWithNames
renaming =
data.renames ++
streamRenames ++
arrowRenames ++
abRenames ++
capturedValues.renames ++
capturedArrows.renames ++
defineRenames
defineRenames ++
renamedCanonStreams ++
streamRenames
/**
* TODO: Optimize resolve.
@ -440,11 +475,15 @@ object ArrowInliner extends Logging {
exportsResolved = exports ++ data.renamed ++ capturedValues.renamed
tree = fn.body.rename(renaming)
streamToCanonArgs = args.streamToImmutableArgs.renamed(renamedCanonStreams)
treeWithCanons = collectionsToCanons(tree, streamToCanonArgs)
ret = fn.ret.map(_.renameVars(renaming))
_ <- Arrows[S].resolved(arrowsResolved)
_ <- Exports[S].resolved(exportsResolved)
} yield fn.copy(body = tree, ret = ret)
} yield (fn.copy(body = treeWithCanons, ret = ret), SeqModel.wrap(canons))
private[inline] def callArrowRet[S: Exports: Arrows: Mangler](
arrow: FuncArrow,
@ -463,9 +502,9 @@ object ArrowInliner extends Logging {
Arrows[S].scope(
for {
// Process renamings, prepare environment
fn <- ArrowInliner.prelude(arrow, call, exports, arrows)
inlineResult <- ArrowInliner.inline(fn, call, streams)
} yield inlineResult
fnCanon <- ArrowInliner.prelude(arrow, call, exports, arrows)
inlineResult <- ArrowInliner.inline(fnCanon._1, call, streams)
} yield inlineResult.copy(tree = SeqModel.wrap(fnCanon._2, inlineResult.tree))
)
)

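The hunk above is the core of the fix: when a parameter is declared as an immutable collection ([]T or ?T) but the call site passes a stream (*T), the inliner now emits a CanonicalizeModel into a fresh `<name>_canon` export before the body and rewrites references to that argument inside the body to the canon-stream-typed value. A minimal, self-contained sketch of that retype-and-rename step, using simplified stand-in types rather than the compiler's real model classes, might look like this:

object CanonizeSketch {

  sealed trait Ty
  case class ArrayTy(element: String)  extends Ty // immutable collection, e.g. []string
  case class StreamTy(element: String) extends Ty // mutable stream, e.g. *string
  case class CanonTy(element: String)  extends Ty // canonicalized stream

  case class Var(name: String, ty: Ty)

  // Declared parameter type paired with the value actually passed at the call site.
  case class Arg(declared: Ty, passed: Var)

  // For every stream passed into an immutable-collection parameter, produce the
  // canonicalized variable the function body should use instead.
  def canonize(args: Map[String, Arg]): Map[String, Var] =
    args.collect { case (param, Arg(ArrayTy(el), Var(name, StreamTy(_)))) =>
      param -> Var(s"${name}_canon", CanonTy(el))
    }

  def main(argv: Array[String]): Unit = {
    val args = Map("strs" -> Arg(ArrayTy("string"), Var("str", StreamTy("string"))))
    // prints: Map(strs -> Var(str_canon,CanonTy(string)))
    println(canonize(args))
  }
}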
View File

@ -45,6 +45,9 @@ object RawValueInliner extends Logging {
case cr: CollectionRaw =>
CollectionRawInliner(cr, propertiesAllowed)
case sr: StreamRaw =>
StreamRawInliner(sr, propertiesAllowed)
case dr: MakeStructRaw =>
MakeStructRawInliner(dr, propertiesAllowed)
@ -89,14 +92,6 @@ object RawValueInliner extends Logging {
_ = logger.trace("map was: " + map)
} yield vm -> parDesugarPrefix(ops.filterNot(_ == EmptyModel.leaf))
def collectionToModel[S: Mangler: Exports: Arrows](
value: CollectionRaw,
assignTo: Option[String]
): State[S, (ValueModel, Option[OpModel.Tree])] = {
logger.trace("RAW COLLECTION " + value)
toModel(CollectionRawInliner.unfoldCollection(value, assignTo))
}
def valueToModel[S: Mangler: Exports: Arrows](
value: ValueRaw,
propertiesAllowed: Boolean = true
@ -117,10 +112,12 @@ object RawValueInliner extends Logging {
def callToModel[S: Mangler: Exports: Arrows](
call: Call,
flatStreamArguments: Boolean
): State[S, (CallModel, Option[OpModel.Tree])] =
): State[S, (CallModel, Option[OpModel.Tree])] = {
valueListToModel(call.args).flatMap { args =>
if (flatStreamArguments)
args.map(arg => TagInliner.flat(arg._1, arg._2, true)).sequence
args.map{ arg =>
TagInliner.flat(arg._1, arg._2, true)
}.sequence
else
State.pure(args)
}.map { list =>
@ -132,4 +129,5 @@ object RawValueInliner extends Logging {
parDesugarPrefix(list.flatMap(_._2))
)
}
}
}

View File

@ -2,7 +2,6 @@ package aqua.model.inline
import aqua.errors.Errors.internalError
import aqua.model.*
import aqua.model.inline.RawValueInliner.collectionToModel
import aqua.model.inline.raw.{CallArrowRawInliner, CallServiceRawInliner}
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.model.inline.tag.IfTagInliner
@ -274,7 +273,7 @@ object TagInliner extends Logging {
)
case (_, (vm, prefix)) =>
internalError(
s"stream (${exportTo}) resolved " +
s"stream ($exportTo) resolved " +
s"to ($vm) with prefix ($prefix)"
)
}
@ -367,13 +366,7 @@ object TagInliner extends Logging {
case AssignmentTag(value, assignTo) =>
for {
modelAndPrefix <- value match {
// if we assign a collection to a stream, we must use its name, because it is already created with 'new'
case c @ CollectionRaw(_, _: StreamType) =>
collectionToModel(c, Some(assignTo))
case v =>
valueToModel(v, false)
}
modelAndPrefix <- valueToModel(value, false)
(model, prefix) = modelAndPrefix
_ <- Exports[S].resolved(assignTo, model)
} yield TagInlined.Empty(prefix = prefix)

View File

@ -1,15 +1,12 @@
package aqua.model.inline.raw
import aqua.errors.Errors.internalError
import aqua.model.*
import aqua.model.inline.Inline
import aqua.model.inline.RawValueInliner.{callToModel, valueToModel}
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.model.inline.{ArrowInliner, Inline, TagInliner}
import aqua.raw.ops.Call
import aqua.raw.value.CallServiceRaw
import cats.data.{Chain, State}
import cats.syntax.traverse.*
import scribe.Logging
object CallServiceRawInliner extends RawInliner[CallServiceRaw] with Logging {
@ -21,6 +18,7 @@ object CallServiceRawInliner extends RawInliner[CallServiceRaw] with Logging {
logger.trace(s"${exportTo.mkString(" ")} $value")
logger.trace(Console.BLUE + s"call service id ${value.serviceId}" + Console.RESET)
val call = Call(value.arguments, exportTo)
for {
@ -48,10 +46,10 @@ object CallServiceRawInliner extends RawInliner[CallServiceRaw] with Logging {
): State[S, (ValueModel, Inline)] =
Mangler[S]
.findAndForbidName(raw.fnName)
.flatMap(n =>
.flatMap { n =>
unfold(raw, Call.Export(n, raw.`type`) :: Nil).map {
case (Nil, inline) => (VarModel(n, raw.`type`), inline)
case (h :: _, inline) => (h, inline)
}
)
}
}

View File

@ -15,25 +15,13 @@ object CollectionRawInliner extends RawInliner[CollectionRaw] {
override def apply[S: Mangler: Exports: Arrows](
raw: CollectionRaw,
propertiesAllowed: Boolean
): State[S, (ValueModel, Inline)] = unfoldCollection(raw)
def unfoldCollection[S: Mangler: Exports: Arrows](
raw: CollectionRaw,
assignToName: Option[String] = None
): State[S, (ValueModel, Inline)] =
for {
streamName <- raw.collectionType match {
case _: StreamType =>
assignToName.fold(
Mangler[S].findAndForbidName("stream-inline")
)(State.pure)
case _: StreamMapType =>
assignToName.fold(
Mangler[S].findAndForbidName("stream_map-inline")
)(State.pure)
case _: CanonStreamType => Mangler[S].findAndForbidName("canon_stream-inline")
case _: ArrayType => Mangler[S].findAndForbidName("array-inline")
case _: OptionType => Mangler[S].findAndForbidName("option-inline")
// CanonStreamType is here just to avoid a compilation warning. Right now it is unreachable
case _: CanonStreamType => Mangler[S].findAndForbidName("canon_stream-inline")
}
streamType = StreamType(raw.elementType)
@ -58,10 +46,7 @@ object CollectionRawInliner extends RawInliner[CollectionRaw] {
canonName <-
if (raw.collectionType.isStream) State.pure(streamName)
else Mangler[S].findAndForbidName(streamName)
canonType = raw.collectionType match {
case StreamType(_) => raw.collectionType
case _ => CanonStreamType(raw.collectionType.element)
}
canonType = CanonStreamType(raw.collectionType.element)
canon = CallModel.Export(canonName, canonType)
} yield VarModel(canonName, canon.`type`) -> Inline.tree(
raw.collectionType match {

View File

@ -0,0 +1,40 @@
package aqua.model.inline.raw
import aqua.model.*
import aqua.model.inline.Inline
import aqua.model.inline.RawValueInliner.valueToModel
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.raw.value.StreamRaw
import cats.data.{Chain, State}
import cats.syntax.traverse.*
object StreamRawInliner extends RawInliner[StreamRaw] {
override def apply[S: Mangler: Exports: Arrows](
raw: StreamRaw,
propertiesAllowed: Boolean
): State[S, (ValueModel, Inline)] = {
val streamExp = CallModel.Export(raw.streamName, raw.streamType)
val streamVal = streamExp.asVar
for {
valsWithInlines <- raw.values
.traverse(valueToModel(_))
.map(Chain.fromSeq)
// push the values to the stream; this is what gathers the collection
vals = valsWithInlines.map { case (v, _) =>
PushToStreamModel(v, streamExp).leaf
}
// all inlines will be added before pushing values to the stream
inlines = valsWithInlines.flatMap { case (_, t) =>
Chain.fromOption(t)
}
_ <- Exports[S].resolved(raw.streamName, streamVal)
} yield streamVal -> Inline.tree(
SeqModel.wrap(inlines ++ vals)
)
}
}

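As a rough illustration of what the new StreamRawInliner produces for a stream literal: the inline prefixes of the element values come first, then one push per element, all wrapped in a sequence, while the stream name itself is registered in Exports. A simplified, self-contained sketch of that ordering (stand-in classes, not the compiler's OpModel API):

object StreamLiteralSketch {

  sealed trait Op
  case class Prefix(describe: String)            extends Op // inline ops of an element value
  case class Push(stream: String, value: String) extends Op // push one value into the stream

  // Lower a stream literal: all element prefixes first, then the pushes, in order.
  def lower(stream: String, elements: List[(String, Option[Op])]): List[Op] = {
    val prefixes = elements.flatMap { case (_, p) => p }
    val pushes   = elements.map { case (v, _) => Push(stream, v) }
    prefixes ++ pushes
  }

  def main(args: Array[String]): Unit =
    // prints: List(Push(stream-anon-0,1), Push(stream-anon-0,2))
    println(lower("stream-anon-0", List("1" -> None, "2" -> None)))
}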
View File

@ -94,10 +94,13 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers with Inside {
*/
it should "pass stream to callback properly" in {
val streamType = StreamType(ScalarType.string)
val streamVar = VarRaw("records", streamType)
val streamModel = VarModel("records", StreamType(ScalarType.string))
val streamName = "records"
val streamVar = VarRaw(streamName, streamType)
val streamModel = VarModel(streamName, StreamType(ScalarType.string))
val canonName = streamVar.name + "_canon"
val canonModel = VarModel(canonName, CanonStreamType(ScalarType.string))
val cbType = ArrowType(ProductType(ArrayType(ScalarType.string) :: Nil), ProductType(Nil))
val cbVal = VarModel("cb-pass", cbType)
@ -184,6 +187,92 @@ class ArrowInlinerSpec extends AnyFlatSpec with Matchers with Inside {
}
/*
func use(str: []string):
Srv.useArr(str)
func call():
str = *[]
use(str)
*/
it should "pass stream to function with array argument properly" in {
val streamName = "str"
val streamType = StreamType(ScalarType.string)
val streamVar = VarRaw(streamName, streamType)
val streamModel = VarModel(streamName, streamType)
val canonName = streamName + "_canon"
val canonModel = VarModel(canonName, CanonStreamType(ScalarType.string))
val useArg = VarRaw("str", ArrayType(ScalarType.string))
val useArrow = FuncArrow(
"use",
CallArrowRawTag
.service(
LiteralRaw.quote("srv"),
"useArr",
Call(useArg :: Nil, Nil)
)
.leaf,
ArrowType(
ProductType.labelled(
(
useArg.name,
useArg.`type`
) :: Nil
),
ProductType(Nil)
),
Nil,
Map.empty,
Map.empty,
None
)
val model: OpModel.Tree = ArrowInliner
.callArrow[InliningState](
FuncArrow(
"call",
SeqTag.wrap(
DeclareStreamTag(streamVar).leaf,
CallArrowRawTag.func(useArrow.funcName, Call(streamVar :: Nil, Nil)).leaf
),
ArrowType(
ProductType(Nil),
ProductType(Nil)
),
Nil,
Map(useArrow.funcName -> useArrow),
Map.empty,
None
),
CallModel(Nil, Nil)
)
.runA(
InliningState(
resolvedArrows = Map(
useArrow.funcName -> useArrow
)
)
)
.value
model.equalsOrShowDiff(
CallArrowModel(useArrow.funcName).wrap(
SeqModel.wrap(
CanonicalizeModel(streamModel, CallModel.Export(canonModel.name, canonModel.`type`)).leaf,
CallServiceModel(
LiteralModel.quote("srv"),
"useArr",
CallModel(canonModel :: Nil, Nil)
).leaf
)
)
) should be(true)
}
/**
* func returnNil() -> *string:
* someStr: *string

View File

@ -1,6 +1,5 @@
package aqua.raw.arrow
import aqua.raw.value.ValueRaw
import aqua.raw.RawPart
import aqua.types.Type

View File

@ -3,10 +3,10 @@ package aqua.raw.value
import aqua.errors.Errors.internalError
import aqua.types.*
import aqua.types.Type.*
import cats.Eq
import cats.{Eq, Functor, Traverse}
import cats.data.{Chain, NonEmptyList, NonEmptyMap}
import cats.syntax.option.*
import cats.syntax.functor.*
sealed trait ValueRaw {
@ -68,7 +68,7 @@ object ValueRaw {
errorType
)
type ApplyRaw = ApplyPropertyRaw | CallArrowRaw | CollectionRaw | ApplyBinaryOpRaw |
type ApplyRaw = ApplyPropertyRaw | CallArrowRaw | CollectionRaw | StreamRaw | ApplyBinaryOpRaw |
ApplyUnaryOpRaw
extension (v: ValueRaw) {
@ -114,7 +114,7 @@ case class VarRaw(name: String, baseType: Type) extends ValueRaw {
override def mapValues(f: ValueRaw => ValueRaw): ValueRaw = this
override def renameVars(map: Map[String, String]): ValueRaw =
override def renameVars(map: Map[String, String]): VarRaw =
copy(name = map.getOrElse(name, name))
override def toString: String = s"var{$name: " + baseType + s"}"
@ -159,9 +159,31 @@ object LiteralRaw {
}
}
// StreamRaw must have a stream name, because a stream cannot be `nil`,
// so we must know the name of the stream to handle it in all cases
case class StreamRaw(values: List[ValueRaw], streamName: String, streamType: StreamType) extends ValueRaw {
lazy val elementType: DataType = streamType.element
override lazy val baseType: Type = streamType
override def mapValues(f: ValueRaw => ValueRaw): ValueRaw = {
val (vals, element) = CollectionRaw.mapCollection(f, values)
copy(
values = vals,
streamType = streamType.withElement(element)
)
}
override def varNames: Set[String] = (values.flatMap(_.varNames) :+ streamName).toSet
override def renameVars(map: Map[String, String]): ValueRaw =
copy(values = values.map(_.renameVars(map)), streamName = map.getOrElse(streamName, streamName))
}
case class CollectionRaw(
values: NonEmptyList[ValueRaw],
collectionType: CollectionType
collectionType: ImmutableCollectionType
) extends ValueRaw {
lazy val elementType: DataType = collectionType.element
@ -169,12 +191,7 @@ case class CollectionRaw(
override lazy val baseType: Type = collectionType
override def mapValues(f: ValueRaw => ValueRaw): ValueRaw = {
val vals = values.map(f)
val types = vals.map(_.`type` match {
case ct: CollectibleType => ct
case t => internalError(s"Non-collection type in collection: ${t}")
})
val element = CollectionType.elementTypeOf(types.toList)
val (vals, element) = CollectionRaw.mapCollection(f, values)
copy(
values = vals,
@ -188,6 +205,20 @@ case class CollectionRaw(
copy(values = values.map(_.renameVars(map)))
}
object CollectionRaw {
def mapCollection[F[_]: Traverse](f: ValueRaw => ValueRaw, values: F[ValueRaw]): (F[ValueRaw], DataType) = {
val vals = values.map(f)
val types = vals.map(_.`type` match {
case ct: CollectibleType => ct
case t => internalError(s"Non-collection type in collection: ${t}")
})
val element = CollectionType.elementTypeOf(types)
(vals, element)
}
}
case class MakeStructRaw(fields: NonEmptyMap[String, ValueRaw], structType: StructType)
extends ValueRaw {

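The design note in the hunk above (a stream literal can never collapse to `nil`, so the raw value has to carry its own stream name) also means renaming has to cover that name, not just the variables inside the pushed values. A small sketch of that behaviour with stand-in classes, not the real ValueRaw hierarchy:

object StreamRenameSketch {

  // A stream literal carries both its element values and the name of the stream
  // it materializes into.
  case class StreamLit(values: List[String], streamName: String) {

    // The stream name counts as a variable of this value too.
    def varNames: Set[String] = values.toSet + streamName

    // Renaming must rewrite the pushed values and the stream binder itself.
    def renameVars(map: Map[String, String]): StreamLit =
      copy(
        values = values.map(v => map.getOrElse(v, v)),
        streamName = map.getOrElse(streamName, streamName)
      )
  }

  def main(args: Array[String]): Unit = {
    val lit = StreamLit(List("x"), "stream-anon-0")
    // prints: StreamLit(List(x),str)
    println(lit.renameVars(Map("stream-anon-0" -> "str")))
  }
}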
View File

@ -1,9 +1,8 @@
package aqua.model
import aqua.model.ValueModel.Ability
import aqua.model.{ValueModel, VarModel}
import aqua.model.ValueModel.{Ability, Stream}
import aqua.raw.ops.Call
import aqua.raw.value.{ValueRaw, VarRaw}
import aqua.raw.value.VarRaw
import aqua.types.*
import cats.syntax.foldable.*
@ -35,7 +34,8 @@ case class ArgsCall(args: ProductType, callWith: List[ValueModel]) {
*/
lazy val dataArgs: Map[String, ValueModel] =
zipped.collect {
case ((name, _: DataType), value) if !streamArgs.contains(name) =>
case ((name, _: DataType), value)
if !streamArgs.contains(name) && !streamToImmutableArgs.contains(name) =>
name -> value
}.toMap
@ -61,11 +61,11 @@ case class ArgsCall(args: ProductType, callWith: List[ValueModel]) {
* Stream arguments as mapping
* Name of argument -> variable passed in the call
* NOTE: Argument is stream if it is passed as stream
* on the call site. Type of argument in the function
* definition does not matter.
* on the call site. Type of argument in the function
* definition does not matter.
*/
lazy val streamArgs: Map[String, VarModel] =
zipped.collect { case ((name, _), vr @ VarModel(_, StreamType(_), _)) =>
zipped.collect { case ((name, _: MutableStreamType), Stream(vr, _)) =>
name -> vr
}.toMap
@ -76,6 +76,29 @@ case class ArgsCall(args: ProductType, callWith: List[ValueModel]) {
lazy val streamArgsRenames: Map[String, String] =
streamArgs.view.mapValues(_.name).toMap
/**
* Stream arguments that will be used as immutable collections
* Name of argument -> variable passed in the call
*/
lazy val streamToImmutableArgs: Map[String, VarModel] =
zipped.collect {
case ((name, _: ImmutableCollectionType), vr @ VarModel(_, StreamType(_), _)) =>
name -> vr
}.toMap
lazy val streamToImmutableArgsWithTypes: Map[String, (VarModel, StreamType)] =
zipped.collect {
case ((name, _: ImmutableCollectionType), vr@Stream(_, t)) =>
name -> (vr, t)
}.toMap
/**
* All renamings from stream arguments as mapping
* Name inside function body -> name in the call context
*/
lazy val streamToImmutableArgsRenames: Map[String, String] =
streamToImmutableArgs.view.mapValues(_.name).toMap
/**
* Arrow arguments as mapping
* Name of argument -> variable passed in the call

View File

@ -1,10 +1,9 @@
package aqua.model
import aqua.raw.Raw
import aqua.raw.arrow.FuncRaw
import aqua.raw.ops.{Call, CallArrowRawTag, EmptyTag, RawTag}
import aqua.raw.ops.{Call, CallArrowRawTag, RawTag}
import aqua.raw.value.{ValueRaw, VarRaw}
import aqua.types.{ArrowType, ServiceType, Type}
import aqua.types.{ArrowType, Type}
import cats.syntax.option.*

View File

@ -73,6 +73,16 @@ object ValueModel {
case _ => none
}
}
object Stream {
def unapply(vm: VarModel): Option[(VarModel, StreamType)] =
vm match {
case vm@VarModel(_, t: StreamType, _) =>
(vm, t).some
case _ => none
}
}
}
case class LiteralModel(value: String, `type`: DataType) extends ValueModel {

View File

@ -53,21 +53,27 @@ class ArrowSem[S[_]](val expr: ArrowExpr[S]) extends AnyVal {
): Alg[Raw] = for {
streamsInScope <- N.streamsDefinedWithinScope()
retValues <- T.endArrowScope(expr.arrowTypeExpr)
// TODO: wrap with local on...via...
retsAndCodomain = retValues zip funcArrow.codomain.toList
(streamThatReturnAsStreamVars, streamThatReturnAsStreamNames) = retsAndCodomain.collect {
case (vr @ VarRaw(name, StreamType(_)), StreamType(_)) => (vr, name)
case (vr @ StreamRaw(_, name, _), StreamType(_)) => (vr, name)
}.unzip
// values derived from the streams that are returned as streams
derivedStreamRetValues <- N
.getDerivedFrom(streamThatReturnAsStreamVars.map(_.varNames))
.map(_.flatten.toSet)
res <- bodyGen match {
case FuncOp(bodyModel) =>
// TODO: wrap with local on...via...
val retsAndArgs = retValues zip funcArrow.codomain.toList
val streamArgNames = funcArrow.domain.labelledStreams.map { case (name, _) => name }
val streamsThatReturnAsStreams = retsAndArgs.collect {
case (VarRaw(n, StreamType(_)), StreamType(_)) => n
}.toSet
// Remove arguments and values returned as streams
val localStreams = streamsInScope -- streamArgNames -- streamsThatReturnAsStreams
val localStreams = streamsInScope -- streamArgNames --
streamThatReturnAsStreamNames.toSet -- derivedStreamRetValues
// process streams that are returned as non-streams, and all Apply*Raw values
retsAndArgs.traverse {
retsAndCodomain.traverse {
case (v @ VarRaw(_, StreamType(_)), StreamType(_)) =>
(Chain.empty, v).pure[Alg]
// canonicalize and change return value

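The net effect of the ArrowSem change above is in which streams get a local restriction: streams defined in the function body, minus stream parameters, minus streams returned as streams, minus variables derived from those returned streams. A minimal sketch of that set arithmetic (names are illustrative, not the compiler's API):

object LocalStreamsSketch {

  // Streams that need a local restriction inside the generated body.
  def localStreams(
    definedInScope: Set[String],      // streams declared within the function
    streamParams: Set[String],        // streams that came in as arguments
    returnedAsStreams: Set[String],   // streams returned with a stream return type
    derivedFromReturned: Set[String]  // variables derived from the returned streams
  ): Set[String] =
    definedInScope -- streamParams -- returnedAsStreams -- derivedFromReturned

  def main(args: Array[String]): Unit =
    // prints: Set(results)
    println(localStreams(
      definedInScope = Set("results", "stream", "str"),
      streamParams = Set("str"),
      returnedAsStreams = Set("stream"),
      derivedFromReturned = Set.empty
    ))
}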
View File

@ -1,11 +1,10 @@
package aqua.semantics.expr.func
import aqua.raw.Raw
import aqua.raw.ops.{RawTag, RestrictionTag}
import aqua.semantics.rules.names.NamesAlgebra
import cats.Monad
import cats.{FlatMap, Functor, Monad}
import cats.syntax.functor.*
import cats.syntax.flatMap.*
object FuncOpSem {

View File

@ -24,7 +24,7 @@ case class StackInterpreter[S[_], X, St, Fr](
def mapStackHead_(f: Fr => Fr): SX[Unit] =
mapStackHead(())(f.andThen(_ -> ()))
def mapStackHeadM[A](ifStackEmpty: SX[A])(f: Fr => SX[(Fr, A)]): SX[A] =
def mapStackHeadM[A](ifStackEmpty: => SX[A])(f: Fr => SX[(Fr, A)]): SX[A] =
getState.map(stackLens.get).flatMap {
case head :: tail =>
f(head).flatMap { case (updated, result) =>

View File

@ -8,6 +8,7 @@ import aqua.parser.lexer.InfixToken.{BoolOp, CmpOp, EqOp, MathOp, Op as InfOp}
import aqua.parser.lexer.PrefixToken.Op as PrefOp
import aqua.raw.value.*
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.mangler.ManglerAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.report.ReportAlgebra
import aqua.semantics.rules.types.TypesAlgebra
@ -29,6 +30,7 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
N: NamesAlgebra[S, Alg],
T: TypesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg],
M: ManglerAlgebra[Alg],
report: ReportAlgebra[S, Alg]
) extends Logging {
@ -170,22 +172,44 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](using
}
.value
)
raw = valuesRawChecked.map(raws =>
NonEmptyList
.fromList(raws)
.fold(ValueRaw.Nil) { nonEmpty =>
val (values, types) = nonEmpty.unzip
val element = CollectionType.elementTypeOf(types.toList)
CollectionRaw(
values,
ct.mode match {
case CollectionToken.Mode.StreamMode => StreamType(element)
case CollectionToken.Mode.ArrayMode => ArrayType(element)
case CollectionToken.Mode.OptionMode => OptionType(element)
}
raw <- ct.mode match {
case m @ (CollectionToken.Mode.OptionMode | CollectionToken.Mode.ArrayMode) =>
valuesRawChecked
.map(raws =>
NonEmptyList
.fromList(raws)
.fold(ValueRaw.Nil) { nonEmpty =>
val (values, types) = nonEmpty.unzip
val element = CollectionType.elementTypeOf(types.toList)
CollectionRaw(
values,
m match {
case CollectionToken.Mode.ArrayMode => ArrayType(element)
case CollectionToken.Mode.OptionMode => OptionType(element)
}
)
}
)
}
)
.pure
case CollectionToken.Mode.StreamMode =>
for {
streamName <- M.rename("stream-anon")
raw = valuesRawChecked.map(raws =>
val (values, types) = raws.unzip
val element = CollectionType.elementTypeOf(types)
StreamRaw(
values,
streamName,
StreamType(element)
)
)
// BottomType for empty stream
_ <- N.defineInternal(
streamName,
raw.map(_.streamType).getOrElse(StreamType(BottomType))
)
} yield raw
}
} yield raw
case ca: CallArrowToken[S] =>
@ -470,6 +494,7 @@ object ValuesAlgebra {
N: NamesAlgebra[S, Alg],
T: TypesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg],
M: ManglerAlgebra[Alg],
E: ReportAlgebra[S, Alg]
): ValuesAlgebra[S, Alg] =
new ValuesAlgebra[S, Alg]

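To summarise the branching introduced above: empty [] and ?[] literals can still be lowered to Nil, but a stream literal, including the empty *[], is always lowered to a named stream, with the name freshly mangled ("stream-anon" plus a suffix) and defined internally, using a BottomType element when there are no values. A compact, self-contained sketch of that decision with stand-in types, not the compiler's raw-value classes:

object CollectionLoweringSketch {

  sealed trait Raw
  case object NilRaw                                    extends Raw
  case class Collection(values: List[String])           extends Raw
  case class Stream(values: List[String], name: String) extends Raw

  sealed trait Mode
  case object ArrayMode  extends Mode
  case object OptionMode extends Mode
  case object StreamMode extends Mode

  // []/?[] may collapse to Nil when empty; *[] must still become a named stream.
  def lower(mode: Mode, values: List[String], freshName: () => String): Raw =
    mode match {
      case ArrayMode | OptionMode =>
        if (values.isEmpty) NilRaw else Collection(values)
      case StreamMode =>
        Stream(values, freshName())
    }

  def main(args: Array[String]): Unit = {
    var counter = -1
    val fresh = () => { counter += 1; s"stream-anon-$counter" }
    println(lower(StreamMode, Nil, fresh)) // Stream(List(),stream-anon-0)
    println(lower(ArrayMode, Nil, fresh))  // NilRaw
  }
}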
View File

@ -7,7 +7,7 @@ import cats.InjectK
trait NamesAlgebra[S[_], Alg[_]] {
def read(name: Name[S], mustBeDefined: Boolean = true): Alg[Option[Type]]
// TODO can be implemented via read?
def constantDefined(name: Name[S]): Alg[Option[Type]]
@ -15,6 +15,8 @@ trait NamesAlgebra[S[_], Alg[_]] {
def define(name: Name[S], `type`: Type): Alg[Boolean]
def defineInternal(name: String, `type`: Type): Alg[Boolean]
def derive(name: Name[S], `type`: Type, derivedFrom: Set[String]): Alg[Boolean]
def getDerivedFrom(fromNames: List[Set[String]]): Alg[List[Set[String]]]

View File

@ -1,5 +1,6 @@
package aqua.semantics.rules.names
import aqua.errors.Errors.internalError
import aqua.parser.lexer.{Name, Token}
import aqua.semantics.Levenshtein
import aqua.semantics.rules.StackInterpreter
@ -94,6 +95,18 @@ class NamesInterpreter[S[_], X](using
.headOption orElse st.rootArrows.get(name)
}
def defineInternal(name: String, `type`: Type): SX[Boolean] = {
// this is for internal name definitions, so all errors are unexpected
readName(name).flatMap {
case Some(_) =>
internalError(s"Unexpected error. Name $name was already defined")
case None =>
mapStackHeadM(
internalError(s"Unexpected error. Cannot define $name in the root scope")
)(fr => (fr.addInternalName(name, `type`) -> true).pure)
}
}
override def define(name: Name[S], `type`: Type): SX[Boolean] =
readName(name.value).flatMap {
case Some(_) =>

View File

@ -37,6 +37,9 @@ object NamesState {
def addName(n: Name[S], t: Type): NamesState.Frame[S] =
copy[S](names = names.updated(n.value, t))
def addInternalName(n: String, t: Type): NamesState.Frame[S] =
copy[S](names = names.updated(n, t))
def derived(n: Name[S], from: Set[String]): NamesState.Frame[S] =
copy[S](derivedFrom =
derivedFrom + (n.value -> from.flatMap(f => derivedFrom.get(f).fold(Set(f))(_ + f)))

View File

@ -1,7 +1,7 @@
package aqua.semantics.rules.report
import aqua.semantics.{RulesViolated, SemanticError, SemanticWarning}
import aqua.parser.lexer.Token
import aqua.semantics.{RulesViolated, SemanticError, SemanticWarning}
import cats.data.Chain
import cats.kernel.Monoid

View File

@ -56,6 +56,9 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
def option(value: ValueToken[Id]): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.OptionMode, List(value))
def emptyOption(): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.OptionMode, Nil)
def array(values: ValueToken[Id]*): CollectionToken[Id] =
CollectionToken[Id](CollectionToken.Mode.ArrayMode, values.toList)
@ -545,6 +548,34 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
st.errors.exists(_.isInstanceOf[RulesViolated[Id]]) shouldBe true
}
it should "convert empty option token to Nil" in {
val emptyOpt = emptyOption()
val alg = algebra()
val (_, result) = alg.valueToRaw(emptyOpt).run(genState()).value
result shouldBe Some(ValueRaw.Nil)
}
it should "convert empty array token to Nil" in {
val emptyArray = array()
val alg = algebra()
val (_, result) = alg.valueToRaw(emptyArray).run(genState()).value
result shouldBe Some(ValueRaw.Nil)
}
it should "convert empty stream token to unique variable" in {
val emptyStream = stream()
val alg = algebra()
val (_, result) = alg.valueToRaw(emptyStream).run(genState()).value
val t = StreamType(BottomType)
result shouldBe Some(StreamRaw(Nil, "stream-anon-0", t))
}
it should "forbid collections with abilities or arrows" in {
val ability = variable("ab")
val abilityType = AbilityType("Ab", NonEmptyMap.of("field" -> ScalarType.i8))

View File

@ -4,15 +4,15 @@ import aqua.errors.Errors.internalError
import aqua.types.*
import aqua.types.Type.*
import cats.Eval
import cats.PartialOrder
import cats.data.NonEmptyList
import cats.data.NonEmptyMap
import cats.syntax.applicative.*
import cats.syntax.foldable.*
import cats.syntax.functor.*
import cats.syntax.option.*
import cats.syntax.partialOrder.*
import cats.syntax.traverse.*
import cats.{Eval, Foldable, Functor, PartialOrder, Traverse}
import scala.collection.immutable.SortedMap
sealed trait Type {
@ -277,60 +277,87 @@ sealed trait CollectionType extends Type {
object CollectionType {
def elementTypeOf(types: List[CollectibleType]): DataType =
NonEmptyList
.fromList(types)
.fold(BottomType)(
_.map {
case StreamType(el) => ArrayType(el)
case dt: DataType => dt
}.reduce[Type](_ `∩` _) match {
// In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
// But we want to get to TopType instead: this would mean that intersection is empty, and you cannot
// make any decision about the structure of type, but can push anything inside
case BottomType => TopType
case dt: DataType => dt
case t =>
internalError(
s"Expected data type from " +
s"intersection of ${types.mkString(", ")}; " +
s"got $t"
)
}
)
def elementTypeOf[F[_]: Foldable: Functor](types: F[CollectibleType]): DataType =
types
.map[Type] {
case StreamType(el) => ArrayType(el)
case dt: DataType => dt
}.reduceLeftOption(_ `∩` _)
.map {
// In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
// But we want to get to TopType instead: this would mean that intersection is empty, and you cannot
// make any decision about the structure of type, but can push anything inside
case BottomType => TopType
case dt: DataType => dt
case t =>
internalError(
s"Expected data type from " +
s"intersection of ${types.foldLeft("") { case (l, r) => l + ", " + r }}; " +
s"got $t"
)
}
.getOrElse(BottomType)
}
sealed trait ImmutableCollectionType extends CollectionType with DataType {
def withElement(t: DataType): ImmutableCollectionType
}
sealed trait MutableStreamType extends CollectionType
case class CanonStreamType(
override val element: DataType
) extends DataType with CollectionType {
) extends ImmutableCollectionType {
override val isStream: Boolean = false
override def toString: String = "#" + element
override def withElement(t: DataType): CollectionType = copy(element = t)
override def withElement(t: DataType): ImmutableCollectionType = copy(element = t)
}
case class ArrayType(
override val element: DataType
) extends DataType with CollectionType {
) extends ImmutableCollectionType {
override val isStream: Boolean = false
override def toString: String = "[]" + element
override def withElement(t: DataType): CollectionType = copy(element = t)
override def withElement(t: DataType): ImmutableCollectionType = copy(element = t)
}
case class OptionType(
override val element: DataType
) extends DataType with CollectionType {
) extends ImmutableCollectionType {
override val isStream: Boolean = false
override def toString: String = "?" + element
override def withElement(t: DataType): CollectionType = copy(element = t)
override def withElement(t: DataType): ImmutableCollectionType = copy(element = t)
}
case class StreamMapType(override val element: DataType) extends MutableStreamType {
override val isStream: Boolean = true
override def withElement(t: DataType): MutableStreamType = copy(element = t)
override def toString: String = s"%$element"
}
object StreamMapType {
def top(): StreamMapType = StreamMapType(TopType)
}
case class StreamType(override val element: DataType) extends MutableStreamType {
override val isStream: Boolean = true
override def toString: String = s"*$element"
override def withElement(t: DataType): StreamType = copy(element = t)
}
sealed trait NamedType extends Type {
@ -378,7 +405,7 @@ sealed trait NamedType extends Type {
* to allow renaming on call site.
*/
lazy val arrows: Map[String, ArrowType] =
allFields.toSortedMap.toMap.collect { case (name, at: ArrowType) =>
allFields.toSortedMap.collect { case (name, at: ArrowType) =>
name -> at
}
@ -388,7 +415,7 @@ sealed trait NamedType extends Type {
* to allow renaming on call site.
*/
lazy val abilities: Map[String, AbilityType] =
allFields.toSortedMap.toMap.collect { case (name, at: AbilityType) =>
allFields.toSortedMap.collect { case (name, at: AbilityType) =>
name -> at
}
@ -398,7 +425,7 @@ sealed trait NamedType extends Type {
* to allow renaming on call site.
*/
lazy val variables: Map[String, DataType] =
allFields.toSortedMap.toMap.collect { case (name, at: DataType) =>
allFields.toSortedMap.collect { case (name, at: DataType) =>
name -> at
}
}
@ -414,30 +441,6 @@ case class StructType(name: String, fields: NonEmptyMap[String, Type])
s"$fullName{${fields.map(_.toString).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")}}"
}
sealed trait MutableStreamType extends Type with CollectionType
case class StreamMapType(override val element: DataType) extends MutableStreamType {
override val isStream: Boolean = true
override def withElement(t: DataType): CollectionType = copy(element = t)
override def toString: String = s"%$element"
}
object StreamMapType {
def top(): StreamMapType = StreamMapType(TopType)
}
case class StreamType(override val element: DataType) extends MutableStreamType {
override val isStream: Boolean = true
override def toString: String = s"*$element"
override def withElement(t: DataType): CollectionType = copy(element = t)
}
/**
* This type unites types that work as abilities,
* namely `ServiceType` and `AbilityType`