feat(compiler): Abilities (#731)

Dima 2023-07-18 20:18:33 +03:00 committed by GitHub
parent ca52e2542c
commit 63a9f42e86
51 changed files with 1138 additions and 440 deletions


@ -1,9 +1,50 @@
module Import3 declares *
aqua Main
export foo_bar
use DECLARE_CONST, decl_bar from "declare.aqua" as Declare
use "export.aqua"
export handleAb
func foo_bar() -> string, string:
z <- FooBars.foo()
<- z, FooBars.DECLARE_CONST2
service SomeService("wed"):
getStr(s: string) -> string
ability SomeAb:
someArrow(s: string) -> string, string
str: string
ability SecondAb:
arrow(s: string) -> string
num: u32
func funcStr(s: string) -> string, string:
strInFunc <- SomeService.getStr(Declare.DECLARE_CONST)
-- SomeService.getStr(s)
<- strInFunc, s
--
-- func diffFunc(s: string) -> string:
-- differentStr <- SomeService.different(s)
-- <- differentStr
--
-- func unit():
-- funcStr("")
-- func bbbbbbb()
--
-- func aaaaaa():
-- closure = (a: string) -> string:
-- <- SomeService.str()
func handleSecAb {SomeAb, SecondAb}() -> string, string:
SomeAb.someArrow("eferfrfrf")
b, c <- SomeAb.someArrow("efre")
<- b, c
func returnAb(s: string) -> SomeAb:
SomeAb = SomeAb(someArrow = funcStr, str = s)
<- SomeAb
func handleAb(fff: string) -> string, string:
SomeAb = returnAb(fff)
SecondAb = SecondAb(arrow = funcStr, num = 12)
d, g <- handleSecAb{SomeAb, SecondAb}()
<- d, g


@ -106,6 +106,9 @@ object TypeDefinition {
case t: BoxType => ArrayTypeDef(TypeDefinition(t.element))
case StructType(name, fields) =>
StructTypeDef(name, fields.toSortedMap.view.mapValues(TypeDefinition.apply).toMap)
case AbilityType(name, fieldAndArrows) =>
// TODO: change in union with JS side
StructTypeDef(name, fieldAndArrows.toSortedMap.view.mapValues(TypeDefinition.apply).toMap)
case t: ScalarType => ScalarTypeDef.fromScalar(t)
case t: LiteralType => ScalarTypeDef.fromLiteral(t)
case t: ProductType => ProductTypeDef(t)


@ -1,6 +1,5 @@
package aqua.backend.ts
import aqua.backend.air.FuncAirGen
import aqua.res.FuncRes
import aqua.types.*
import cats.syntax.show.*
@ -36,13 +35,14 @@ object TypeScriptCommon {
"[" + pt.toList.map(typeToTs).mkString(", ") + "]"
case st: StructType =>
s"{ ${st.fields.map(typeToTs).toNel.map(kv => kv._1 + ": " + kv._2 + ";").toList.mkString(" ")} }"
case st: AbilityType =>
s"{ ${st.fields.map(typeToTs).toNel.map(kv => kv._1 + ": " + kv._2 + ";").toList.mkString(" ")} }"
case st: ScalarType if ScalarType.number(st) => "number"
case ScalarType.bool => "boolean"
case ScalarType.string => "string"
case lt: LiteralType if lt.oneOf.exists(ScalarType.number) => "number"
case lt: LiteralType if lt.oneOf(ScalarType.bool) => "boolean"
case lt: LiteralType if lt.oneOf(ScalarType.string) => "string"
case _: DataType => "any"
case at: ArrowType => fnDef(at)
}


@ -43,7 +43,7 @@ lazy val cli = crossProject(JSPlatform, JVMPlatform)
.crossType(CrossType.Pure)
.in(file("cli/cli"))
.enablePlugins(GraalVMNativeImagePlugin)
.settings(commons: _*)
.settings(commons)
.settings(
Compile / mainClass := Some("aqua.AquaCli"),
graalVMNativeImageOptions ++= Seq(
@ -92,13 +92,13 @@ lazy val `aqua-run` = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("aqua-run"))
.settings(commons: _*)
.settings(commons)
.dependsOn(compiler, `backend-air`, `backend-ts`, io, definitions, logging, constants)
lazy val io = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-effect" % catsEffectV,
@ -113,7 +113,7 @@ lazy val `language-server-api` = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("language-server/language-server-api"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-effect" % catsEffectV,
@ -135,20 +135,20 @@ lazy val `language-server-apiJS` = `language-server-api`.js
lazy val `js-exports` = project
.in(file("js/js-exports"))
.enablePlugins(ScalaJSPlugin)
.settings(commons: _*)
.settings(commons)
.dependsOn(`backend`.js, definitions.js)
lazy val `js-imports` = project
.in(file("js/js-imports"))
.enablePlugins(ScalaJSPlugin)
.settings(commons: _*)
.settings(commons)
.dependsOn(`js-exports`, transform.js)
lazy val `aqua-api` = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("api/api"))
.settings(commons: _*)
.settings(commons)
.dependsOn(`aqua-run`, `backend-api`)
lazy val `aqua-apiJS` = `aqua-api`.js
@ -175,7 +175,7 @@ lazy val types = crossProject(JVMPlatform, JSPlatform)
lazy val parser = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-parse" % catsParseV,
@ -187,14 +187,14 @@ lazy val parser = crossProject(JVMPlatform, JSPlatform)
lazy val linker = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.dependsOn(parser)
lazy val tree = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/tree"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-free" % catsV
@ -205,40 +205,41 @@ lazy val raw = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/raw"))
.settings(commons: _*)
.settings(commons)
.dependsOn(types, tree)
lazy val model = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.dependsOn(types, tree, raw)
lazy val res = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/res"))
.settings(commons: _*)
.settings(commons)
.dependsOn(model)
lazy val inline = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/inline"))
.settings(commons: _*)
.settings(commons)
.dependsOn(raw, model)
lazy val transform = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/transform"))
.settings(commons: _*)
.settings(commons)
.dependsOn(model, res, inline, res % "test->test")
lazy val semantics = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"dev.optics" %%% "monocle-core" % monocleV,
@ -251,14 +252,14 @@ lazy val compiler = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("compiler"))
.settings(commons: _*)
.settings(commons)
.dependsOn(semantics, linker, backend, transform % "test->test", res % "test->test")
lazy val backend = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend"))
.settings(commons: _*)
.settings(commons)
.enablePlugins(BuildInfoPlugin)
.settings(
buildInfoKeys := Seq[BuildInfoKey](version),
@ -270,7 +271,7 @@ lazy val definitions = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/definitions"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"io.circe" %%% "circe-core",
@ -284,7 +285,7 @@ lazy val logging = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("utils/logging"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-core" % catsV
@ -295,7 +296,7 @@ lazy val constants = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("utils/constants"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-core" % catsV
@ -307,21 +308,21 @@ lazy val `backend-air` = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/air"))
.settings(commons: _*)
.settings(commons)
.dependsOn(backend, transform)
lazy val `backend-api` = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/api"))
.settings(commons: _*)
.settings(commons)
.dependsOn(backend, transform, `backend-air`)
lazy val `backend-ts` = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/ts"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"io.circe" %%% "circe-core",


@ -0,0 +1,37 @@
aqua Main
use DECLARE_CONST, decl_bar from "imports_exports/declare.aqua" as Declare
export handleAb, SomeService
service SomeService("wed"):
getStr(s: string) -> string
ability SomeAb:
someArrow(s: string) -> string, string
str: string
ability SecondAb:
arrow(s: string) -> string
num: u32
func funcStr(s: string) -> string, string:
strInFunc <- SomeService.getStr(Declare.DECLARE_CONST)
strInFunc2 <- SomeService.getStr(s)
<- strInFunc, strInFunc2
func handleSecAb {SomeAb, SecondAb}() -> string, string, string, u32:
SomeAb.someArrow("eferfrfrf")
b, c <- SomeAb.someArrow("efre")
d <- SecondAb.arrow(SomeAb.str)
<- b, c, d, SecondAb.num
func returnAb(s: string) -> SomeAb:
SomeAb = SomeAb(someArrow = funcStr, str = s)
<- SomeAb
func handleAb(fff: string) -> string, string, string, u32:
SomeAb = returnAb(fff)
SecondAb = SecondAb(arrow = funcStr, num = 12)
res1, res2, res3, res4 <- handleSecAb{SomeAb, SecondAb}()
<- res1, res2, res3, res4


@ -71,6 +71,7 @@ export const relay2 = config.relays[1]
const relayPeerId2 = relay2.peerId
import log from 'loglevel';
import {abilityCall} from "../examples/abilityCall";
// log.setDefaultLevel("debug")
async function start() {
@ -313,6 +314,11 @@ describe('Testing examples', () => {
});
});
it('ability.aqua', async () => {
let result = await abilityCall();
expect(result).toStrictEqual(['declare_const123', "efre123", "declare_const123", 12]);
});
it('functors.aqua LNG-119 bug', async () => {
let result = await bugLng119Call();
expect(result).toEqual([1]);


@ -0,0 +1,11 @@
import {handleAb, registerSomeService} from "../compiled/examples/abilities";
export async function abilityCall(): Promise<[string, string, string, number]> {
registerSomeService({
getStr: (s: string) => {
return s + "123"
}
})
return await handleAb("some_string")
}


@ -4,11 +4,14 @@ import aqua.model
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.model.*
import aqua.raw.ops.RawTag
import aqua.types.{ArrowType, BoxType, StreamType}
import aqua.types.{AbilityType, ArrowType, BoxType, DataType, StreamType, Type}
import aqua.raw.value.{ValueRaw, VarRaw}
import cats.Eval
import cats.data.{Chain, State}
import cats.data.{Chain, IndexedStateT, State}
import cats.syntax.traverse.*
import cats.syntax.apply.*
import cats.syntax.bifunctor.*
import cats.syntax.foldable.*
import scribe.Logging
/**
@ -72,55 +75,98 @@ object ArrowInliner extends Logging {
(ops, rets)
}
/**
* @param tree generated tree after inlining a function
* @param returnedValues function return values
* @param exportsToSave values that must be saved for future states
* @param arrowsToSave arrows that must be saved for future states
*/
case class InlineResult(
tree: OpModel.Tree,
returnedValues: List[ValueModel],
exportsToSave: Map[String, ValueModel],
arrowsToSave: Map[String, FuncArrow]
)
// Apply a callable function, get its fully resolved body & optional value, if any
private def inline[S: Mangler: Arrows: Exports](
fn: FuncArrow,
call: CallModel
): State[S, (OpModel.Tree, List[ValueModel])] =
getOutsideStreamNames.flatMap { outsideDeclaredStreams =>
// Function's internal variables will not be available outside, hence the scope
Exports[S].scope(
for {
// Process renamings, prepare environment
tr <- prelude[S](fn, call)
(tree, results) = tr
): State[S, InlineResult] =
(Exports[S].exports, getOutsideStreamNames).flatMapN {
case (oldExports, outsideDeclaredStreams) =>
// Function's internal variables will not be available outside, hence the scope
Exports[S].scope(
for {
// Process renamings, prepare environment
tr <- prelude[S](fn, call, oldExports)
(tree, results) = tr
// Register captured values as available exports
_ <- Exports[S].resolved(fn.capturedValues)
_ <- Mangler[S].forbid(fn.capturedValues.keySet)
// Register captured values as available exports
_ <- Exports[S].resolved(fn.capturedValues)
_ <- Mangler[S].forbid(fn.capturedValues.keySet)
// Now, substitute the arrows that were received as function arguments
// Use the new op tree (args are replaced with values, names are unique & safe)
callableFuncBodyNoTopology <- TagInliner.handleTree(tree, fn.funcName)
callableFuncBody =
fn.capturedTopology
.fold[OpModel](SeqModel)(ApplyTopologyModel.apply)
.wrap(callableFuncBodyNoTopology)
// Now, substitute the arrows that were received as function arguments
// Use the new op tree (args are replaced with values, names are unique & safe)
callableFuncBodyNoTopology <- TagInliner.handleTree(tree, fn.funcName)
callableFuncBody =
fn.capturedTopology
.fold[OpModel](SeqModel)(ApplyTopologyModel.apply)
.wrap(callableFuncBodyNoTopology)
opsAndRets <- pushStreamResults(
outsideDeclaredStreams,
call.exportTo,
results,
callableFuncBody
)
(ops, rets) = opsAndRets
} yield SeqModel.wrap(ops.reverse: _*) -> rets.reverse
)
opsAndRets <- pushStreamResults(
outsideDeclaredStreams,
call.exportTo,
results,
callableFuncBody
)
(ops, rets) = opsAndRets
exports <- Exports[S].exports
arrows <- Arrows[S].arrows
// gather all arrows and variables from abilities
returnedFromAbilities = rets.collect { case VarModel(name, st @ AbilityType(_, _), _) =>
getVarsAndArrowsFromAbilities(name, None, st, exports, arrows)
}.foldMapA(_.bimap(_.toList, _.toList)).bimap(_.toMap, _.toMap)
// find and get resolved arrows if we return them from the function
returnedArrows = rets.collect { case VarModel(name, ArrowType(_, _), _) =>
name
}.toSet
arrowsToSave <- Arrows[S].pickArrows(returnedArrows)
} yield {
val (valsFromAbilities, arrowsFromAbilities) = returnedFromAbilities
InlineResult(
SeqModel.wrap(ops.reverse: _*),
rets.reverse,
valsFromAbilities,
arrowsFromAbilities ++ arrowsToSave
)
}
)
}
// Get all arrows that are arguments from outer Arrows.
// Purge and push captured arrows and arrows as arguments into state.
// Grab all arrows that must be renamed.
/**
* Get all arrows that are arguments from outer Arrows.
* Purge and push captured arrows and arrows as arguments into state.
* Grab all arrows that must be renamed.
*
* @param argsToArrowsRaw arguments with ArrowType
* @param func function where captured and returned arrows may exist
* @param abilityArrows arrows from abilities that should be renamed
* @return all arrows that must be renamed in function body
*/
private def updateArrowsAndRenameArrowArgs[S: Mangler: Arrows: Exports](
args: ArgsCall,
func: FuncArrow
argsToArrowsRaw: Map[String, FuncArrow],
func: FuncArrow,
abilityArrows: Map[String, String]
): State[S, Map[String, String]] = {
for {
// Arrow arguments: expected type is Arrow, given by-name
argsToArrowsRaw <- Arrows[S].argsArrows(args)
argsToArrowsShouldRename <- Mangler[S].findNewNames(
argsToArrowsRaw.keySet
)
argsToArrowsShouldRename <- Mangler[S]
.findNewNames(
argsToArrowsRaw.keySet
)
.map(_ ++ abilityArrows)
argsToArrows = argsToArrowsRaw.map { case (k, v) =>
argsToArrowsShouldRename.getOrElse(k, k) -> v
}
@ -133,25 +179,29 @@ object ArrowInliner extends Logging {
returnedArrowsShouldRename.getOrElse(k, k) -> v
}
// Going to resolve arrows: collect them all. Names should never collide: it's semantically checked
_ <- Arrows[S].purge
_ <- Arrows[S].resolved(renamedCapturedArrows ++ argsToArrows)
} yield {
argsToArrowsShouldRename ++ returnedArrowsShouldRename
}
}
/**
* @param argsToDataRaw data arguments to rename
* @param abilityValues values from abilities to rename
* @return all values that must be renamed in function body
*/
private def updateExportsAndRenameDataArgs[S: Mangler: Arrows: Exports](
args: ArgsCall
argsToDataRaw: Map[String, ValueModel],
abilityValues: Map[String, String]
): State[S, Map[String, String]] = {
// DataType arguments
val argsToDataRaw = args.dataArgs
for {
// Find all duplicates in arguments
// we should not rename arguments that will be renamed by 'streamToRename'
argsToDataShouldRename <- Mangler[S].findNewNames(
argsToDataRaw.keySet
)
// we should not find new names for 'abilityValues' arguments that will be renamed by 'streamToRename'
argsToDataShouldRename <- Mangler[S]
.findNewNames(
argsToDataRaw.keySet
)
.map(_ ++ abilityValues)
// Do not rename arguments if they just match external names
argsToData = argsToDataRaw.map { case (k, v) =>
@ -165,11 +215,8 @@ object ArrowInliner extends Logging {
// Rename all exports-to-stream for streams that passed as arguments
private def renameStreams(
tree: RawTag.Tree,
args: ArgsCall
streamArgs: Map[String, VarModel]
): RawTag.Tree = {
// Stream arguments
val streamArgs = args.streamArgs
// collect arguments with stream type
// to exclude it from resolving and rename it with a higher-level stream that passed by argument
val streamsToRename = streamArgs.view.mapValues(_.name).toMap
@ -190,6 +237,106 @@ object ArrowInliner extends Logging {
.renameExports(streamsToRename)
}
case class AbilityResolvingResult(
namesToRename: Map[String, String],
renamedExports: Map[String, ValueModel],
renamedArrows: Map[String, FuncArrow]
)
/**
* Generate new names for all ability fields and arrows if necessary.
* Gather all fields and arrows from Arrows and Exports states
* @param name ability name in state
* @param vm ability variable
* @param t ability type
* @param oldExports previous Exports
* @param oldArrows previous Arrows
* @return names to rename, Exports and Arrows with all ability fields and arrows
*/
private def renameAndResolveAbilities[S: Mangler: Arrows: Exports](
name: String,
vm: VarModel,
t: AbilityType,
oldExports: Map[String, ValueModel],
oldArrows: Map[String, FuncArrow]
): State[S, AbilityResolvingResult] = {
for {
newName <- Mangler[S].findNewName(name)
newFieldsName = t.fields.mapBoth { case (n, t) =>
s"$name.$n" -> s"$newName.$n"
}
allNewNames = newFieldsName.add((name, newName)).toSortedMap
} yield {
val (allVars, allArrows) =
getVarsAndArrowsFromAbilities(vm.name, Option(newName), t, oldExports, oldArrows)
AbilityResolvingResult(allNewNames, allVars, allArrows)
}
}
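A rough standalone illustration of the renaming described above (plain Scala, hypothetical names, no compiler state involved): only the top ability name gets a fresh mangled name, and every flattened field key is re-prefixed with it.

// Build the rename map for an ability and its flattened fields (sketch).
def abilityRenames(name: String, newName: String, fieldNames: Set[String]): Map[String, String] =
  fieldNames.map(f => s"$name.$f" -> s"$newName.$f").toMap + (name -> newName)

// e.g., with a hypothetical mangled name "SomeAb-1":
// abilityRenames("SomeAb", "SomeAb-1", Set("str", "someArrow")) ==
//   Map("SomeAb.str" -> "SomeAb-1.str", "SomeAb.someArrow" -> "SomeAb-1.someArrow", "SomeAb" -> "SomeAb-1")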
/**
* Gather all arrows and variables from abilities recursively (because of possible nested abilities).
* Rename top names if needed in gathered fields and arrows.
* The `top` name is the first segment of a dotted name, i.e. `topName` in `topName.fieldName`.
* Only top name must be renamed to keep all field names unique.
* @param topOldName old name to find all fields in states
* @param topNewName new name to rename all fields in states
* @param abilityType type of current ability
* @param oldExports where to get values
* @param oldArrows where to get arrows
* @param valAcc accumulator for values
* @param arrowsAcc accumulator for arrows
* @return gathered values and arrows
*/
private def getVarsAndArrowsFromAbilities(
topOldName: String,
topNewName: Option[String],
abilityType: AbilityType,
oldExports: Map[String, ValueModel],
oldArrows: Map[String, FuncArrow],
valAcc: Map[String, ValueModel] = Map.empty,
arrowsAcc: Map[String, FuncArrow] = Map.empty
): (Map[String, ValueModel], Map[String, FuncArrow]) = {
abilityType.fields.toSortedMap.toList.map { case (fName, fValue) =>
val currentOldName = s"$topOldName.$fName"
// for all nested fields, arrows and abilities, only the left (top) part of the name must be renamed
val currentNewName = topNewName.map(_ + s".$fName")
fValue match {
case nestedAbilityType @ AbilityType(_, _) =>
getVarsAndArrowsFromAbilities(
currentOldName,
currentNewName,
nestedAbilityType,
oldExports,
oldArrows,
valAcc,
arrowsAcc
)
case ArrowType(_, _) =>
oldExports
.get(currentOldName)
.flatMap {
case vm @ VarModel(name, _, _) =>
oldArrows
.get(name)
.map(fa =>
(
valAcc.updated(currentNewName.getOrElse(currentOldName), vm),
arrowsAcc.updated(name, fa)
)
)
case _ => None
}
.getOrElse((valAcc, arrowsAcc))
case _ =>
oldExports
.get(currentOldName)
.map(vm => (valAcc.updated(currentNewName.getOrElse(currentOldName), vm), arrowsAcc))
.getOrElse((valAcc, arrowsAcc))
}
}.foldMapA(_.bimap(_.toList, _.toList)).bimap(_.toMap, _.toMap)
}
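The recursion above can be pictured with a self-contained model (hypothetical types; String stands in for ValueModel and FuncArrow, and no Exports/Arrows state is involved): walk the ability's fields, re-prefix only the top segment of each flattened name, pull the value out of the old exports, and recurse into nested abilities.

sealed trait FieldKind
case object Plain extends FieldKind
case class Nested(fields: Map[String, FieldKind]) extends FieldKind

def gather(
  oldName: String,
  newName: Option[String],
  fields: Map[String, FieldKind],
  oldExports: Map[String, String],
  acc: Map[String, String] = Map.empty
): Map[String, String] =
  fields.foldLeft(acc) { case (m, (field, kind)) =>
    val oldKey = s"$oldName.$field"
    val newKey = newName.map(n => s"$n.$field")
    kind match {
      // nested abilities keep their relative names; only the top prefix is rewritten
      case Nested(nested) => gather(oldKey, newKey, nested, oldExports, m)
      // plain fields (and arrows) are looked up by the old key and stored under the new one
      case Plain => oldExports.get(oldKey).fold(m)(v => m.updated(newKey.getOrElse(oldKey), v))
    }
  }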
/**
* Prepare the state context for this function call
*
@ -204,19 +351,36 @@ object ArrowInliner extends Logging {
*/
private def prelude[S: Mangler: Arrows: Exports](
fn: FuncArrow,
call: CallModel
call: CallModel,
oldExports: Map[String, ValueModel]
): State[S, (RawTag.Tree, List[ValueRaw])] =
for {
// Collect all arguments: what names are used inside the function, what values are received
args <- State.pure(ArgsCall(fn.arrowType.domain, call.args))
abArgs = args.abilityArgs
// Going to resolve arrows: collect them all. Names should never collide: it's semantically checked
previousArrowsState <- Arrows[S].arrows
_ <- Arrows[S].purge
abilityResolvingResult <- abArgs.toList.traverse { case (str, (vm, sct)) =>
renameAndResolveAbilities(str, vm, sct, oldExports, previousArrowsState)
}
absRenames = abilityResolvingResult.map(_.namesToRename).fold(Map.empty)(_ ++ _)
absVars = abilityResolvingResult.map(_.renamedExports).fold(Map.empty)(_ ++ _)
absArrows = abilityResolvingResult.map(_.renamedArrows).fold(Map.empty)(_ ++ _)
arrowArgs = args.arrowArgs(previousArrowsState)
// Update states and rename tags
renamedArrows <- updateArrowsAndRenameArrowArgs(args, fn)
argsToDataShouldRename <- updateExportsAndRenameDataArgs(args)
allShouldRename = argsToDataShouldRename ++ renamedArrows
renamedArrows <- updateArrowsAndRenameArrowArgs(arrowArgs ++ absArrows, fn, absRenames)
argsToDataShouldRename <- updateExportsAndRenameDataArgs(args.dataArgs ++ absVars, absRenames)
allShouldRename = argsToDataShouldRename ++ renamedArrows ++ absRenames
// Rename all renamed arguments in the body
treeRenamed = fn.body.rename(allShouldRename)
treeStreamsRenamed = renameStreams(treeRenamed, args)
treeStreamsRenamed = renameStreams(treeRenamed, args.streamArgs)
// Function body on its own defines some values; collect their names
// except stream arguments. They should be already renamed
@ -241,26 +405,49 @@ object ArrowInliner extends Logging {
// Result could be renamed; take care about that
} yield (tree, fn.ret.map(_.renameVars(shouldRename)))
private def getAllArrowsFromAbility[S: Exports: Arrows: Mangler](
name: String,
sc: AbilityType
): State[S, Map[String, FuncArrow]] = {
for {
exports <- Exports[S].exports
arrows <- Arrows[S].arrows
} yield {
sc.fields.toSortedMap.toList.flatMap {
case (n, ArrowType(_, _)) =>
val fullName = s"$name.$n"
exports.get(fullName).flatMap {
case VarModel(n, _, _) => arrows.get(n).map(n -> _)
case _ => None
}
case _ => None
}.toMap
}
}
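In the same spirit, a minimal sketch (plain maps standing in for the Exports and Arrows states, hypothetical names) of the two-hop lookup used here: the flattened export "Ability.arrow" points at a variable, and that variable's name points at the resolved arrow.

def arrowsOfAbility(
  abilityName: String,
  arrowFields: List[String],
  exports: Map[String, String], // flattened export name -> variable it points to
  arrows: Map[String, String]   // variable name -> resolved arrow (stand-in for FuncArrow)
): Map[String, String] =
  arrowFields.flatMap { f =>
    exports.get(s"$abilityName.$f").flatMap(varName => arrows.get(varName).map(varName -> _))
  }.toMap

// arrowsOfAbility("SomeAb", List("someArrow"),
//   exports = Map("SomeAb.someArrow" -> "funcStr"),
//   arrows  = Map("funcStr" -> "<arrow funcStr>")) == Map("funcStr" -> "<arrow funcStr>")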
private[inline] def callArrowRet[S: Exports: Arrows: Mangler](
arrow: FuncArrow,
call: CallModel
): State[S, (OpModel.Tree, List[ValueModel])] =
for {
passArrows <- Arrows[S].pickArrows(call.arrowArgNames)
arrowsFromAbilities <- call.abilityArgs
.traverse(getAllArrowsFromAbility)
.map(_.fold(Map.empty)(_ ++ _))
av <- Arrows[S].scope(
inlineResult <- Arrows[S].scope(
for {
_ <- Arrows[S].resolved(passArrows)
av <- ArrowInliner.inline(arrow, call)
// find and get resolved arrows if we return them from the function
returnedArrows = av._2.collect { case VarModel(name, ArrowType(_, _), _) =>
name
}
arrowsToSave <- Arrows[S].pickArrows(returnedArrows.toSet)
} yield av -> arrowsToSave
_ <- Arrows[S].resolved(passArrows ++ arrowsFromAbilities)
inlineResult <- ArrowInliner.inline(arrow, call)
} yield inlineResult
)
((appliedOp, values), arrowsToSave) = av
_ <- Arrows[S].resolved(arrowsToSave)
_ <- Exports[S].resolved(call.exportTo.map(_.name).zip(values).toMap)
} yield appliedOp -> values
_ <- Arrows[S].resolved(inlineResult.arrowsToSave)
_ <- Exports[S].resolved(
call.exportTo
.map(_.name)
.zip(inlineResult.returnedValues)
.toMap ++ inlineResult.exportsToSave
)
} yield inlineResult.tree -> inlineResult.returnedValues
}


@ -1,14 +1,6 @@
package aqua.model.inline
import aqua.model.{
CallModel,
CallServiceModel,
LiteralModel,
OpModel,
SeqModel,
ValueModel,
VarModel
}
import aqua.model.{CallModel, CallServiceModel, LiteralModel, OpModel, SeqModel, ValueModel, VarModel}
import aqua.model.inline.raw.RawInliner
import cats.data.Chain
import aqua.model.inline.state.{Arrows, Exports, Mangler}


@ -2,16 +2,10 @@ package aqua.model.inline
import aqua.model.inline.state.{Arrows, Counter, Exports, Mangler}
import aqua.model.*
import aqua.model.inline.raw.{
ApplyFunctorRawInliner,
ApplyGateRawInliner,
ApplyPropertiesRawInliner,
CallArrowRawInliner,
CollectionRawInliner
}
import aqua.model.inline.raw.{ApplyFunctorRawInliner, ApplyGateRawInliner, ApplyPropertiesRawInliner, CallArrowRawInliner, CollectionRawInliner, MakeAbilityRawInliner}
import aqua.raw.ops.*
import aqua.raw.value.*
import aqua.types.{ArrayType, OptionType, StreamType}
import aqua.types.{ArrayType, LiteralType, OptionType, StreamType}
import cats.syntax.traverse.*
import cats.syntax.monoid.*
import cats.syntax.functor.*
@ -23,7 +17,7 @@ import scribe.Logging
object RawValueInliner extends Logging {
import Inline.*
import aqua.model.inline.Inline.*
private[inline] def unfold[S: Mangler: Exports: Arrows](
raw: ValueRaw,
@ -48,6 +42,9 @@ object RawValueInliner extends Logging {
case dr: MakeStructRaw =>
MakeStructRawInliner(dr, propertiesAllowed)
case sr: AbilityRaw =>
MakeAbilityRawInliner(sr, propertiesAllowed)
case cr: CallArrowRaw =>
CallArrowRawInliner(cr, propertiesAllowed)
}


@ -1,13 +1,12 @@
package aqua.model.inline
import aqua.model.inline.state.{Arrows, Counter, Exports, Mangler}
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.model.*
import aqua.model.inline.RawValueInliner.collectionToModel
import aqua.model.inline.raw.{CallArrowRawInliner, CollectionRawInliner}
import aqua.raw.arrow.FuncRaw
import aqua.model.inline.raw.CallArrowRawInliner
import aqua.raw.ops.*
import aqua.raw.value.*
import aqua.types.{ArrayType, ArrowType, BoxType, CanonStreamType, StreamType}
import aqua.types.{BoxType, CanonStreamType, StreamType}
import cats.syntax.traverse.*
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
@ -34,7 +33,7 @@ object TagInliner extends Logging {
import RawValueInliner.{callToModel, valueListToModel, valueToModel}
import Inline.parDesugarPrefix
import aqua.model.inline.Inline.parDesugarPrefix
/**
* Result of [[RawTag]] inlining


@ -1,14 +1,6 @@
package aqua.model.inline.raw
import aqua.model.{
CallModel,
CallServiceModel,
LiteralModel,
OpModel,
SeqModel,
ValueModel,
VarModel
}
import aqua.model.{CallModel, CallServiceModel, LiteralModel, OpModel, SeqModel, ValueModel, VarModel}
import aqua.model.inline.{Inline, SeqMode, TagInliner}
import aqua.model.inline.MakeStructRawInliner.createObj
import aqua.model.inline.RawValueInliner.unfold


@ -28,6 +28,7 @@ import aqua.raw.value.{
ApplyPropertyRaw,
CallArrowRaw,
FunctorRaw,
IntoArrowRaw,
IntoCopyRaw,
IntoFieldRaw,
IntoIndexRaw,
@ -36,7 +37,17 @@ import aqua.raw.value.{
ValueRaw,
VarRaw
}
import aqua.types.{ArrayType, CanonStreamType, ScalarType, StreamType, Type}
import aqua.types.{
AbilityType,
ArrayType,
ArrowType,
BottomType,
CanonStreamType,
NilType,
ScalarType,
StreamType,
Type
}
import cats.Eval
import cats.data.{Chain, IndexedStateT, State}
import cats.syntax.monoid.*
@ -83,6 +94,59 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
}
}
private def unfoldAbilityProperty[S: Mangler: Exports: Arrows](
varModel: VarModel,
scopeType: AbilityType,
p: PropertyRaw
): State[S, (VarModel, Inline)] = {
p match {
case IntoArrowRaw(arrowName, t, arguments) =>
val arrowType = scopeType.fields
.lookup(arrowName)
.collect { case at @ ArrowType(_, _) =>
at
}
.getOrElse {
logger.error(s"Inlining, cannot find arrow $arrowName in ability $varModel")
ArrowType(NilType, NilType)
}
for {
callArrow <- CallArrowRawInliner(
CallArrowRaw(None, s"${varModel.name}.$arrowName", arguments, arrowType, None)
)
result <- callArrow match {
case (vm: VarModel, inl) =>
State.pure((vm, inl))
case (lm: LiteralModel, inl) =>
flatLiteralWithProperties(lm, inl, Chain.empty).flatMap { case (vm, inline) =>
Exports[S].resolved(vm.name, vm).map(_ => (vm, inline))
}
}
} yield {
result
}
case IntoFieldRaw(fieldName, t) =>
for {
exports <- Exports[S].exports
fullName = s"${varModel.name}.$fieldName"
result <- exports.get(fullName) match {
case Some(vm: VarModel) =>
State.pure((vm, Inline.empty))
case Some(lm: LiteralModel) =>
flatLiteralWithProperties(lm, Inline.empty, Chain.empty)
case _ =>
logger.error(
s"Inlining, cannot find field $fullName in ability $varModel. Available: ${exports.keySet}"
)
flatLiteralWithProperties(LiteralModel.quote(""), Inline.empty, Chain.empty)
}
} yield {
result
}
}
}
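A compact sketch of the lookup convention this relies on (hypothetical types; the real inliner works with PropertyRaw, VarModel and CallArrowRaw): both a field read someAb.str and an arrow call someAb.someArrow(x) resolve against the flattened name "<abilityVar>.<member>" registered in Exports; a missing field falls back to an empty literal, mirroring the error branch above.

sealed trait AbilityProp
case class FieldAccess(field: String) extends AbilityProp
case class ArrowCall(arrow: String, args: List[String]) extends AbilityProp

def flattenedKey(abilityVar: String, member: String): String = s"$abilityVar.$member"

def resolve(abilityVar: String, prop: AbilityProp, exports: Map[String, String]): String =
  prop match {
    case FieldAccess(f)     => exports.getOrElse(flattenedKey(abilityVar, f), "\"\"")
    case ArrowCall(a, args) => s"call ${flattenedKey(abilityVar, a)}(${args.mkString(", ")})"
  }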
private[inline] def unfoldProperty[S: Mangler: Exports: Arrows](
varModel: VarModel,
p: PropertyRaw
@ -184,30 +248,42 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
.foldLeft[State[S, (VarModel, Inline)]](
State.pure((vm, prevInline.mergeWith(optimizationInline, SeqMode)))
) { case (state, property) =>
state.flatMap { case (vm, leftInline) =>
property match {
case PropertyRawWithModel(_, Some(model)) =>
State.pure(vm.copy(properties = vm.properties :+ model) -> leftInline)
case PropertyRawWithModel(raw, _) =>
unfoldProperty(vm, raw).flatMap {
case (v, i) if !propertiesAllowed && v.properties.nonEmpty =>
removeProperties(v).map { case (vf, inlf) =>
vf -> Inline(
leftInline.flattenValues ++ i.flattenValues ++ inlf.flattenValues,
leftInline.predo ++ i.predo ++ inlf.predo,
mergeMode = SeqMode
state.flatMap {
case (vm @ VarModel(name, st @ AbilityType(_, _), _), leftInline) =>
unfoldAbilityProperty(vm, st, property.raw).map { case (vm, inl) =>
(
vm,
Inline(
leftInline.flattenValues ++ inl.flattenValues,
leftInline.predo ++ inl.predo,
mergeMode = SeqMode
)
)
}
case (vm, leftInline) =>
property match {
case PropertyRawWithModel(_, Some(model)) =>
State.pure(vm.copy(properties = vm.properties :+ model) -> leftInline)
case PropertyRawWithModel(raw, _) =>
unfoldProperty(vm, raw).flatMap {
case (v, i) if !propertiesAllowed && v.properties.nonEmpty =>
removeProperties(v).map { case (vf, inlf) =>
vf -> Inline(
leftInline.flattenValues ++ i.flattenValues ++ inlf.flattenValues,
leftInline.predo ++ i.predo ++ inlf.predo,
mergeMode = SeqMode
)
}
case (v, i) =>
State.pure(
v -> Inline(
leftInline.flattenValues ++ i.flattenValues,
leftInline.predo ++ i.predo,
mergeMode = SeqMode
)
)
}
case (v, i) =>
State.pure(
v -> Inline(
leftInline.flattenValues ++ i.flattenValues,
leftInline.predo ++ i.predo,
mergeMode = SeqMode
)
)
}
}
}
}
}
}
}
@ -227,7 +303,7 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
case (gateResVal: VarModel, gateResInline) =>
unfoldProperties(gateResInline, gateResVal, properties, propertiesAllowed).map {
case (v, i) =>
(v: ValueModel) -> Inline(
v -> Inline(
inl.flattenValues ++ i.flattenValues,
inl.predo ++ i.predo,
mergeMode = SeqMode
@ -247,7 +323,7 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
unfold(raw).flatMap {
case (vm: VarModel, prevInline) =>
unfoldProperties(prevInline, vm, properties, propertiesAllowed).map { case (v, i) =>
(v: ValueModel) -> i
v -> i
}
case (l: LiteralModel, inline) =>
flatLiteralWithProperties(
@ -257,7 +333,7 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
).flatMap { (varModel, prevInline) =>
unfoldProperties(prevInline, varModel, properties, propertiesAllowed).map {
case (v, i) =>
(v: ValueModel) -> i
v -> i
}
}
}


@ -92,7 +92,7 @@ object CallArrowRawInliner extends RawInliner[CallArrowRaw] with Logging {
result <- arrow.fold {
logger.error(
s"Inlining, cannot find arrow $funcName, available: ${arrows.keys
.mkString(", ")}"
.mkString(", ")} and vars: ${exports.keys.mkString(", ")}"
)
State.pure(Nil -> Inline.empty)


@ -1,16 +1,6 @@
package aqua.model.inline.raw
import aqua.model.{
CallModel,
CanonicalizeModel,
NullModel,
PushToStreamModel,
RestrictionModel,
SeqModel,
ValueModel,
VarModel,
XorModel
}
import aqua.model.{CallModel, CanonicalizeModel, NullModel, PushToStreamModel, RestrictionModel, SeqModel, ValueModel, VarModel, XorModel}
import aqua.model.inline.Inline
import aqua.model.inline.RawValueInliner.valueToModel
import aqua.model.inline.state.{Arrows, Exports, Mangler}


@ -0,0 +1,51 @@
package aqua.model.inline.raw
import aqua.model.{
CallModel,
CallServiceModel,
LiteralModel,
OpModel,
SeqModel,
ValueModel,
VarModel
}
import aqua.model.inline.raw.RawInliner
import cats.data.Chain
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.raw.value.{AbilityRaw, LiteralRaw, MakeStructRaw}
import cats.data.{NonEmptyList, NonEmptyMap, State}
import aqua.model.inline.Inline
import aqua.model.inline.RawValueInliner.{unfold, valueToModel}
import aqua.types.{ArrowType, ScalarType}
import cats.syntax.traverse.*
import cats.syntax.monoid.*
import cats.syntax.functor.*
import cats.syntax.flatMap.*
import cats.syntax.apply.*
object MakeAbilityRawInliner extends RawInliner[AbilityRaw] {
override def apply[S: Mangler: Exports: Arrows](
raw: AbilityRaw,
propertiesAllowed: Boolean
): State[S, (ValueModel, Inline)] = {
for {
name <- Mangler[S].findAndForbidName(raw.abilityType.name + "_ab")
foldedFields <- raw.fieldsAndArrows.nonEmptyTraverse(unfold(_))
varModel = VarModel(name, raw.baseType)
valsInline = foldedFields.toSortedMap.values.map(_._2).fold(Inline.empty)(_ |+| _).desugar
_ <- foldedFields.map(_._1).toNel.toList.traverse { case (n, vm) =>
val namef = s"$name.$n"
Exports[S].resolved(namef, vm)
}
} yield {
(
varModel,
Inline(
valsInline.flattenValues,
Chain.one(SeqModel.wrap(valsInline.predo))
)
)
}
}
}
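The mirror image of that lookup happens at construction time: each field of a freshly built ability is inlined and then registered in Exports under the mangled ability name plus ".field". A standalone sketch with plain maps (the "_ab" suffix comes from the code above; the exact mangled name is up to Mangler):

def registerAbility(
  mangledName: String,         // e.g. "SomeAb_ab" (type name + "_ab", possibly further mangled)
  fields: Map[String, String], // field name -> already-inlined value (stand-in for ValueModel)
  exports: Map[String, String]
): Map[String, String] =
  fields.foldLeft(exports) { case (ex, (f, v)) => ex.updated(s"$mangledName.$f", v) }

// registerAbility("SomeAb_ab", Map("str" -> "fff", "someArrow" -> "funcStr"), Map.empty) ==
//   Map("SomeAb_ab.str" -> "fff", "SomeAb_ab.someArrow" -> "funcStr")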


@ -61,11 +61,12 @@ object Exports {
object Simple extends Exports[Map[String, ValueModel]] {
// Exports[Map[NonEmptyList[String], ValueModel]]
override def resolved(
exportName: String,
value: ValueModel
): State[Map[String, ValueModel], Unit] =
State.modify(_ + (exportName -> value))
): State[Map[String, ValueModel], Unit] = State.modify(_ + (exportName -> value))
override def resolved(exports: Map[String, ValueModel]): State[Map[String, ValueModel], Unit] =
State.modify(_ ++ exports)


@ -19,6 +19,17 @@ case class IntoFieldRaw(name: String, `type`: Type) extends PropertyRaw {
override def varNames: Set[String] = Set.empty
}
case class IntoArrowRaw(name: String, arrowType: Type, arguments: List[ValueRaw]) extends PropertyRaw {
override def `type`: Type = arrowType
override def map(f: ValueRaw => ValueRaw): PropertyRaw = this
override def varNames: Set[String] = arguments.flatMap(_.varNames).toSet
override def renameVars(vals: Map[String, String]): PropertyRaw = copy(arguments = arguments.map(_.renameVars(vals)))
}
case class IntoCopyRaw(`type`: StructType, fields: NonEmptyMap[String, ValueRaw]) extends PropertyRaw {
override def map(f: ValueRaw => ValueRaw): IntoCopyRaw = copy(fields = fields.map(f))
@ -27,6 +38,14 @@ case class IntoCopyRaw(`type`: StructType, fields: NonEmptyMap[String, ValueRaw]
override def renameVars(vals: Map[String, String]): IntoCopyRaw = this
}
case class MethodRaw(name: String, `type`: Type) extends PropertyRaw {
override def map(f: ValueRaw => ValueRaw): MethodRaw = this
override def renameVars(vals: Map[String, String]): MethodRaw = this
override def varNames: Set[String] = Set.empty
}
case class FunctorRaw(name: String, `type`: Type) extends PropertyRaw {
override def map(f: ValueRaw => ValueRaw): FunctorRaw = this


@ -162,6 +162,20 @@ case class MakeStructRaw(fields: NonEmptyMap[String, ValueRaw], structType: Stru
copy(fields = fields.map(_.renameVars(map)))
}
case class AbilityRaw(fieldsAndArrows: NonEmptyMap[String, ValueRaw], abilityType: AbilityType) extends ValueRaw {
override def baseType: Type = abilityType
override def map(f: ValueRaw => ValueRaw): ValueRaw = f(copy(fieldsAndArrows = fieldsAndArrows.map(f)))
override def varNames: Set[String] = {
fieldsAndArrows.toSortedMap.values.flatMap(_.varNames).toSet
}
override def renameVars(map: Map[String, String]): ValueRaw =
copy(fieldsAndArrows = fieldsAndArrows.map(_.renameVars(map)))
}
case class CallArrowRaw(
// TODO: ability should hold a type, not name
ability: Option[String],


@ -16,13 +16,18 @@ import aqua.types.*
case class ArgsCall(args: ProductType, callWith: List[ValueModel]) {
// Both arguments (arg names and types how they seen from the function body)
// and values (value models and types how they seen on the call site)
lazy val zipped: List[((String, Type), ValueModel)] = args.toLabelledList() zip callWith
private lazy val zipped: List[((String, Type), ValueModel)] = args.toLabelledList() zip callWith
lazy val dataArgs: Map[String, ValueModel] =
zipped.collect { case ((name, _: DataType), value) =>
name -> value
}.toMap
lazy val abilityArgs: Map[String, (VarModel, AbilityType)] =
zipped.collect { case (k, vr@VarModel(_, t@AbilityType(_, _), _)) =>
k._1 -> (vr, t)
}.toMap
lazy val streamArgs: Map[String, VarModel] =
dataArgs.collect { case (k, vr @ VarModel(n, StreamType(_), _)) =>
(k, vr)


@ -1,7 +1,7 @@
package aqua.model
import aqua.raw.ops.Call
import aqua.types.{ArrowType, Type}
import aqua.types.{ArrowType, AbilityType, Type}
// TODO docs
case class CallModel(args: List[ValueModel], exportTo: List[CallModel.Export]) {
@ -11,6 +11,10 @@ case class CallModel(args: List[ValueModel], exportTo: List[CallModel.Export]) {
m
}.toSet
def abilityArgs: List[(String, AbilityType)] = args.collect { case VarModel(m, t: AbilityType, _) =>
(m, t)
}
def usesVarNames: Set[String] = args.flatMap(_.usesVarNames).toSet
}


@ -0,0 +1,22 @@
package aqua.parser.expr
import aqua.parser.Expr
import aqua.parser.lexer.NamedTypeToken
import aqua.parser.lexer.Token.*
import aqua.parser.lift.Span
import cats.parse.Parser
import cats.{Comonad, ~>}
case class AbilityExpr[F[_]](name: NamedTypeToken[F]) extends Expr[F](AbilityExpr, name) {
override def mapK[K[_]: Comonad](fk: F ~> K): AbilityExpr[K] =
copy(name.mapK(fk))
}
object AbilityExpr extends Expr.AndIndented {
override def validChildren: List[Expr.Lexem] = FieldTypeExpr :: ArrowTypeExpr :: Nil
override val p: Parser[AbilityExpr[Span.S]] =
(`ability` *> ` ` *> NamedTypeToken.ct).map(AbilityExpr(_))
}


@ -20,12 +20,7 @@ object ArrowTypeExpr extends Expr.Leaf {
override val p: Parser[ArrowTypeExpr[Span.S]] =
(Name.p ~ ((` : ` *> ArrowTypeToken.`arrowdef`(
DataTypeToken.`datatypedef`
)) | ArrowTypeToken.`arrowWithNames`(DataTypeToken.`datatypedef`))).flatMap { case (name, t) =>
// services cannot return multiple results
if (t.res.length > 1) {
Parser.failWith("Service functions cannot have multiple results")
} else {
Parser.pure(ArrowTypeExpr(name, t))
}
)) | ArrowTypeToken.`arrowWithNames`(DataTypeToken.`datatypedef`))).map { case (name, t) =>
ArrowTypeExpr(name, t)
}
}


@ -22,7 +22,7 @@ object RootExpr extends Expr.Companion {
import Span.*
def validChildren: List[Expr.Lexem] =
ServiceExpr :: AliasExpr :: DataStructExpr :: ConstantExpr :: FuncExpr :: Nil
ServiceExpr :: AliasExpr :: DataStructExpr :: AbilityExpr :: ConstantExpr :: FuncExpr :: Nil
private def gatherResults[F[_]: LiftParser: Comonad](results: NonEmptyList[ValidatedNec[ParserError[F], Tree[F]]]): (Chain[ParserError[F]], Chain[Tree[F]]) = {
results.foldLeft[(Chain[ParserError[F]], Chain[Tree[F]])](Chain.empty -> Chain.empty) {


@ -1,25 +0,0 @@
package aqua.parser.expr
import aqua.parser.Expr
import aqua.parser.lexer.Token.*
import aqua.parser.lexer.{Ability, Name, ValueToken}
import aqua.parser.lift.LiftParser
import cats.Comonad
import cats.parse.Parser
import cats.~>
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan}
case class ScopeExpr[F[_]](name: Ability[F]) extends Expr[F](ScopeExpr, name) {
override def mapK[K[_]: Comonad](fk: F ~> K): ScopeExpr[K] =
copy(name.mapK(fk))
}
object ScopeExpr extends Expr.AndIndented {
override def validChildren: List[Expr.Lexem] = FieldTypeExpr :: ArrowTypeExpr :: Nil
override val p: Parser[ScopeExpr[Span.S]] =
(`scope` *> ` ` *> Ability.ab).map(ScopeExpr(_))
}


@ -20,7 +20,7 @@ case class AssignmentExpr[F[_]](
object AssignmentExpr extends Expr.Leaf {
override val p: P[AssignmentExpr[Span.S]] =
((Name.p <* ` = `).with1 ~ ValueToken.`value`).flatMap { case (variable, value) =>
(((Name.cl | Name.p) <* ` = `).with1 ~ ValueToken.`value`).flatMap { case (variable, value) =>
value match {
case CollectionToken(_, values) =>
if (values.isEmpty)


@ -30,6 +30,9 @@ object Name {
val p: P[Name[Span.S]] =
`name`.lift.map(Name(_))
val cl: P[Name[Span.S]] =
`Class`.lift.map(Name(_))
val upper: P[Name[Span.S]] =
NAME.lift.map(Name(_))


@ -19,6 +19,14 @@ sealed trait PropertyOp[F[_]] extends Token[F] {
def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K]
}
case class IntoArrow[F[_]: Comonad](name: Name[F], arguments: List[ValueToken[F]]) extends PropertyOp[F] {
override def as[T](v: T): F[T] = name.as(v)
override def mapK[K[_]: Comonad](fk: F ~> K): PropertyOp[K] = copy(name.mapK(fk), arguments.map(_.mapK(fk)))
override def toString: String = s".$name(${arguments.map(_.toString).mkString(", ")})"
}
case class IntoField[F[_]: Comonad](name: F[String]) extends PropertyOp[F] {
override def as[T](v: T): F[T] = name.as(v)
@ -49,6 +57,9 @@ object PropertyOp {
private val parseField: P[PropertyOp[Span.S]] =
(`.` *> `name`).lift.map(IntoField(_))
val parseArrow: P[PropertyOp[Span.S]] =
(`.` *> CallArrowToken.callBraces()).lift.map(p => IntoArrow(p._2._1, p._2._2 ++ p._2._3))
val parseCopy: P[PropertyOp[Span.S]] =
(`.` *> (`copy`.lift ~ namedArgs)).map { case (point, fields) =>
IntoCopy(point, NonEmptyMap.of(fields.head, fields.tail: _*))
@ -68,7 +79,7 @@ object PropertyOp {
}
private val parseOp: P[PropertyOp[Span.S]] =
P.oneOf(parseCopy.backtrack :: parseField.backtrack :: parseIdx :: Nil)
P.oneOf(parseCopy.backtrack :: parseArrow.backtrack :: parseField :: parseIdx :: Nil)
val ops: P[NonEmptyList[PropertyOp[Span.S]]] =
parseOp.rep


@ -49,7 +49,7 @@ object Token {
val ` as ` : P[Unit] = `as`.surroundedBy(` `)
val `alias`: P[Unit] = P.string("alias")
val `service`: P[Unit] = P.string("service")
val `scope`: P[Unit] = P.string("scope")
val `ability`: P[Unit] = P.string("ability")
val `func`: P[Unit] = P.string("func")
val `on`: P[Unit] = P.string("on")
val `via`: P[Unit] = P.string("via")
@ -97,6 +97,8 @@ object Token {
val `∅` : P[Unit] = P.char('∅')
val `(` : P[Unit] = P.char('(') <* ` `.?
val `)` : P[Unit] = ` `.?.with1 *> P.char(')')
val `{` : P[Unit] = P.char('{') <* ` `.?
val `}` : P[Unit] = ` `.?.with1 *> P.char('}')
val `()` : P[Unit] = P.string("()")
val ` -> ` : P[Unit] = P.string("->").surroundedBy(` `.?)
val ` <- ` : P[Unit] = P.string("<-").surroundedBy(` `.?)


@ -5,12 +5,13 @@ import aqua.parser.lift.LiftParser
import aqua.parser.lift.LiftParser.*
import aqua.types.ScalarType
import cats.Comonad
import cats.parse.{Accumulator0, Parser as P}
import cats.parse.{Accumulator0, Parser as P, Parser0 as P0}
import cats.syntax.comonad.*
import cats.syntax.functor.*
import cats.~>
import aqua.parser.lift.Span
import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
import cats.data.NonEmptyList
sealed trait TypeToken[S[_]] extends Token[S] {
def mapK[K[_]: Comonad](fk: S ~> K): TypeToken[K]
@ -117,26 +118,37 @@ case class ArrowTypeToken[S[_]: Comonad](
object ArrowTypeToken {
def typeDef(): P[TypeToken[S]] = P.defer(TypeToken.`typedef`.between(`(`, `)`).backtrack | TypeToken.`typedef`)
def typeDef(): P[TypeToken[S]] =
P.defer(TypeToken.`typedef`.between(`(`, `)`).backtrack | TypeToken.`typedef`)
def returnDef(): P[List[TypeToken[S]]] = comma(
typeDef().backtrack
).map(_.toList)
// {SomeAb, SecondAb} for NamedTypeToken
def abilities(): P0[List[(Option[Name[S]], NamedTypeToken[S])]] =
(`{` *> comma(`Class`.surroundedBy(`/s*`).lift.map(s => Option(Name(s)) -> NamedTypeToken(s)))
.map(_.toList) <* `}`).?.map(_.getOrElse(List.empty))
def `arrowdef`(argTypeP: P[TypeToken[Span.S]]): P[ArrowTypeToken[Span.S]] =
(comma0(argTypeP).with1 ~ ` -> `.lift ~
((abilities() ~ comma0(argTypeP)).with1 ~ ` -> `.lift ~
(returnDef().backtrack
| `()`.as(Nil))).map { case ((args, point), res) =>
ArrowTypeToken(point, args.map(Option.empty[Name[Span.S]] -> _), res)
| `()`.as(Nil))).map { case (((abs, argsList), point), res) =>
val args = argsList.map(Option.empty[Name[Span.S]] -> _)
ArrowTypeToken(
point,
abs ++ args,
res
)
}
def `arrowWithNames`(argTypeP: P[TypeToken[Span.S]]): P[ArrowTypeToken[Span.S]] =
(((` `.?.with1 *> `(`.lift <* `/s*`) ~ comma0(
(((` `.?.with1 *> abilities().with1 ~ `(`.lift <* `/s*`) ~ comma0(
(Name.p.map(Option(_)) ~ (` : ` *> (argTypeP | argTypeP.between(`(`, `)`))))
.surroundedBy(`/s*`)
) <* (`/s*` *> `)` <* ` `.?)) ~
(` -> ` *> returnDef()).?).map { case ((point, args), res) =>
ArrowTypeToken(point, args, res.toList.flatMap(_.toList))
(` -> ` *> returnDef()).?).map { case (((abilities, point), args), res) =>
ArrowTypeToken(point, abilities ++ args, res.toList.flatMap(_.toList))
}
}
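For the grammar side, a rough standalone cats-parse sketch of the new {SomeAb, SecondAb} prefix accepted before an argument list (this is not the compiler's actual token code, which reuses `Class`, comma and the backtick token parsers):

import cats.parse.{Parser => P}

// An upper-case identifier, then a comma-separated list of them in braces.
val className: P[String] =
  (P.charIn('A' to 'Z') ~ P.charIn(('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')).rep0).string
val ws0 = P.charIn(' ').rep0.void
val abilityList: P[List[String]] =
  P.char('{') *> className.surroundedBy(ws0).repSep(P.char(',')).map(_.toList) <* P.char('}')

// abilityList.parseAll("{SomeAb, SecondAb}") == Right(List("SomeAb", "SecondAb"))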
@ -172,7 +184,7 @@ object TypeToken {
val `typedef`: P[TypeToken[Span.S]] =
P.oneOf(
ArrowTypeToken
.`arrowdef`((DataTypeToken.`datatypedef`))
.`arrowdef`(DataTypeToken.`datatypedef`)
.backtrack :: DataTypeToken.`datatypedef` :: Nil
)


@ -85,38 +85,54 @@ case class CallArrowToken[F[_]: Comonad](
object CallArrowToken {
case class CallBraces(name: Name[S], abilities: List[ValueToken[S]], args: List[ValueToken[S]])
// {SomeAb, SecondAb} for ValueToken
def abilities(): P[NonEmptyList[ValueToken[S]]] =
`{` *> comma(ValueToken.`value`.surroundedBy(`/s*`)) <* `}`
def callBraces(): P[CallBraces] = P
.defer(
Name.p
~ abilities().? ~ comma0(ValueToken.`value`.surroundedBy(`/s*`))
.between(` `.?.with1 *> `(` <* `/s*`, `/s*` *> `)`)
).map { case ((n, ab), args) =>
CallBraces(n, ab.map(_.toList).getOrElse(Nil), args)
}
.withContext(
"Missing braces '()' after the function call"
)
val callArrow: P[CallArrowToken[Span.S]] =
((NamedTypeToken.dotted <* `.`).?.with1 ~
(Name.p
~ comma0(ValueToken.`value`.surroundedBy(`/s*`))
.between(` `.?.with1 *> `(` <* `/s*`, `/s*` *> `)`))
callBraces()
.withContext(
"Missing braces '()' after the function call"
)).map { case (ab, (fn, args)) =>
CallArrowToken(ab, fn, args)
)).map { case (ab, callBraces) =>
CallArrowToken(ab, callBraces.name, callBraces.abilities ++ callBraces.args)
}
}
case class StructValueToken[F[_]: Comonad](
case class NamedValueToken[F[_]: Comonad](
typeName: NamedTypeToken[F],
fields: NonEmptyMap[String, ValueToken[F]]
) extends ValueToken[F] {
override def mapK[K[_]: Comonad](fk: F ~> K): StructValueToken[K] =
override def mapK[K[_]: Comonad](fk: F ~> K): NamedValueToken[K] =
copy(typeName.mapK(fk), fields.map(_.mapK(fk)))
override def as[T](v: T): F[T] = typeName.as(v)
}
object StructValueToken {
object NamedValueToken {
val dataValue: P[StructValueToken[Span.S]] =
val dataValue: P[NamedValueToken[Span.S]] =
(`Class`.lift ~ namedArgs)
.withContext(
"Missing braces '()' after the struct type"
)
.map { case (dn, args) =>
StructValueToken(NamedTypeToken(dn), NonEmptyMap.of(args.head, args.tail: _*))
NamedValueToken(NamedTypeToken(dn), NonEmptyMap.of(args.head, args.tail: _*))
}
}
@ -191,15 +207,16 @@ object InfixToken {
basic.between(`(`, `)`).backtrack
// One element of math expression
private val atom: P[ValueToken[S]] = P.oneOf(
val atom: P[ValueToken[S]] = P.oneOf(
literal.backtrack ::
initPeerId.backtrack ::
P.defer(
CollectionToken.collection
) ::
P.defer(StructValueToken.dataValue).backtrack ::
P.defer(NamedValueToken.dataValue).backtrack ::
P.defer(CallArrowToken.callArrow).backtrack ::
P.defer(brackets(InfixToken.mathExpr)) ::
P.defer(abProperty).backtrack ::
P.defer(brackets(InfixToken.mathExpr)).backtrack ::
varProperty ::
Nil
)
@ -313,6 +330,11 @@ object ValueToken {
VarToken(n, l.fold[List[PropertyOp[Span.S]]](Nil)(_.toList))
}
val abProperty: P[VarToken[Span.S]] =
(Name.cl ~ PropertyOp.ops.?).map { case (n, l) =>
VarToken(n, l.fold[List[PropertyOp[Span.S]]](Nil)(_.toList))
}
val bool: P[LiteralToken[Span.S]] =
P.oneOf(
("true" :: "false" :: Nil)


@ -122,8 +122,14 @@ trait AquaSpec extends EitherValues {
def parseAssign(str: String): AssignmentExpr[Id] =
AssignmentExpr.p.parseAll(str).value.mapK(spanToId)
def parseData(str: String): StructValueToken[Id] =
StructValueToken.dataValue.parseAll(str).value.mapK(spanToId)
def parseVar(str: String): VarToken[Id] =
ValueToken.varProperty.parseAll(str).value.mapK(spanToId)
def parseData(str: String): NamedValueToken[Id] =
NamedValueToken.dataValue.parseAll(str).value.mapK(spanToId)
def parseIntoArrow(str: String): PropertyOp[Id] =
PropertyOp.parseArrow.parseAll(str).value.mapK(spanToId)
def parsePush(str: String): PushToStreamExpr[Id] =
PushToStreamExpr.p.parseAll(str).value.mapK(spanToId)


@ -0,0 +1,43 @@
package aqua.parser
import aqua.AquaSpec
import aqua.AquaSpec.{toNumber, toStr, toVar}
import aqua.parser.expr.ConstantExpr
import aqua.parser.expr.func.AssignmentExpr
import aqua.parser.lexer.CollectionToken.Mode.ArrayMode
import aqua.parser.lexer.*
import aqua.types.LiteralType
import cats.Id
import cats.data.{NonEmptyList, NonEmptyMap}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class AbilityValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
import AquaSpec.*
private def parseAndCheckAbility(str: String) = {
val one = LiteralToken[Id]("1", LiteralType.number)
parseData(
str
) should be(
NamedValueToken(
NamedTypeToken[Id]("AbilityA"),
NonEmptyMap.of(
"v1" -> one,
"f1" -> VarToken(Name[Id]("input"), IntoField[Id]("arrow") :: Nil)
)
)
)
}
"one line struct value" should "be parsed" in {
parseAndCheckAbility("""AbilityA(v1 = 1, f1 = input.arrow)""")
}
"multiline line struct value" should "be parsed" in {
parseAndCheckAbility(
"""AbilityA(v1 = 1, f1 = input.arrow)""".stripMargin)
}
}


@ -28,16 +28,18 @@ class ArrowTypeExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
)
)
parseArrow("onIn{SomeAb}(a: Custom, b: Custom2)") should be(
ArrowTypeExpr[Id](
"onIn",
toNamedArrow(List("SomeAb" -> toNamedType("SomeAb"), "a" -> toNamedType("Custom"), "b" -> toNamedType("Custom2")), Nil)
)
)
parseArrow("onIn: Custom, string, u32, Custom3 -> Custom2") should be(
ArrowTypeExpr[Id](
"onIn",
toArrowType(List("Custom", string, u32, "Custom3"), Some("Custom2"))
)
)
ArrowTypeExpr.p
.parseAll(
"onIn: Custom, string, u32, Custom3 -> Custom2, string"
)
.isLeft shouldBe (true)
}
}


@ -12,7 +12,7 @@ class CallArrowSpec extends AnyFlatSpec with Matchers with AquaSpec {
"func calls" should "parse func()" in {
parseExpr("func()") should be(
CallArrowExpr[Id](Nil, CallArrowToken(None, toName("func"), List()))
CallArrowExpr[Id](Nil, CallArrowToken(None, toName("func"), Nil))
)
parseExpr("Ab.func(arg)") should be(
CallArrowExpr[Id](


@ -266,13 +266,13 @@ class FuncExprSpec extends AnyFlatSpec with Matchers with Inside with Inspectors
qTree.d() shouldBe ArrowExpr(toNamedArrow(("val" -> string) :: Nil, boolSc :: Nil))
qTree.d() shouldBe CallArrowExpr(
List("one"),
CallArrowToken(Some(toNamedType("Local")), "gt", List())
CallArrowToken(Some(toNamedType("Local")), "gt", Nil)
)
qTree.d() shouldBe OnExpr(toStr("smth"), List(toStr("else")))
qTree.d() shouldBe CallArrowExpr(List("two"), CallArrowToken(None, "tryGen", List()))
qTree.d() shouldBe CallArrowExpr(List("two"), CallArrowToken(None, "tryGen", Nil))
qTree.d() shouldBe CallArrowExpr(
List("three"),
CallArrowToken(Some(toNamedType("Local")), "gt", List())
CallArrowToken(Some(toNamedType("Local")), "gt", Nil)
)
qTree.d() shouldBe ReturnExpr(NonEmptyList.one(toVar("two")))
}


@ -0,0 +1,33 @@
package aqua.parser
import aqua.AquaSpec
import aqua.parser.lexer.{IntoArrow, PropertyOp, VarToken}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import cats.Id
class IntoArrowSpec extends AnyFlatSpec with Matchers with AquaSpec {
import AquaSpec.*
"into arrow" should "be parsed" in {
val arrowStr = ".arrow(\"\")"
val result = parseIntoArrow(arrowStr)
result should be(IntoArrow[Id](toName("arrow"), toStr("") :: Nil))
}
"into arrow without arguments" should "be parsed" in {
val arrowStr = ".arrow()"
val result = parseIntoArrow(arrowStr)
result should be(IntoArrow[Id](toName("arrow"), Nil))
}
"into arrow with value" should "be parsed" in {
val arrowStr = "input.arrow(\"\")"
val result = parseVar(arrowStr)
val expected = VarToken[Id](toName("input"), IntoArrow[Id](toName("arrow"), toStr("") :: Nil) :: Nil)
result should be(expected)
}
}


@ -4,9 +4,8 @@ import aqua.AquaSpec
import aqua.AquaSpec.{toNumber, toStr, toVar}
import aqua.parser.expr.ConstantExpr
import aqua.parser.expr.func.AssignmentExpr
import aqua.parser.lexer.Token
import aqua.parser.lexer.{Ability, CallArrowToken, CollectionToken, IntoArrow, LiteralToken, Name, NamedTypeToken, NamedValueToken, Token, ValueToken, VarToken}
import aqua.parser.lexer.CollectionToken.Mode.ArrayMode
import aqua.parser.lexer.{Ability, CallArrowToken, CollectionToken, NamedTypeToken, LiteralToken, Name, StructValueToken, ValueToken, VarToken}
import aqua.types.LiteralType
import cats.Id
import org.scalatest.flatspec.AnyFlatSpec
@ -27,14 +26,14 @@ class StructValueExprSpec extends AnyFlatSpec with Matchers with AquaSpec {
parseData(
str
) should be(
StructValueToken(
NamedValueToken(
NamedTypeToken[Id]("Obj"),
NonEmptyMap.of(
"f1" -> one,
"f2" -> a,
"f3" -> CollectionToken[Id](ArrayMode, List(one, two, three)),
"f4" -> CollectionToken[Id](ArrayMode, List(b, c)),
"f5" -> StructValueToken(
"f5" -> NamedValueToken(
NamedTypeToken[Id]("NestedObj"),
NonEmptyMap.of(
"i1" -> two,


@ -144,6 +144,18 @@ class TypeTokenSpec extends AnyFlatSpec with Matchers with EitherValues {
)
)
arrowWithNames("{SomeAb, SecondAb}(a: A) -> B") should be(
ArrowTypeToken[Id](
(),
(Some(Name[Id]("SomeAb")) -> NamedTypeToken[Id]("SomeAb")) :: (Some(Name[Id](
"SecondAb"
)) -> NamedTypeToken[Id]("SecondAb")) :: (
Some(Name[Id]("a")) -> NamedTypeToken[Id]("A")
) :: Nil,
List(NamedTypeToken[Id]("B"))
)
)
arrowdef("u32 -> Boo") should be(
ArrowTypeToken[Id](
(),


@ -13,16 +13,46 @@ class VarLambdaSpec extends AnyFlatSpec with Matchers with EitherValues {
"var lambda" should "parse" in {
val opsP = (s: String) => Name.dotted.parseAll(s).value.mapK(spanToId)
opsP("SomeClass.some_val") should be(Name[Id]("SomeClass.some_val"))
opsP("some_val") should be(Name[Id]("some_val"))
opsP("SOME_CONST") should be(Name[Id]("SOME_CONST"))
opsP("SomeClass.SOME_CONST") should be(Name[Id]("SomeClass.SOME_CONST"))
opsP("SomeClass.some_val") should be(Name[Id]("SomeClass.some_val"))
opsP("some_val") should be(Name[Id]("some_val"))
opsP("SOME_CONST") should be(Name[Id]("SOME_CONST"))
opsP("SomeClass.SOME_CONST") should be(Name[Id]("SomeClass.SOME_CONST"))
}
"var lambda in VarToken" should "parse" in {
val opsP = (s: String) => ValueToken.varProperty.parseAll(s).value.mapK(spanToId)
opsP("some_val") should be(VarToken[Id](Name[Id]("some_val")))
opsP("SomeClass.SOME_CONST") should be(VarToken[Id](Name[Id]("SomeClass.SOME_CONST")))
}
"var lambda in value" should "parse" in {
val opsP = (s: String) => InfixToken.atom.parseAll(s).value.mapK(spanToId)
opsP("some_val") should be(VarToken[Id](Name[Id]("some_val")))
opsP("SomeClass.SOME_CONST") should be(VarToken[Id](Name[Id]("SomeClass.SOME_CONST")))
}
"var lambda in ability" should "parse" in {
val opsP = (s: String) => ValueToken.abProperty.parseAll(s).value.mapK(spanToId)
opsP("SomeClass") should be(VarToken[Id](Name[Id]("SomeClass")))
opsP("SomeClass.call()") should be(
VarToken[Id](Name[Id]("SomeClass"), IntoArrow(Name[Id]("call"), Nil) :: Nil)
)
}
"parse Class " should "parse" in {
val opsP = (s: String) => Name.cl.parseAll(s).value.mapK(spanToId)
opsP("SomeClass") should be(Name[Id]("SomeClass"))
opsP("SC") should be(Name[Id]("SC"))
}
}


@ -51,7 +51,7 @@ object ExprSem {
case expr: JoinExpr[S] => new JoinSem(expr).program[G]
case expr: ReturnExpr[S] => new ReturnSem(expr).program[G]
case expr: ServiceExpr[S] => new ServiceSem(expr).program[G]
case expr: ScopeExpr[S] => new ScopeSem(expr).program[G]
case expr: AbilityExpr[S] => new AbilitySem(expr).program[G]
case expr: RootExpr[S] => new RootSem(expr).program[G]
}

View File

@ -0,0 +1,45 @@
package aqua.semantics.expr
import aqua.parser.expr.AbilityExpr
import aqua.parser.lexer.{Name, NamedTypeToken}
import aqua.raw.{Raw, TypeRaw}
import aqua.semantics.Prog
import aqua.semantics.rules.ValuesAlgebra
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.definitions.DefinitionsAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.{ArrowType, AbilityType, Type}
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.applicative.*
import cats.syntax.semigroupal.*
import cats.Monad
import cats.data.{NonEmptyList, NonEmptyMap}
class AbilitySem[S[_]](val expr: AbilityExpr[S]) extends AnyVal {
def program[Alg[_]: Monad](implicit
T: TypesAlgebra[S, Alg],
D: DefinitionsAlgebra[S, Alg]
): Prog[Alg, Raw] = {
Prog.after(_ =>
D.purgeDefs(expr.name).flatMap {
case Some(fields) =>
val t = AbilityType(expr.name.value, fields)
T.defineNamedType(expr.name, t).map {
case true =>
TypeRaw(
expr.name.value,
t
): Raw
case false =>
Raw.error("Scope types unresolved")
}
case None => Raw.error("Scope types unresolved").pure[Alg]
}
)
}
}
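For illustration only, not part of the diff: a rough sketch of the Raw this program yields for a one-arrow ability, assuming aqua.types.{ProductType, ScalarType} are in scope; the ability and field names are hypothetical.
// Hypothetical ability: one arrow `someArrow: string -> string` plus a `str: string` field
val someArrowT = ArrowType(ProductType(ScalarType.string :: Nil), ProductType(ScalarType.string :: Nil))
val someAbT = AbilityType("SomeAb", NonEmptyMap.of[String, Type]("someArrow" -> someArrowT, "str" -> ScalarType.string))
val result: Raw = TypeRaw("SomeAb", someAbT) // what `program` returns once defineNamedType succeeds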

View File

@ -21,13 +21,14 @@ class DataStructSem[S[_]](val expr: DataStructExpr[S]) extends AnyVal {
Prog.after((_: Raw) =>
D.purgeDefs(expr.name).flatMap {
case Some(fields) =>
T.defineDataType(expr.name, fields).map {
case Some(st@StructType(_, _)) =>
val t = StructType(expr.name.value, fields)
T.defineNamedType(expr.name, t).map {
case true =>
TypeRaw(
expr.name.value,
st
t
): Raw
case None =>
case false =>
Raw.error("Data struct types unresolved")
}
case None => Raw.error("Data struct types unresolved").pure[Alg]

View File

@ -1,31 +0,0 @@
package aqua.semantics.expr
import aqua.parser.expr.ScopeExpr
import aqua.parser.lexer.{NamedTypeToken, Name}
import aqua.raw.{Raw, ServiceRaw}
import aqua.semantics.Prog
import aqua.semantics.rules.ValuesAlgebra
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.{ArrowType, Type}
import aqua.raw.ScopeRaw
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.applicative.*
import cats.Monad
import cats.data.NonEmptyList
class ScopeSem[S[_]](val expr: ScopeExpr[S]) extends AnyVal {
def program[Alg[_]: Monad](implicit
A: AbilitiesAlgebra[S, Alg],
N: NamesAlgebra[S, Alg],
T: TypesAlgebra[S, Alg],
V: ValuesAlgebra[S, Alg]
): Prog[Alg, Raw] =
Prog.after(
_ =>
Raw.error("Undefined").pure[Alg])
}

View File

@ -3,20 +3,16 @@ package aqua.semantics.expr.func
import aqua.parser.expr.func.CallArrowExpr
import aqua.raw.Raw
import aqua.raw.ops.{Call, CallArrowRawTag, FuncOp}
import aqua.raw.value.ValueRaw
import aqua.raw.value.CallArrowRaw
import aqua.semantics.Prog
import aqua.semantics.rules.ValuesAlgebra
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.{ArrowType, StreamType, Type}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import aqua.types.{StreamType, Type}
import cats.Monad
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.{Monad, Traverse}
import aqua.raw.value.CallArrowRaw
class CallArrowSem[S[_]](val expr: CallArrowExpr[S]) extends AnyVal {
@ -37,7 +33,6 @@ class CallArrowSem[S[_]](val expr: CallArrowExpr[S]) extends AnyVal {
private def toModel[Alg[_]: Monad](implicit
N: NamesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg],
T: TypesAlgebra[S, Alg],
V: ValuesAlgebra[S, Alg]
): Alg[Option[FuncOp]] = for {
@ -55,7 +50,6 @@ class CallArrowSem[S[_]](val expr: CallArrowExpr[S]) extends AnyVal {
def program[Alg[_]: Monad](implicit
N: NamesAlgebra[S, Alg],
A: AbilitiesAlgebra[S, Alg],
T: TypesAlgebra[S, Alg],
V: ValuesAlgebra[S, Alg]
): Prog[Alg, Raw] =

View File

@ -47,6 +47,15 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
op match {
case op: IntoField[S] =>
T.resolveField(rootType, op)
case op: IntoArrow[S] =>
op.arguments
.map(valueToRaw)
.sequence
.map(_.sequence)
.flatMap {
case None => None.pure[Alg]
case Some(arguments) => T.resolveArrow(rootType, op, arguments)
}
case op: IntoCopy[S] =>
op.fields
.map(valueToRaw)
@ -79,10 +88,10 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
(Some(t) -> Chain.empty).pure[Alg]
) { case (acc, op) =>
acc.flatMap {
// Some(tt) means that the previous property op was resolved successfully
case (Some(tt), prop) =>
// Some(rootType) means that the previous property op was resolved successfully
case (Some(rootType), prop) =>
// Resolve a single property
resolveSingleProperty(tt, op).map {
resolveSingleProperty(rootType, op).map {
// Property op resolved, add it to accumulator and update the last known type
case Some(p) => (Some(p.`type`), prop :+ p)
// Property op is not resolved, it's an error, stop iterations
@ -108,9 +117,9 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
None.pure[Alg]
}
case dvt @ StructValueToken(typeName, fields) =>
case dvt @ NamedValueToken(typeName, fields) =>
T.resolveType(typeName).flatMap {
case Some(struct @ StructType(_, _)) =>
case Some(resolvedType) =>
for {
fieldsRawOp: NonEmptyMap[String, Option[ValueRaw]] <- fields.traverse(valueToRaw)
fieldsRaw: List[(String, ValueRaw)] = fieldsRawOp.toSortedMap.toList.collect {
@ -119,21 +128,29 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
rawFields = NonEmptyMap.fromMap(SortedMap.from(fieldsRaw))
typeFromFieldsWithData = rawFields
.map(rf =>
(
StructType(typeName.value, rf.map(_.`type`)),
Some(MakeStructRaw(rf, struct))
)
resolvedType match {
case struct@StructType(_, _) =>
(
StructType(typeName.value, rf.map(_.`type`)),
Some(MakeStructRaw(rf, struct))
)
case scope@AbilityType(_, _) =>
(
AbilityType(typeName.value, rf.map(_.`type`)),
Some(AbilityRaw(rf, scope))
)
}
)
.getOrElse(BottomType -> None)
(typeFromFields, data) = typeFromFieldsWithData
isTypesCompatible <- T.ensureTypeMatches(dvt, struct, typeFromFields)
isTypesCompatible <- T.ensureTypeMatches(dvt, resolvedType, typeFromFields)
} yield data.filter(_ => isTypesCompatible)
case _ =>
None.pure[Alg]
case _ => None.pure[Alg]
}
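// Illustration with hypothetical names, not taken from this commit: for a value
//   SomeAb = SomeAb(someArrow = f, str = s)
// `resolvedType` is the registered AbilityType, so the branch above builds
// AbilityRaw(rawFields, abilityType); for a StructType the same field map goes into
// MakeStructRaw(rawFields, structType) instead.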
case ct @ CollectionToken(_, values) =>
values.traverse(valueToRaw).map(_.toList.flatten).map(NonEmptyList.fromList).map {
values.traverse(valueToRaw).map(_.flatten).map(NonEmptyList.fromList).map {
case Some(raws) if raws.size == values.size =>
val element = raws.map(_.`type`).reduceLeft(_ `∩` _)
// In case we mix values of uncomparable types, intersection returns bottom, meaning "uninhabited type".
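// E.g. ScalarType.string `∩` ScalarType.u32 is the bottom type, so a literal collection
// mixing string and number values cannot be given a common element type.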
@ -157,7 +174,7 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
case ca: CallArrowToken[S] =>
callArrowToRaw(ca).map(_.widen[ValueRaw])
case it @ InfixToken(l, r, i) =>
case it @ InfixToken(l, r, _) =>
(valueToRaw(l), valueToRaw(r)).mapN((ll, rr) => ll -> rr).flatMap {
case (Some(leftRaw), Some(rightRaw)) =>
// TODO handle literal types
@ -210,90 +227,84 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
}
def callArrowToRaw(ca: CallArrowToken[S]): Alg[Option[CallArrowRaw]] = for {
raw <- ca.ability
.fold(
N.readArrow(ca.funcName)
.map(
_.map(bt =>
CallArrowRaw(
ability = None,
name = ca.funcName.value,
arguments = Nil,
baseType = bt,
serviceId = None
// Generate CallArrowRaw for arrow in ability
def callAbType(ab: String, abType: AbilityType, ca: CallArrowToken[S]): Alg[Option[CallArrowRaw]] =
abType.arrows.get(ca.funcName.value) match {
case Some(arrowType) => Option(CallArrowRaw(None, s"$ab.${ca.funcName.value}", Nil, arrowType, None)).pure[Alg]
case None => None.pure[Alg]
}
def callArrowToRaw(ca: CallArrowToken[S]): Alg[Option[CallArrowRaw]] = {
for {
raw <- ca.ability
.fold(
N.readArrow(ca.funcName)
.map(
_.map(bt =>
CallArrowRaw(
ability = None,
name = ca.funcName.value,
arguments = Nil,
baseType = bt,
serviceId = None
)
)
)
)
)(ab =>
(A.getArrow(ab, ca.funcName), A.getServiceId(ab)).mapN {
case (Some(at), Right(sid)) =>
// Service call, actually
CallArrowRaw(
ability = Some(ab.value),
name = ca.funcName.value,
arguments = Nil,
baseType = at,
serviceId = Some(sid)
).some
case (Some(at), Left(true)) =>
// Ability function call, actually
CallArrowRaw(
ability = Some(ab.value),
name = ca.funcName.value,
arguments = Nil,
baseType = at,
serviceId = None
).some
case _ => none
}
)
result <- raw.flatTraverse(r =>
val arr = r.baseType
for {
argsCheck <- T.checkArgumentsNumber(ca.funcName, arr.domain.length, ca.args.length)
args <- Option
.when(argsCheck)(ca.args zip arr.domain.toList)
.traverse(
_.flatTraverse { case (tkn, tp) =>
for {
maybeValueRaw <- valueToRaw(tkn)
checked <- maybeValueRaw.flatTraverse(v =>
T.ensureTypeMatches(tkn, tp, v.`type`)
.map(Option.when(_)(v))
)
} yield checked.toList
}
)
result = args
.filter(_.length == arr.domain.length)
.map(args => r.copy(arguments = args))
} yield result
)
} yield result
def checkArguments(token: Token[S], arr: ArrowType, args: List[ValueToken[S]]): Alg[Boolean] =
// TODO: do we really need to check this?
T.checkArgumentsNumber(token, arr.domain.length, args.length).flatMap {
case false => false.pure[Alg]
case true =>
args
.map[Alg[Option[(Token[S], Type)]]](tkn => resolveType(tkn).map(_.map(t => tkn -> t)))
.zip(arr.domain.toList)
.foldLeft(
true.pure[Alg]
) { case (f, (ft, t)) =>
(
f,
ft.flatMap {
case None =>
false.pure[Alg]
case Some((tkn, valType)) =>
T.ensureTypeMatches(tkn, t, valType)
)(ab =>
// TODO: Hack. Check whether the ability type is registered locally.
// If it is, this is an ability defined in this file; if not, it is an imported ability.
T.getType(ab.value).flatMap {
case Some(abType: AbilityType) =>
callAbType(ab.value, abType, ca)
case _ =>
(A.getArrow(ab, ca.funcName), A.getServiceId(ab)).mapN {
case (Some(at), Right(sid)) =>
// Service call, actually
CallArrowRaw(
ability = Some(ab.value),
name = ca.funcName.value,
arguments = Nil,
baseType = at,
serviceId = Some(sid)
).some
case (Some(at), Left(true)) =>
// Ability function call, actually
CallArrowRaw(
ability = Some(ab.value),
name = ca.funcName.value,
arguments = Nil,
baseType = at,
serviceId = None
).some
case _ => none
}
).mapN(_ && _)
}
}
)
result <- raw.flatTraverse(r =>
val arr = r.baseType
for {
argsCheck <- T.checkArgumentsNumber(ca.funcName, arr.domain.length, ca.args.length)
args <- Option
.when(argsCheck)(ca.args zip arr.domain.toList)
.traverse(
_.flatTraverse { case (tkn, tp) =>
for {
maybeValueRaw <- valueToRaw(tkn)
checked <- maybeValueRaw.flatTraverse(v =>
T.ensureTypeMatches(tkn, tp, v.`type`)
.map(Option.when(_)(v))
)
} yield checked.toList
}
)
result = args
.filter(_.length == arr.domain.length)
.map(args => r.copy(arguments = args))
} yield result
)
} yield result
}
}
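A minimal sketch of what callAbType produces, assuming a hypothetical ability SomeAb with an arrow someArrow: string -> string; the arguments stay empty here and are filled in later by the generic argument check in callArrowToRaw.
val someArrowT = ArrowType(ProductType(ScalarType.string :: Nil), ProductType(ScalarType.string :: Nil))
val someAbT = AbilityType("SomeAb", NonEmptyMap.of[String, Type]("someArrow" -> someArrowT))
// For a `SomeAb.someArrow` call token, callAbType("SomeAb", someAbT, ca) resolves to:
// Some(CallArrowRaw(None, "SomeAb.someArrow", Nil, someArrowT, None))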

View File

@ -12,6 +12,7 @@ import aqua.semantics.rules.errors.ReportErrors
import aqua.types.ArrowType
import cats.data.{NonEmptyList, NonEmptyMap, State}
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.~>
import monocle.Lens
import monocle.macros.GenLens
@ -43,19 +44,24 @@ class AbilitiesInterpreter[S[_], X](implicit
}
case None =>
modify(s =>
s.copy(
services = s.services
.updated(name.value, ServiceRaw(name.value, arrows.map(_._2), defaultId)),
definitions = s.definitions.updated(name.value, name)
)
).flatMap { _ =>
locations.addTokenWithFields(
name.value,
name,
arrows.toNel.toList.map(t => t._1 -> t._2._1)
)
}.as(true)
arrows.toNel.map(_._2).collect {
case (n, arr) if arr.codomain.length > 1 =>
report(n, "Service functions cannot have multiple results")
}.sequence.flatMap { _ =>
modify(s =>
s.copy(
services = s.services
.updated(name.value, ServiceRaw(name.value, arrows.map(_._2), defaultId)),
definitions = s.definitions.updated(name.value, name)
)
).flatMap { _ =>
locations.addTokenWithFields(
name.value,
name,
arrows.toNel.toList.map(t => t._1 -> t._2._1)
)
}.as(true)
}
}
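// Illustration of the check above with a hypothetical arrow: a service function declared with
// two results, i.e. an ArrowType whose codomain is ProductType(ScalarType.string :: ScalarType.u32 :: Nil),
// has codomain.length == 2, so "Service functions cannot have multiple results" is reported for it.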
// adds location from token to its definition

View File

@ -9,13 +9,15 @@ import cats.data.NonEmptyList
trait TypesAlgebra[S[_], Alg[_]] {
def resolveType(token: TypeToken[S]): Alg[Option[Type]]
def getType(name: String): Alg[Option[Type]]
def resolveArrowDef(arrowDef: ArrowTypeToken[S]): Alg[Option[ArrowType]]
def defineDataType(
def defineNamedType(
name: NamedTypeToken[S],
fields: NonEmptyMap[String, Type]
): Alg[Option[StructType]]
`type`: Type
): Alg[Boolean]
def defineAlias(name: NamedTypeToken[S], target: Type): Alg[Boolean]
@ -28,6 +30,8 @@ trait TypesAlgebra[S[_], Alg[_]] {
): Alg[Option[PropertyRaw]]
def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[PropertyRaw]]
def resolveArrow(rootT: Type, op: IntoArrow[S], arguments: List[ValueRaw]): Alg[Option[PropertyRaw]]
def ensureValuesComparable(token: Token[S], left: Type, right: Type): Alg[Boolean]
def ensureTypeMatches(token: Token[S], expected: Type, givenType: Type): Alg[Boolean]

View File

@ -1,7 +1,15 @@
package aqua.semantics.rules.types
import aqua.parser.lexer.*
import aqua.raw.value.{FunctorRaw, IntoCopyRaw, IntoFieldRaw, IntoIndexRaw, PropertyRaw, ValueRaw}
import aqua.raw.value.{
FunctorRaw,
IntoArrowRaw,
IntoCopyRaw,
IntoFieldRaw,
IntoIndexRaw,
PropertyRaw,
ValueRaw
}
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.StackInterpreter
import aqua.semantics.rules.errors.ReportErrors
@ -10,9 +18,11 @@ import aqua.types.{
ArrowType,
BoxType,
LiteralType,
NamedType,
OptionType,
ProductType,
ScalarType,
AbilityType,
StreamType,
StructType,
Type
@ -51,6 +61,9 @@ class TypesInterpreter[S[_], X](implicit
state.strict.get(ctt.value).map(t => (t, state.definitions.get(ctt.value).toList.map(ctt -> _)))
}
override def getType(name: String): State[X, Option[Type]] =
getState.map(st => st.strict.get(name))
override def resolveType(token: TypeToken[S]): State[X, Option[Type]] =
getState.map(st => TypesStateHelper.resolveTypeToken(token, st, resolver)).flatMap {
case Some(t) =>
@ -77,23 +90,21 @@ class TypesInterpreter[S[_], X](implicit
}
}
override def defineDataType(
override def defineNamedType(
name: NamedTypeToken[S],
fields: NonEmptyMap[String, Type]
): State[X, Option[StructType]] =
`type`: Type
): State[X, Boolean] =
getState.map(_.definitions.get(name.value)).flatMap {
case Some(n) if n == name => State.pure(None)
case Some(n) if n == name => State.pure(true)
case Some(_) =>
report(name, s"Type `${name.value}` was already defined").as(None)
report(name, s"Type `${name.value}` was already defined").as(false)
case None =>
val structType = StructType(name.value, fields)
modify { st =>
st.copy(
strict = st.strict.updated(name.value, structType),
strict = st.strict.updated(name.value, `type`),
definitions = st.definitions.updated(name.value, name)
)
}
.as(Option(structType))
}.as(true)
}
override def defineAlias(name: NamedTypeToken[S], target: Type): State[X, Boolean] =
@ -111,28 +122,64 @@ class TypesInterpreter[S[_], X](implicit
override def resolveField(rootT: Type, op: IntoField[S]): State[X, Option[PropertyRaw]] = {
rootT match {
case StructType(name, fields) =>
fields(op.value).fold(
report(
op,
s"Field `${op.value}` not found in type `$name`, available: ${fields.toNel.toList.map(_._1).mkString(", ")}"
).as(None)
) { t =>
locations.pointFieldLocation(name, op.value, op).as(Some(IntoFieldRaw(op.value, t)))
}
case nt: NamedType =>
nt.fields(op.value)
.fold(
report(
op,
s"Field `${op.value}` not found in type `${nt.name}`, available: ${nt.fields.toNel.toList.map(_._1).mkString(", ")}"
).as(None)
) { t =>
locations.pointFieldLocation(nt.name, op.value, op).as(Some(IntoFieldRaw(op.value, t)))
}
case t =>
t.properties
.get(op.value)
.fold(
report(
op,
s"Expected Struct type to resolve a field '${op.value}' or a type with this property. Got: $rootT"
s"Expected data type to resolve a field '${op.value}' or a type with this property. Got: $rootT"
).as(None)
)(t => State.pure(Some(FunctorRaw(op.value, t))))
}
}
override def resolveArrow(
rootT: Type,
op: IntoArrow[S],
arguments: List[ValueRaw]
): State[X, Option[PropertyRaw]] = {
rootT match {
case AbilityType(name, fieldsAndArrows) =>
fieldsAndArrows(op.name.value).fold(
report(
op,
s"Arrow `${op.name.value}` not found in type `$name`, available: ${fieldsAndArrows.toNel.toList.map(_._1).mkString(", ")}"
).as(None)
) { t =>
val resolvedType = t match {
// TODO: is this the correct way to resolve the `IntoArrow` type?
case ArrowType(_, codomain) => codomain.uncons.map(_._1).getOrElse(t)
case _ => t
}
locations
.pointFieldLocation(name, op.name.value, op)
.as(Some(IntoArrowRaw(op.name.value, resolvedType, arguments)))
}
case t =>
t.properties
.get(op.name.value)
.fold(
report(
op,
s"Expected scope type to resolve an arrow '${op.name.value}' or a type with this property. Got: $rootT"
).as(None)
)(t => State.pure(Some(FunctorRaw(op.name.value, t))))
}
}
// TODO actually it's stateless, exists there just for reporting needs
override def resolveCopy(
rootT: Type,
@ -205,7 +252,9 @@ class TypesInterpreter[S[_], X](implicit
if (expected.acceptsValueOf(givenType)) State.pure(true)
else {
(expected, givenType) match {
case (StructType(n, valueFields), StructType(_, typeFields)) =>
case (valueNamedType: NamedType, typeNamedType: NamedType) =>
val valueFields = valueNamedType.fields
val typeFields = typeNamedType.fields
// value can have more fields
if (valueFields.length < typeFields.length) {
report(
@ -217,7 +266,7 @@ class TypesInterpreter[S[_], X](implicit
typeFields.lookup(name) match {
case Some(t) =>
val nextToken = extractToken(token match {
case StructValueToken(_, fields) =>
case NamedValueToken(_, fields) =>
fields.lookup(name).getOrElse(token)
case t => t
})
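Illustrative sketch of the new resolveArrow, with a hypothetical ability and arrow name: the head of the arrow's codomain becomes the property type, and the call arguments are carried along in the IntoArrowRaw.
val rootT = AbilityType(
  "SomeAb",
  NonEmptyMap.of[String, Type](
    "someArrow" -> ArrowType(ProductType(ScalarType.string :: Nil), ProductType(ScalarType.string :: Nil))
  )
)
// resolveArrow(rootT, IntoArrow(<"someArrow" name token>, <one argument token>), args) yields
// Some(IntoArrowRaw("someArrow", ScalarType.string, args))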

View File

@ -54,7 +54,7 @@ object CompareTypes {
case _ => Double.NaN
}
private def compareStructs(
private def compareNamed(
lfNEM: NonEmptyMap[String, Type],
rfNEM: NonEmptyMap[String, Type]
): Double = {
@ -127,8 +127,8 @@ object CompareTypes {
case (x: OptionType, y: StreamType) => apply(x.element, y.element)
case (x: OptionType, y: ArrayType) => apply(x.element, y.element)
case (x: StreamType, y: StreamType) => apply(x.element, y.element)
case (StructType(_, lFields), StructType(_, rFields)) =>
compareStructs(lFields, rFields)
case (lnt: AbilityType, rnt: AbilityType) => compareNamed(lnt.fields, rnt.fields)
case (lnt: StructType, rnt: StructType) => compareNamed(lnt.fields, rnt.fields)
// Products
case (l: ProductType, r: ProductType) => compareProducts(l, r)
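A hedged example of the renamed comparison: compareNamed only looks at the field maps, so two named types with identical fields compare as equal regardless of their names, and it is reached only for ability-ability or struct-struct pairs; an ability paired with a struct falls through to the remaining cases.
val fields = NonEmptyMap.of[String, Type]("str" -> ScalarType.string)
// Identical field maps compare as equal; the names "A" and "B" are not part of the comparison:
CompareTypes(AbilityType("A", fields), AbilityType("B", fields)) // expected to be 0.0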

View File

@ -228,13 +228,39 @@ case class OptionType(element: Type) extends BoxType {
override def withElement(t: Type): BoxType = copy(element = t)
}
sealed trait NamedType extends Type {
def name: String
def fields: NonEmptyMap[String, Type]
}
// Struct is an unordered collection of labelled types
case class StructType(name: String, fields: NonEmptyMap[String, Type]) extends DataType {
case class StructType(name: String, fields: NonEmptyMap[String, Type]) extends DataType with NamedType {
override def toString: String =
s"$name{${fields.map(_.toString).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")}}"
}
// Ability is an unordered collection of labelled types and arrows
case class AbilityType(name: String, fields: NonEmptyMap[String, Type]) extends NamedType {
lazy val arrows: Map[String, ArrowType] = fields.toNel.collect {
case (name, at@ArrowType(_, _)) => (name, at)
}.toMap
lazy val abilities: List[(String, AbilityType)] = fields.toNel.collect {
case (name, at@AbilityType(_, _)) => (name, at)
}
lazy val variables: List[(String, Type)] = fields.toNel.filter {
case (_, AbilityType(_, _)) => false
case (_, ArrowType(_, _)) => false
case (_, _) => true
}
override def toString: String =
s"scope $name{${fields.map(_.toString).toNel.toList.map(kv => kv._1 + ": " + kv._2).mkString(", ")}}"
}
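// Illustration with hypothetical fields: for
//   AbilityType("SomeAb", NonEmptyMap.of("someArrow" -> <some ArrowType>, "str" -> ScalarType.string))
// `arrows` contains only "someArrow", `variables` contains only "str", and any nested
// AbilityType field would appear in `abilities`.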
/**
* ArrowType is a profunctor pointing from its domain to its codomain.
* Profunctor means variance: Arrow is contravariant on domain, and covariant on codomain.