Mirror of https://github.com/fluencelabs/aqua.git (synced 2024-12-04 22:50:18 +00:00)

feat(compiler): Add boolean algebra [fixes LNG-211] (#814)

Commit: a5b6102422
Parent: fabf8d7d61
integration-tests/aqua/examples/boolAlgebra.aqua (new file, 40 lines)

@@ -0,0 +1,40 @@
aqua Bool

export main, Effector

service Effector("effector"):
    effect(name: string) -> bool

func foo(x: i8) -> bool:
    y = x + 1
    <- y < 5

func bar(x: i8) -> i8:
    y = x - 1
    <- y

func main(peer: string) -> []bool:
    res: *bool

    on peer:
        a = 1 + 2
        b = 2 - 1
        res <<- true || false && true -- true
        res <<- (true || false) && true -- true
        res <<- foo(3) && b > 0 || a > 4 -- true
        res <<- bar(a) > 2 || true -- true
        res <<- foo(4) && bar(2) < 2 -- false
        res <<- !foo(10) && !!true -- true
        res <<- !(bar(2) < 1) || !!(a < 2) -- true

        -- Effector is only registered on init_peer
        res <<- true || Effector.effect("impossible") -- true
        res <<- !!false && Effector.effect("impossible") -- false
        res <<- foo(0) || Effector.effect("impossible") -- true
        res <<- foo(10) && Effector.effect("impossible") -- false
        res <<- Effector.effect("true") || true -- true
        res <<- Effector.effect("true") && false -- false
        res <<- !foo(10) || Effector.effect("impossible") -- true
        res <<- !(1 < 2) && !Effector.effect("impossible") -- false

    <- res
@ -1,6 +1,6 @@
|
||||
import { Fluence, IFluenceClient, createClient } from '@fluencelabs/js-client.api';
|
||||
import "@fluencelabs/js-client.node"
|
||||
import {getObjAssignCall, getObjCall, getObjRelayCall} from "../examples/objectCall.js";
|
||||
import '@fluencelabs/js-client.node';
|
||||
import { getObjAssignCall, getObjCall, getObjRelayCall } from '../examples/objectCall.js';
|
||||
import { callArrowCall, reproArgsBug426Call } from '../examples/callArrowCall.js';
|
||||
import { dataAliasCall } from '../examples/dataAliasCall.js';
|
||||
import { onCall } from '../examples/onCall.js';
|
||||
@ -17,10 +17,13 @@ import {
|
||||
nilLengthCall,
|
||||
nilLiteralCall,
|
||||
returnNilCall,
|
||||
returnNoneCall, streamAssignmentCall,
|
||||
returnNoneCall,
|
||||
streamAssignmentCall,
|
||||
streamCall,
|
||||
streamFunctorCall, streamIntFunctorCall, streamJoinCall,
|
||||
streamReturnFromInnerFunc
|
||||
streamFunctorCall,
|
||||
streamIntFunctorCall,
|
||||
streamJoinCall,
|
||||
streamReturnFromInnerFunc,
|
||||
} from '../examples/streamCall.js';
|
||||
import { topologyBug205Call, topologyBug394Call, topologyBug427Call, topologyCall } from '../examples/topologyCall.js';
|
||||
import { foldJoinCall } from '../examples/foldJoinCall.js';
|
||||
@ -28,6 +31,7 @@ import {registerHandlers, returnNull, returnOptionalCall, useOptionalCall} from
|
||||
import { viaArrCall, viaOptCall, viaOptNullCall, viaStreamCall } from '../examples/viaCall.js';
|
||||
import { nestedFuncsCall } from '../examples/nestedFuncsCall.js';
|
||||
import { assignmentCall } from '../examples/assignment.js';
|
||||
import { boolAlgebraCall } from '../examples/boolAlgebra.js';
|
||||
import { tryCatchCall } from '../examples/tryCatchCall.js';
|
||||
import { tryOtherwiseCall } from '../examples/tryOtherwiseCall.js';
|
||||
import { coCall } from '../examples/coCall.js';
|
||||
@ -46,58 +50,59 @@ import {streamCallbackCall} from '../examples/streamCallback.js';
|
||||
import { streamResCall } from '../examples/streamRestrictionsCall.js';
|
||||
import { joinIdxCall, joinIdxLocalCall, joinIdxRelayCall } from '../examples/joinCall.js';
|
||||
import { recursiveStreamsCall } from '../examples/recursiveStreamsCall.js';
|
||||
import {
|
||||
arraySugarCall,
|
||||
bugLNG59Call,
|
||||
optionSugarCall,
|
||||
streamSugarCall,
|
||||
} from '../examples/collectionSugarCall.js';
|
||||
import { arraySugarCall, bugLNG59Call, optionSugarCall, streamSugarCall } from '../examples/collectionSugarCall.js';
|
||||
import { funcsCall } from '../examples/funcsCall.js';
|
||||
import { nestedDataCall } from '../examples/nestedDataCall.js';
|
||||
import {mathTest1Call, mathTest2Call, mathTestI16Call, mathTestI32Call, mathTestI64Call, mathTestU64Call} from '../examples/mathCall.js';
|
||||
import {
|
||||
mathTest1Call,
|
||||
mathTest2Call,
|
||||
mathTestI16Call,
|
||||
mathTestI32Call,
|
||||
mathTestI64Call,
|
||||
mathTestU64Call,
|
||||
} from '../examples/mathCall.js';
|
||||
import { lng58Bug } from '../compiled/examples/closures.js';
|
||||
import { config, isEphemeral } from '../config.js';
|
||||
import {bugLng79Call} from "../examples/canonCall.js";
|
||||
import {bugLng119Call} from "../examples/functorsCall.js";
|
||||
import {returnArrowCall, returnArrowChainCall} from "../examples/returnArrowCall.js";
|
||||
import { bugLng79Call } from '../examples/canonCall.js';
|
||||
import { bugLng119Call } from '../examples/functorsCall.js';
|
||||
import { returnArrowCall, returnArrowChainCall } from '../examples/returnArrowCall.js';
|
||||
|
||||
var selfPeerId: string;
|
||||
var peer1: IFluenceClient;
|
||||
var peer2: IFluenceClient;
|
||||
|
||||
export const relay1 = config.relays[0]
|
||||
const relayPeerId1 = relay1.peerId
|
||||
export const relay2 = config.relays[1]
|
||||
const relayPeerId2 = relay2.peerId
|
||||
export const relay1 = config.relays[0];
|
||||
const relayPeerId1 = relay1.peerId;
|
||||
export const relay2 = config.relays[1];
|
||||
const relayPeerId2 = relay2.peerId;
|
||||
|
||||
import log from 'loglevel';
|
||||
import {abilityCall} from "../examples/abilityCall";
|
||||
import { abilityCall } from '../examples/abilityCall';
|
||||
// log.setDefaultLevel("debug")
|
||||
|
||||
async function start() {
|
||||
console.log("CONNECTING TO FIRST:")
|
||||
console.log('CONNECTING TO FIRST:');
|
||||
Fluence.onConnectionStateChange((s) => {
|
||||
console.log(s)
|
||||
})
|
||||
await Fluence.connect(relay1)
|
||||
console.log(s);
|
||||
});
|
||||
await Fluence.connect(relay1);
|
||||
const cl = await Fluence.getClient();
|
||||
peer1 = cl;
|
||||
selfPeerId = cl.getPeerId()
|
||||
console.log("CONNECTED")
|
||||
selfPeerId = cl.getPeerId();
|
||||
console.log('CONNECTED');
|
||||
|
||||
peer2 = await createClient(relay2)
|
||||
console.log("CONNECTING TO SECOND:")
|
||||
peer2 = await createClient(relay2);
|
||||
console.log('CONNECTING TO SECOND:');
|
||||
peer2.onConnectionStateChange((s) => {
|
||||
console.log(s)
|
||||
})
|
||||
await peer2.connect()
|
||||
console.log("CONNECTED")
|
||||
console.log(s);
|
||||
});
|
||||
await peer2.connect();
|
||||
console.log('CONNECTED');
|
||||
}
|
||||
|
||||
async function stop() {
|
||||
await Fluence.disconnect();
|
||||
if (peer2) {
|
||||
|
||||
await peer2.disconnect();
|
||||
}
|
||||
}
|
||||
@ -116,7 +121,6 @@ describe('Testing examples', () => {
|
||||
|
||||
afterAll(async () => {
|
||||
await stop();
|
||||
|
||||
});
|
||||
|
||||
it('callArrow.aqua args bug 426', async () => {
|
||||
@ -135,11 +139,15 @@ describe('Testing examples', () => {
|
||||
it('returnArrow.aqua chain', async () => {
|
||||
let argResult = await returnArrowChainCall();
|
||||
|
||||
expect(argResult).toStrictEqual(
|
||||
["first", "firstarg for func1 literal",
|
||||
"second", "secondarg for func2 second literal",
|
||||
"third", "thirdarg for func2 second literal",
|
||||
"fourth", "fourth from second literal"
|
||||
expect(argResult).toStrictEqual([
|
||||
'first',
|
||||
'firstarg for func1 literal',
|
||||
'second',
|
||||
'secondarg for func2 second literal',
|
||||
'third',
|
||||
'thirdarg for func2 second literal',
|
||||
'fourth',
|
||||
'fourth from second literal',
|
||||
]);
|
||||
});
|
||||
|
||||
@ -191,24 +199,24 @@ describe('Testing examples', () => {
|
||||
});
|
||||
|
||||
it('stream.aqua return stream from inner func', async () => {
|
||||
let streamResult = await streamReturnFromInnerFunc()
|
||||
let streamResult = await streamReturnFromInnerFunc();
|
||||
expect(streamResult).toEqual([1, 2, 3, 4]);
|
||||
})
|
||||
});
|
||||
|
||||
it('stream.aqua functor', async () => {
|
||||
let streamResult = await streamFunctorCall()
|
||||
expect(streamResult).toEqual("123");
|
||||
})
|
||||
let streamResult = await streamFunctorCall();
|
||||
expect(streamResult).toEqual('123');
|
||||
});
|
||||
|
||||
it('stream.aqua assignment', async () => {
|
||||
let streamResult = await streamAssignmentCall()
|
||||
expect(streamResult).toEqual("333");
|
||||
})
|
||||
let streamResult = await streamAssignmentCall();
|
||||
expect(streamResult).toEqual('333');
|
||||
});
|
||||
|
||||
it('stream.aqua nil literal', async () => {
|
||||
let result = await nilLiteralCall()
|
||||
let result = await nilLiteralCall();
|
||||
expect(result).toEqual([]);
|
||||
})
|
||||
});
|
||||
|
||||
it('collectionSugar array', async () => {
|
||||
let result = await arraySugarCall();
|
||||
@ -219,35 +227,38 @@ describe('Testing examples', () => {
|
||||
});
|
||||
|
||||
it('object creation getObj', async () => {
|
||||
let result = await getObjCall()
|
||||
let result = await getObjCall();
|
||||
expect(result).toEqual({
|
||||
str: "some str",
|
||||
str: 'some str',
|
||||
num: 5,
|
||||
inner: {
|
||||
arr: ["a", "b", "c"],
|
||||
num: 6
|
||||
}
|
||||
arr: ['a', 'b', 'c'],
|
||||
num: 6,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('object creation getObjAssign', async () => {
|
||||
let result = await getObjAssignCall()
|
||||
expect(result).toEqual([{
|
||||
str: "first str",
|
||||
let result = await getObjAssignCall();
|
||||
expect(result).toEqual([
|
||||
{
|
||||
str: 'first str',
|
||||
num: 5,
|
||||
inner: {
|
||||
arr: ["d", "e", "f"],
|
||||
num: 7
|
||||
}
|
||||
}, {
|
||||
str: "some str",
|
||||
arr: ['d', 'e', 'f'],
|
||||
num: 7,
|
||||
},
|
||||
},
|
||||
{
|
||||
str: 'some str',
|
||||
num: 6,
|
||||
inner: {
|
||||
arr: ["a", "b", "c"],
|
||||
num: 7
|
||||
}
|
||||
arr: ['a', 'b', 'c'],
|
||||
num: 7,
|
||||
},
|
||||
1]);
|
||||
},
|
||||
1,
|
||||
]);
|
||||
});
|
||||
|
||||
it('collectionSugar stream', async () => {
|
||||
@ -340,7 +351,7 @@ describe('Testing examples', () => {
|
||||
|
||||
it('ability.aqua', async () => {
|
||||
let result = await abilityCall();
|
||||
expect(result).toStrictEqual(['declare_const123', "efre123", "declare_const123", 12]);
|
||||
expect(result).toStrictEqual(['declare_const123', 'efre123', 'declare_const123', 12]);
|
||||
});
|
||||
|
||||
it('functors.aqua LNG-119 bug', async () => {
|
||||
@ -373,6 +384,27 @@ describe('Testing examples', () => {
|
||||
expect(assignmentResult).toEqual(['abc', 'hello']);
|
||||
});
|
||||
|
||||
it('boolAlgebra.aqua', async () => {
|
||||
let boolAlgebraResult = await boolAlgebraCall(relayPeerId1);
|
||||
expect(boolAlgebraResult).toEqual([
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
]);
|
||||
});
|
||||
|
||||
it('join.aqua local', async () => {
|
||||
let joinLocalCallResult = await joinIdxLocalCall(relayPeerId1);
|
||||
expect(joinLocalCallResult.length).toBeGreaterThanOrEqual(2);
|
||||
@ -389,14 +421,14 @@ describe('Testing examples', () => {
|
||||
});
|
||||
|
||||
it('stream.aqua nil length', async () => {
|
||||
let result = await nilLengthCall()
|
||||
let result = await nilLengthCall();
|
||||
expect(result).toEqual(0);
|
||||
})
|
||||
});
|
||||
|
||||
it('stream.aqua int functor', async () => {
|
||||
let streamResult = await streamIntFunctorCall()
|
||||
expect(streamResult).toEqual("123");
|
||||
})
|
||||
let streamResult = await streamIntFunctorCall();
|
||||
expect(streamResult).toEqual('123');
|
||||
});
|
||||
|
||||
it('streamCan.aqua LNG-63', async () => {
|
||||
let result = await bugLNG63Call();
|
||||
@ -444,9 +476,9 @@ describe('Testing examples', () => {
|
||||
});
|
||||
|
||||
it('stream.aqua join', async () => {
|
||||
let streamResult = await streamJoinCall()
|
||||
expect(streamResult).toEqual("444");
|
||||
})
|
||||
let streamResult = await streamJoinCall();
|
||||
expect(streamResult).toEqual('444');
|
||||
});
|
||||
|
||||
it('funcs.aqua', async () => {
|
||||
let result = await funcsCall();
|
||||
@ -501,22 +533,28 @@ describe('Testing examples', () => {
|
||||
it('onErrorPropagate.aqua', async () => {
|
||||
let call = onPropagateCall(peer2, relay2.peerId);
|
||||
expect(call).rejects.toMatchObject({
|
||||
message: expect.stringContaining("propagated error")
|
||||
})
|
||||
message: expect.stringContaining('propagated error'),
|
||||
});
|
||||
});
|
||||
|
||||
it('onErrorPropagate.aqua nested', async () => {
|
||||
let call = nestedOnPropagateCall(peer2, relay2.peerId, config.relays[3].peerId, config.relays[4].peerId, config.relays[5].peerId);
|
||||
let call = nestedOnPropagateCall(
|
||||
peer2,
|
||||
relay2.peerId,
|
||||
config.relays[3].peerId,
|
||||
config.relays[4].peerId,
|
||||
config.relays[5].peerId,
|
||||
);
|
||||
expect(call).rejects.toMatchObject({
|
||||
message: expect.stringContaining("propagated error")
|
||||
})
|
||||
message: expect.stringContaining('propagated error'),
|
||||
});
|
||||
});
|
||||
|
||||
it('onErrorPropagate.aqua sequential', async () => {
|
||||
let call = seqOnPropagateCall(peer2, relay2.peerId, config.relays[3].peerId, config.relays[4].peerId);
|
||||
expect(call).rejects.toMatchObject({
|
||||
message: expect.stringContaining("propagated error")
|
||||
})
|
||||
message: expect.stringContaining('propagated error'),
|
||||
});
|
||||
});
|
||||
|
||||
it('complex.aqua', async () => {
|
||||
@ -525,14 +563,14 @@ describe('Testing examples', () => {
|
||||
});
|
||||
|
||||
it('object creation getObjRelay', async () => {
|
||||
let result = await getObjRelayCall()
|
||||
let result = await getObjRelayCall();
|
||||
expect(result).toEqual({
|
||||
str: "some str",
|
||||
str: 'some str',
|
||||
num: 5,
|
||||
inner: {
|
||||
arr: ["a", "b", "c"],
|
||||
num: 6
|
||||
}
|
||||
arr: ['a', 'b', 'c'],
|
||||
num: 6,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
@ -560,7 +598,12 @@ describe('Testing examples', () => {
|
||||
});
|
||||
|
||||
it('topology.aqua bug 394', async () => {
|
||||
let topologyResult = await topologyBug394Call(peer1.getPeerId(), relay1.peerId, peer2.getPeerId(), relay2.peerId);
|
||||
let topologyResult = await topologyBug394Call(
|
||||
peer1.getPeerId(),
|
||||
relay1.peerId,
|
||||
peer2.getPeerId(),
|
||||
relay2.peerId,
|
||||
);
|
||||
|
||||
expect(topologyResult).toEqual(selfPeerId);
|
||||
});
|
||||
@ -582,8 +625,8 @@ describe('Testing examples', () => {
|
||||
|
||||
it('closureReturnRename.aqua bug LNG-193', async () => {
|
||||
let result = await lng193BugCall();
|
||||
expect(result).toEqual((1 + 42) + (2 + 42) + (3 + 42) + (4 + 42))
|
||||
}, 20000)
|
||||
expect(result).toEqual(1 + 42 + (2 + 42) + (3 + 42) + (4 + 42));
|
||||
}, 20000);
|
||||
|
||||
it('closures.aqua', async () => {
|
||||
let closuresResult = await closuresCall();
|
||||
@ -618,6 +661,4 @@ describe('Testing examples', () => {
|
||||
let joinCallResult = await joinIdxCall(relayPeerId1);
|
||||
expect(joinCallResult.length).toBeGreaterThanOrEqual(2);
|
||||
}, 10000);
|
||||
|
||||
|
||||
});
|
||||
|
@ -1,9 +1,5 @@
|
||||
import {
|
||||
krasnodar,
|
||||
stage,
|
||||
testNet,
|
||||
} from "@fluencelabs/fluence-network-environment";
|
||||
import { local } from "./local-nodes.js";
|
||||
import { krasnodar, stage, testNet } from '@fluencelabs/fluence-network-environment';
|
||||
import { local } from './local-nodes.js';
|
||||
|
||||
declare global {
|
||||
namespace NodeJS {
|
||||
@ -15,13 +11,13 @@ declare global {
|
||||
|
||||
function setConfig(env) {
|
||||
switch (env) {
|
||||
case "krasnodar":
|
||||
case 'krasnodar':
|
||||
return { config: krasnodarConfig, isEphemeral: false };
|
||||
case "testnet":
|
||||
case 'testnet':
|
||||
return { config: testNetConfig, isEphemeral: false };
|
||||
case "ephemeral":
|
||||
case 'ephemeral':
|
||||
return { config: null, isEphemeral: true };
|
||||
case "local":
|
||||
case 'local':
|
||||
return { config: localConfig, isEphemeral: false };
|
||||
default:
|
||||
return { config: stageConfig, isEphemeral: false };
|
||||
@ -30,42 +26,24 @@ function setConfig(env) {
|
||||
|
||||
export const krasnodarConfig = {
|
||||
relays: krasnodar,
|
||||
externalAddressesRelay1: [
|
||||
"/ip4/164.90.171.139/tcp/7770",
|
||||
"/ip4/164.90.171.139/tcp/9990/ws",
|
||||
],
|
||||
externalAddressesRelay2: [
|
||||
"/ip4/164.90.164.229/tcp/7001",
|
||||
"/ip4/164.90.164.229/tcp/9001/ws",
|
||||
],
|
||||
externalAddressesRelay1: ['/ip4/164.90.171.139/tcp/7770', '/ip4/164.90.171.139/tcp/9990/ws'],
|
||||
externalAddressesRelay2: ['/ip4/164.90.164.229/tcp/7001', '/ip4/164.90.164.229/tcp/9001/ws'],
|
||||
tryCatchError:
|
||||
"Local service error, ret_code is 1, error message is '\"Service with id 'unex' not found (function getStr)\"'",
|
||||
};
|
||||
|
||||
export const stageConfig = {
|
||||
relays: stage,
|
||||
externalAddressesRelay1: [
|
||||
"/ip4/134.209.186.43/tcp/7001",
|
||||
"/ip4/134.209.186.43/tcp/9001/ws",
|
||||
],
|
||||
externalAddressesRelay2: [
|
||||
"/ip4/134.209.186.43/tcp/7770",
|
||||
"/ip4/134.209.186.43/tcp/9990/ws",
|
||||
],
|
||||
externalAddressesRelay1: ['/ip4/134.209.186.43/tcp/7001', '/ip4/134.209.186.43/tcp/9001/ws'],
|
||||
externalAddressesRelay2: ['/ip4/134.209.186.43/tcp/7770', '/ip4/134.209.186.43/tcp/9990/ws'],
|
||||
tryCatchError:
|
||||
"Local service error, ret_code is 1, error message is '\"Service with id 'unex' not found (function getStr)\"'",
|
||||
};
|
||||
|
||||
export const testNetConfig = {
|
||||
relays: testNet,
|
||||
externalAddressesRelay1: [
|
||||
"/ip4/165.227.164.206/tcp/7001",
|
||||
"/ip4/165.227.164.206/tcp/9001/ws",
|
||||
],
|
||||
externalAddressesRelay2: [
|
||||
"/ip4/142.93.169.49/tcp/7001",
|
||||
"/ip4/142.93.169.49/tcp/9001/ws",
|
||||
],
|
||||
externalAddressesRelay1: ['/ip4/165.227.164.206/tcp/7001', '/ip4/165.227.164.206/tcp/9001/ws'],
|
||||
externalAddressesRelay2: ['/ip4/142.93.169.49/tcp/7001', '/ip4/142.93.169.49/tcp/9001/ws'],
|
||||
tryCatchError:
|
||||
"Local service error, ret_code is 1, error message is '\"Service with id 'unex' not found (function getStr)\"'",
|
||||
};
|
||||
@ -84,19 +62,19 @@ export const testNetConfig = {
|
||||
export const localConfig = {
|
||||
relays: local,
|
||||
externalAddressesRelay1: [
|
||||
"/ip4/10.50.10.10/tcp/7771",
|
||||
"/ip4/10.50.10.10/tcp/9991/ws",
|
||||
"/dns4/nox-1/tcp/7771",
|
||||
"/dns4/nox-1/tcp/9991/ws",
|
||||
'/ip4/10.50.10.10/tcp/7771',
|
||||
'/ip4/10.50.10.10/tcp/9991/ws',
|
||||
'/dns4/nox-1/tcp/7771',
|
||||
'/dns4/nox-1/tcp/9991/ws',
|
||||
],
|
||||
externalAddressesRelay2: [
|
||||
"/ip4/10.50.10.60/tcp/7776",
|
||||
"/ip4/10.50.10.60/tcp/9996/ws",
|
||||
"/dns4/nox-6/tcp/7776",
|
||||
"/dns4/nox-6/tcp/9996/ws",
|
||||
'/ip4/10.50.10.60/tcp/7776',
|
||||
'/ip4/10.50.10.60/tcp/9996/ws',
|
||||
'/dns4/nox-6/tcp/7776',
|
||||
'/dns4/nox-6/tcp/9996/ws',
|
||||
],
|
||||
tryCatchError:
|
||||
"Local service error, ret_code is 1, error message is '\"Service with id 'unex' not found (function getStr)\"'",
|
||||
};
|
||||
|
||||
export const { config, isEphemeral } = setConfig("local");
|
||||
export const { config, isEphemeral } = setConfig('local');
|
||||
|
integration-tests/src/examples/boolAlgebra.ts (new file, 12 lines)

@@ -0,0 +1,12 @@
import { main, registerEffector } from '../compiled/examples/boolAlgebra.js';

export async function boolAlgebraCall(relay: string): Promise<boolean[]> {
    registerEffector({
        effect(name, _) {
            if (name == 'true') return Promise.resolve(true);
            else return Promise.reject(`unknown effect: ${name}`);
        },
    });

    return await main(relay);
}
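Note: the handler above only resolves the "true" effect and rejects any other name. The Aqua example still returns the expected booleans because, per the comment in boolAlgebra.aqua, Effector is only registered on the init peer and every Effector.effect("impossible") call sits behind a short-circuited && or ||, so it is never actually executed.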
@ -3,75 +3,75 @@ package aqua.model.inline
|
||||
import aqua.model.{EmptyModel, OpModel, ParModel, SeqModel}
|
||||
import aqua.raw.ops.RawTag
|
||||
import aqua.raw.value.ValueRaw
|
||||
import aqua.model.inline.Inline.MergeMode
|
||||
import aqua.model.inline.Inline.MergeMode.*
|
||||
|
||||
import cats.Monoid
|
||||
import cats.data.Chain
|
||||
import cats.data.Chain.*
|
||||
import cats.syntax.option.*
|
||||
|
||||
import scala.collection.immutable.ListMap
|
||||
|
||||
sealed trait MergeMode
|
||||
object SeqMode extends MergeMode
|
||||
object ParMode extends MergeMode
|
||||
|
||||
/**
|
||||
* @param flattenValues values that need to be resolved before `predo`.
|
||||
* ListMap for keeping order of values (mostly for debugging purposes)
|
||||
* Inlining result
|
||||
*
|
||||
* @param predo operations tree
|
||||
* @param mergeMode how `flattenValues` and `predo` must be merged
|
||||
* @param mergeMode how `predo` must be merged
|
||||
*/
|
||||
private[inline] case class Inline(
|
||||
flattenValues: ListMap[String, ValueRaw] = ListMap.empty,
|
||||
predo: Chain[OpModel.Tree] = Chain.empty,
|
||||
mergeMode: MergeMode = ParMode
|
||||
) {
|
||||
|
||||
def desugar: Inline = {
|
||||
val desugaredPredo =
|
||||
predo.toList match {
|
||||
case Nil => Chain.empty
|
||||
case x :: Nil =>
|
||||
Chain.one(x)
|
||||
case l =>
|
||||
val desugaredPredo = predo match {
|
||||
case Chain.nil | _ ==: Chain.nil => predo
|
||||
case chain =>
|
||||
mergeMode match
|
||||
case SeqMode =>
|
||||
val wrapped = SeqModel.wrap(l: _*)
|
||||
val wrapped = SeqModel.wrap(chain)
|
||||
wrapped match
|
||||
case EmptyModel.leaf => Chain.empty
|
||||
case _ => Chain.one(wrapped)
|
||||
case ParMode => Chain.one(ParModel.wrap(l: _*))
|
||||
case ParMode => Chain.one(ParModel.wrap(chain))
|
||||
}
|
||||
|
||||
Inline(
|
||||
flattenValues,
|
||||
desugaredPredo
|
||||
)
|
||||
Inline(desugaredPredo)
|
||||
}
|
||||
|
||||
def mergeWith(inline: Inline, mode: MergeMode): Inline = {
|
||||
val left = desugar
|
||||
val right = inline.desugar
|
||||
|
||||
Inline(left.flattenValues ++ right.flattenValues, left.predo ++ right.predo, mode)
|
||||
Inline(left.predo ++ right.predo, mode)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO may not be needed there
|
||||
private[inline] object Inline {
|
||||
val empty: Inline = Inline()
|
||||
|
||||
def preload(pairs: (String, ValueRaw)*): Inline = Inline(ListMap.from(pairs))
|
||||
enum MergeMode {
|
||||
case SeqMode
|
||||
case ParMode
|
||||
}
|
||||
|
||||
val empty: Inline = Inline()
|
||||
|
||||
def tree(tr: OpModel.Tree): Inline = Inline(predo = Chain.one(tr))
|
||||
|
||||
given Monoid[Inline] with
|
||||
override val empty: Inline = Inline()
|
||||
override val empty: Inline = Inline.empty
|
||||
|
||||
override def combine(a: Inline, b: Inline): Inline =
|
||||
Inline(a.flattenValues ++ b.flattenValues, a.predo ++ b.predo)
|
||||
// TODO: Is it ok to ignore merge mode?
|
||||
Inline(a.predo ++ b.predo)
|
||||
|
||||
def parDesugarPrefix(ops: List[OpModel.Tree]): Option[OpModel.Tree] = ops match {
|
||||
case Nil => None
|
||||
case x :: Nil => Option(x)
|
||||
case _ => Option(ParModel.wrap(ops: _*))
|
||||
def parDesugarPrefix(ops: List[OpModel.Tree]): Option[OpModel.Tree] =
|
||||
ops match {
|
||||
case Nil => none
|
||||
case x :: Nil => x.some
|
||||
case _ => ParModel.wrap(ops).some
|
||||
}
|
||||
|
||||
def parDesugarPrefixOpt(ops: Option[OpModel.Tree]*): Option[OpModel.Tree] =
|
||||
|
@ -1,19 +1,29 @@
|
||||
package aqua.model.inline
|
||||
|
||||
import aqua.model.{CallModel, CallServiceModel, LiteralModel, OpModel, SeqModel, ValueModel, VarModel}
|
||||
import aqua.model.{
|
||||
CallModel,
|
||||
CallServiceModel,
|
||||
LiteralModel,
|
||||
OpModel,
|
||||
SeqModel,
|
||||
ValueModel,
|
||||
VarModel
|
||||
}
|
||||
import aqua.model.inline.raw.RawInliner
|
||||
import cats.data.Chain
|
||||
import aqua.model.inline.state.{Arrows, Exports, Mangler}
|
||||
import aqua.raw.value.{LiteralRaw, MakeStructRaw}
|
||||
import cats.data.{NonEmptyMap, State}
|
||||
import aqua.model.inline.Inline
|
||||
import aqua.model.inline.RawValueInliner.{unfold, valueToModel}
|
||||
import aqua.types.ScalarType
|
||||
|
||||
import cats.data.Chain
|
||||
import cats.data.{NonEmptyMap, State}
|
||||
import cats.syntax.traverse.*
|
||||
import cats.syntax.monoid.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.foldable.*
|
||||
|
||||
object MakeStructRawInliner extends RawInliner[MakeStructRaw] {
|
||||
|
||||
@ -45,15 +55,14 @@ object MakeStructRawInliner extends RawInliner[MakeStructRaw] {
|
||||
name <- Mangler[S].findAndForbidName(raw.structType.name + "_obj")
|
||||
foldedFields <- raw.fields.nonEmptyTraverse(unfold(_))
|
||||
varModel = VarModel(name, raw.baseType)
|
||||
valsInline = foldedFields.toSortedMap.values.map(_._2).fold(Inline.empty)(_ |+| _).desugar
|
||||
fields = foldedFields.map(_._1)
|
||||
valsInline = foldedFields.foldMap { case (_, inline) => inline }.desugar
|
||||
fields = foldedFields.map { case (vm, _) => vm }
|
||||
objCreation <- createObj(fields, varModel)
|
||||
} yield {
|
||||
(
|
||||
varModel,
|
||||
Inline(
|
||||
valsInline.flattenValues,
|
||||
Chain.one(SeqModel.wrap((valsInline.predo :+ objCreation).toList: _*))
|
||||
Chain.one(SeqModel.wrap(valsInline.predo :+ objCreation))
|
||||
)
|
||||
)
|
||||
}
|
||||
|
@ -1,11 +1,22 @@
|
||||
package aqua.model.inline
|
||||
|
||||
import aqua.model.inline.state.{Arrows, Counter, Exports, Mangler}
|
||||
import aqua.model.inline.Inline.MergeMode.*
|
||||
import aqua.model.*
|
||||
import aqua.model.inline.raw.{ApplyFunctorRawInliner, ApplyGateRawInliner, ApplyPropertiesRawInliner, CallArrowRawInliner, CollectionRawInliner, MakeAbilityRawInliner}
|
||||
import aqua.model.inline.raw.{
|
||||
ApplyBinaryOpRawInliner,
|
||||
ApplyFunctorRawInliner,
|
||||
ApplyGateRawInliner,
|
||||
ApplyPropertiesRawInliner,
|
||||
ApplyUnaryOpRawInliner,
|
||||
CallArrowRawInliner,
|
||||
CollectionRawInliner,
|
||||
MakeAbilityRawInliner
|
||||
}
|
||||
import aqua.raw.ops.*
|
||||
import aqua.raw.value.*
|
||||
import aqua.types.{ArrayType, LiteralType, OptionType, StreamType}
|
||||
|
||||
import cats.syntax.traverse.*
|
||||
import cats.syntax.monoid.*
|
||||
import cats.syntax.functor.*
|
||||
@ -13,6 +24,7 @@ import cats.syntax.flatMap.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.instances.list.*
|
||||
import cats.data.{Chain, State, StateT}
|
||||
import cats.syntax.applicative.*
|
||||
import scribe.Logging
|
||||
|
||||
object RawValueInliner extends Logging {
|
||||
@ -42,8 +54,14 @@ object RawValueInliner extends Logging {
|
||||
case dr: MakeStructRaw =>
|
||||
MakeStructRawInliner(dr, propertiesAllowed)
|
||||
|
||||
case sr: AbilityRaw =>
|
||||
MakeAbilityRawInliner(sr, propertiesAllowed)
|
||||
case ar: AbilityRaw =>
|
||||
MakeAbilityRawInliner(ar, propertiesAllowed)
|
||||
|
||||
case auor: ApplyUnaryOpRaw =>
|
||||
ApplyUnaryOpRawInliner(auor, propertiesAllowed)
|
||||
|
||||
case abbor: ApplyBinaryOpRaw =>
|
||||
ApplyBinaryOpRawInliner(abbor, propertiesAllowed)
|
||||
|
||||
case cr: CallArrowRaw =>
|
||||
CallArrowRawInliner(cr, propertiesAllowed)
|
||||
@ -51,22 +69,11 @@ object RawValueInliner extends Logging {
|
||||
|
||||
private[inline] def inlineToTree[S: Mangler: Exports: Arrows](
|
||||
inline: Inline
|
||||
): State[S, List[OpModel.Tree]] = {
|
||||
inline.flattenValues.toList.traverse { case (name, v) =>
|
||||
valueToModel(v).map {
|
||||
case (vv, Some(op)) =>
|
||||
SeqModel.wrap(op, FlattenModel(vv, name).leaf)
|
||||
|
||||
case (vv, _) =>
|
||||
FlattenModel(vv, name).leaf
|
||||
}
|
||||
}.map { predo =>
|
||||
inline.mergeMode match
|
||||
case SeqMode =>
|
||||
SeqModel.wrap((inline.predo.toList ++ predo): _*) :: Nil
|
||||
case ParMode => inline.predo.toList ::: predo
|
||||
}
|
||||
}
|
||||
): State[S, List[OpModel.Tree]] =
|
||||
(inline.mergeMode match {
|
||||
case SeqMode => SeqModel.wrap(inline.predo) :: Nil
|
||||
case ParMode => inline.predo.toList
|
||||
}).pure
|
||||
|
||||
private[inline] def toModel[S: Mangler: Exports: Arrows](
|
||||
unfoldF: State[S, (ValueModel, Inline)]
|
||||
|
@@ -0,0 +1,111 @@
package aqua.model.inline.raw

import aqua.model.*
import aqua.model.inline.raw.RawInliner
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.raw.value.{AbilityRaw, LiteralRaw, MakeStructRaw}
import cats.data.{NonEmptyList, NonEmptyMap, State}
import aqua.model.inline.Inline
import aqua.model.inline.RawValueInliner.{unfold, valueToModel}
import aqua.types.{ArrowType, ScalarType}
import aqua.raw.value.ApplyBinaryOpRaw
import aqua.raw.value.ApplyBinaryOpRaw.Op.*

import cats.data.Chain
import cats.syntax.traverse.*
import cats.syntax.monoid.*
import cats.syntax.functor.*
import cats.syntax.flatMap.*
import cats.syntax.apply.*
import cats.syntax.foldable.*
import cats.syntax.applicative.*

object ApplyBinaryOpRawInliner extends RawInliner[ApplyBinaryOpRaw] {

  override def apply[S: Mangler: Exports: Arrows](
    raw: ApplyBinaryOpRaw,
    propertiesAllowed: Boolean
  ): State[S, (ValueModel, Inline)] = for {
    left <- unfold(raw.left)
    (lmodel, linline) = left
    right <- unfold(raw.right)
    (rmodel, rinline) = right

    result <- (lmodel, rmodel) match {
      // Optimize in case the left value is known at compile time
      case (LiteralModel.Bool(lvalue), _) =>
        (raw.op match {
          case And if !lvalue => (LiteralModel.bool(false), linline)
          case Or if lvalue => (LiteralModel.bool(true), linline)
          case _ => (rmodel, Inline(linline.predo ++ rinline.predo))
        }).pure[State[S, *]]
      // Optimize in case the right value is known at compile time and requires no computation
      case (_, LiteralModel.Bool(rvalue)) if rinline.predo.isEmpty =>
        (raw.op match {
          case And if !rvalue => (LiteralModel.bool(false), linline)
          case Or if rvalue => (LiteralModel.bool(true), linline)
          case _ => (lmodel, linline)
        }).pure[State[S, *]]
      // Produce unoptimized inline
      case _ => fullInline(lmodel, rmodel, linline, rinline, raw.op)
    }
  } yield result

  private def fullInline[S: Mangler: Exports: Arrows](
    lmodel: ValueModel,
    rmodel: ValueModel,
    linline: Inline,
    rinline: Inline,
    op: ApplyBinaryOpRaw.Op
  ): State[S, (ValueModel, Inline)] = {
    val (name, compareWith) = op match {
      case And => ("and", false)
      case Or => ("or", true)
    }

    /**
     * (seq
     *   <left-inline>
     *   (xor
     *     (match <left-res> <compare-with>
     *       (ap <left-res> <res-name>)
     *     )
     *     (seq
     *       <right-inline>
     *       (ap <right-res> <res-name>)
     *     )
     *   )
     * )
     */
    val predo = (resName: String) =>
      SeqModel.wrap(
        linline.predo :+ XorModel.wrap(
          MatchMismatchModel(
            lmodel,
            LiteralModel.bool(compareWith),
            shouldMatch = true
          ).wrap(
            FlattenModel(
              lmodel,
              resName
            ).leaf
          ),
          SeqModel.wrap(
            rinline.predo :+ FlattenModel(
              rmodel,
              resName
            ).leaf
          )
        )
      )

    Mangler[S]
      .findAndForbidName(name)
      .map(resName =>
        (
          VarModel(resName, ScalarType.bool),
          Inline(Chain.one(predo(resName)))
        )
      )
  }
}
@ -10,7 +10,8 @@ import aqua.model.{
|
||||
ValueModel,
|
||||
VarModel
|
||||
}
|
||||
import aqua.model.inline.{Inline, SeqMode}
|
||||
import aqua.model.inline.Inline.MergeMode.*
|
||||
import aqua.model.inline.Inline
|
||||
import aqua.model.inline.state.{Arrows, Exports, Mangler}
|
||||
import aqua.raw.value.{FunctorRaw, ValueRaw}
|
||||
import cats.data.State
|
||||
@ -53,10 +54,12 @@ object ApplyFunctorRawInliner extends Logging {
|
||||
}
|
||||
} yield {
|
||||
val tree = Inline(
|
||||
predo = Chain.one(SeqModel.wrap(
|
||||
predo = Chain.one(
|
||||
SeqModel.wrap(
|
||||
flat,
|
||||
FlattenModel(apVar, resultName).leaf
|
||||
)),
|
||||
)
|
||||
),
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
|
||||
|
@ -121,8 +121,7 @@ object ApplyGateRawInliner extends RawInliner[ApplyGateRaw] with Logging {
|
||||
|
||||
val tree = SeqModel.wrap(idxInline.predo.toList :+ gate)
|
||||
|
||||
val treeInline =
|
||||
Inline(idxInline.flattenValues, predo = Chain.one(tree))
|
||||
val treeInline = Inline(predo = Chain.one(tree))
|
||||
|
||||
(
|
||||
VarModel(uniqueResultName, ArrayType(afr.streamType.element)),
|
||||
|
@ -1,12 +1,22 @@
|
||||
package aqua.model.inline.raw
|
||||
|
||||
import aqua.model.{CallModel, CallServiceModel, LiteralModel, OpModel, SeqModel, ValueModel, VarModel}
|
||||
import aqua.model.inline.{Inline, SeqMode, TagInliner}
|
||||
import aqua.model.{
|
||||
CallModel,
|
||||
CallServiceModel,
|
||||
LiteralModel,
|
||||
OpModel,
|
||||
SeqModel,
|
||||
ValueModel,
|
||||
VarModel
|
||||
}
|
||||
import aqua.model.inline.Inline.MergeMode.*
|
||||
import aqua.model.inline.{Inline, TagInliner}
|
||||
import aqua.model.inline.MakeStructRawInliner.createObj
|
||||
import aqua.model.inline.RawValueInliner.unfold
|
||||
import aqua.model.inline.state.{Arrows, Exports, Mangler}
|
||||
import aqua.raw.value.{IntoCopyRaw, LiteralRaw}
|
||||
import aqua.types.ScalarType
|
||||
|
||||
import cats.data.{Chain, NonEmptyMap, State}
|
||||
import scribe.Logging
|
||||
import cats.syntax.traverse.*
|
||||
@ -14,6 +24,7 @@ import cats.syntax.monoid.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.foldable.*
|
||||
|
||||
object ApplyIntoCopyRawInliner extends Logging {
|
||||
|
||||
@ -45,15 +56,14 @@ object ApplyIntoCopyRawInliner extends Logging {
|
||||
name <- Mangler[S].findAndForbidName(value.name + "_obj_copy")
|
||||
foldedFields <- intoCopy.fields.nonEmptyTraverse(unfold(_))
|
||||
varModel = VarModel(name, value.baseType)
|
||||
valsInline = foldedFields.toSortedMap.values.map(_._2).fold(Inline.empty)(_ |+| _).desugar
|
||||
valsInline = foldedFields.toList.foldMap { case (_, inline) => inline }.desugar
|
||||
fields = foldedFields.map(_._1)
|
||||
objCopy <- copyObj(value, fields, varModel)
|
||||
} yield {
|
||||
(
|
||||
varModel,
|
||||
Inline(
|
||||
valsInline.flattenValues,
|
||||
Chain.one(SeqModel.wrap((valsInline.predo :+ objCopy).toList: _*)),
|
||||
Chain.one(SeqModel.wrap(valsInline.predo :+ objCopy)),
|
||||
SeqMode
|
||||
)
|
||||
)
|
||||
|
@ -20,7 +20,7 @@ import aqua.model.{
|
||||
XorModel
|
||||
}
|
||||
import aqua.model.inline.Inline
|
||||
import aqua.model.inline.{ParMode, SeqMode}
|
||||
import aqua.model.inline.Inline.MergeMode.*
|
||||
import aqua.model.inline.RawValueInliner.unfold
|
||||
import aqua.model.inline.state.{Arrows, Exports, Mangler}
|
||||
import aqua.raw.value.{
|
||||
@ -48,10 +48,14 @@ import aqua.types.{
|
||||
StreamType,
|
||||
Type
|
||||
}
|
||||
|
||||
import cats.Eval
|
||||
import cats.syntax.bifunctor.*
|
||||
import cats.data.{Chain, IndexedStateT, State}
|
||||
import cats.syntax.monoid.*
|
||||
import cats.syntax.traverse.*
|
||||
import cats.syntax.foldable.*
|
||||
import cats.Id
|
||||
import cats.instances.list.*
|
||||
import scribe.Logging
|
||||
|
||||
@ -183,7 +187,6 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
newVI <- ApplyFunctorRawInliner(flatten, f)
|
||||
} yield {
|
||||
newVI._1 -> Inline(
|
||||
inline.flattenValues ++ newVI._2.flattenValues,
|
||||
inline.predo ++ newVI._2.predo,
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
@ -198,7 +201,6 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
newVI <- ApplyIntoCopyRawInliner(varModel, ic)
|
||||
} yield {
|
||||
newVI._1 -> Inline(
|
||||
inline.flattenValues ++ newVI._2.flattenValues,
|
||||
inline.predo ++ newVI._2.predo,
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
@ -218,7 +220,6 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
case (vm @ VarModel(_, _, _), inline) if vm.properties.nonEmpty =>
|
||||
removeProperties(vm).map { case (vf, inlf) =>
|
||||
PropertyRawWithModel(iir, Option(IntoIndexModel(vf.name, t))) -> Inline(
|
||||
inline.flattenValues ++ inlf.flattenValues,
|
||||
inline.predo ++ inlf.predo,
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
@ -230,11 +231,7 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
}
|
||||
|
||||
case p => State.pure(PropertyRawWithModel(p, None) -> Inline.empty)
|
||||
}.sequence.map { (propsWithInline: Chain[(PropertyRawWithModel, Inline)]) =>
|
||||
val fullInline = propsWithInline.map(_._2).foldLeft(Inline.empty)(_ |+| _)
|
||||
val props = propsWithInline.map(_._1)
|
||||
(props, fullInline)
|
||||
}
|
||||
}.sequence.map(_.toList.unzip.bimap(Chain.fromSeq, _.combineAll))
|
||||
}
|
||||
|
||||
private def unfoldProperties[S: Mangler: Exports: Arrows](
|
||||
@ -254,7 +251,6 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
(
|
||||
vm,
|
||||
Inline(
|
||||
leftInline.flattenValues ++ inl.flattenValues,
|
||||
leftInline.predo ++ inl.predo,
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
@ -269,7 +265,6 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
case (v, i) if !propertiesAllowed && v.properties.nonEmpty =>
|
||||
removeProperties(v).map { case (vf, inlf) =>
|
||||
vf -> Inline(
|
||||
leftInline.flattenValues ++ i.flattenValues ++ inlf.flattenValues,
|
||||
leftInline.predo ++ i.predo ++ inlf.predo,
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
@ -277,7 +272,6 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
case (v, i) =>
|
||||
State.pure(
|
||||
v -> Inline(
|
||||
leftInline.flattenValues ++ i.flattenValues,
|
||||
leftInline.predo ++ i.predo,
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
@ -304,7 +298,6 @@ object ApplyPropertiesRawInliner extends RawInliner[ApplyPropertyRaw] with Loggi
|
||||
unfoldProperties(gateResInline, gateResVal, properties, propertiesAllowed).map {
|
||||
case (v, i) =>
|
||||
v -> Inline(
|
||||
inl.flattenValues ++ i.flattenValues,
|
||||
inl.predo ++ i.predo,
|
||||
mergeMode = SeqMode
|
||||
)
|
||||
|
@@ -0,0 +1,92 @@
package aqua.model.inline.raw

import aqua.model.*
import aqua.model.inline.raw.RawInliner
import aqua.model.inline.state.{Arrows, Exports, Mangler}
import aqua.raw.value.{AbilityRaw, LiteralRaw, MakeStructRaw}
import cats.data.{NonEmptyList, NonEmptyMap, State}
import aqua.model.inline.Inline
import aqua.model.inline.RawValueInliner.{unfold, valueToModel}
import aqua.types.{ArrowType, ScalarType}
import aqua.raw.value.ApplyUnaryOpRaw
import aqua.raw.value.ApplyUnaryOpRaw.Op.*

import cats.data.Chain
import cats.syntax.traverse.*
import cats.syntax.monoid.*
import cats.syntax.functor.*
import cats.syntax.flatMap.*
import cats.syntax.apply.*
import cats.syntax.foldable.*
import cats.syntax.applicative.*

object ApplyUnaryOpRawInliner extends RawInliner[ApplyUnaryOpRaw] {

  override def apply[S: Mangler: Exports: Arrows](
    raw: ApplyUnaryOpRaw,
    propertiesAllowed: Boolean
  ): State[S, (ValueModel, Inline)] = for {
    value <- unfold(raw.value)
    (vm, vinline) = value

    result <- vm match {
      // Optimize in case the value is known at compile time
      case LiteralModel.Bool(bvalue) =>
        (raw.op match {
          case Not => (LiteralModel.bool(!bvalue), vinline)
        }).pure[State[S, *]]
      // Produce unoptimized inline
      case _ => fullInline(vm, vinline, raw.op)
    }
  } yield result

  private def fullInline[S: Mangler: Exports: Arrows](
    vm: ValueModel,
    vinline: Inline,
    op: ApplyUnaryOpRaw.Op
  ): State[S, (ValueModel, Inline)] = {
    val name = op match {
      case Not => "not"
    }

    /*
     * (seq
     *   <value-inline>
     *   (xor
     *     (match <value-res> true
     *       (ap false <res-name>)
     *     )
     *     (ap true <res-name>)
     *   )
     * )
     */
    val predo = (resName: String) =>
      SeqModel.wrap(
        vinline.predo :+ XorModel.wrap(
          MatchMismatchModel(
            vm,
            LiteralModel.bool(true),
            shouldMatch = true
          ).wrap(
            FlattenModel(
              LiteralModel.bool(false),
              resName
            ).leaf
          ),
          FlattenModel(
            LiteralModel.bool(true),
            resName
          ).leaf
        )
      )

    Mangler[S]
      .findAndForbidName(name)
      .map(resName =>
        (
          VarModel(resName, ScalarType.bool),
          Inline(Chain.one(predo(resName)))
        )
      )
  }
}
@ -29,11 +29,10 @@ object CallArrowRawInliner extends RawInliner[CallArrowRaw] with Logging {
|
||||
cd <- callToModel(call, true)
|
||||
sd <- valueToModel(serviceId)
|
||||
} yield cd._1.exportTo.map(_.asVar.resolveWith(exports)) -> Inline(
|
||||
ListMap.empty,
|
||||
Chain(
|
||||
SeqModel.wrap(
|
||||
sd._2.toList ++
|
||||
cd._2.toList :+ CallServiceModel(sd._1, value.name, cd._1).leaf: _*
|
||||
cd._2.toList :+ CallServiceModel(sd._1, value.name, cd._1).leaf
|
||||
)
|
||||
)
|
||||
)
|
||||
@ -58,7 +57,6 @@ object CallArrowRawInliner extends RawInliner[CallArrowRaw] with Logging {
|
||||
.callArrowRet(fn, cm)
|
||||
.map { case (body, vars) =>
|
||||
vars -> Inline(
|
||||
ListMap.empty,
|
||||
Chain.one(
|
||||
// Leave meta information in tree after inlining
|
||||
MetaModel
|
||||
|
@ -22,6 +22,7 @@ import cats.syntax.monoid.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.syntax.flatMap.*
|
||||
import cats.syntax.apply.*
|
||||
import cats.syntax.foldable.*
|
||||
|
||||
object MakeAbilityRawInliner extends RawInliner[AbilityRaw] {
|
||||
|
||||
@ -33,16 +34,14 @@ object MakeAbilityRawInliner extends RawInliner[AbilityRaw] {
|
||||
name <- Mangler[S].findAndForbidName(raw.abilityType.name + "_ab")
|
||||
foldedFields <- raw.fieldsAndArrows.nonEmptyTraverse(unfold(_))
|
||||
varModel = VarModel(name, raw.baseType)
|
||||
valsInline = foldedFields.toSortedMap.values.map(_._2).fold(Inline.empty)(_ |+| _).desugar
|
||||
_ <- foldedFields.map(_._1).toNel.toList.traverse { case (n, vm) =>
|
||||
val namef = s"$name.$n"
|
||||
Exports[S].resolved(namef, vm)
|
||||
valsInline = foldedFields.toList.foldMap { case (_, inline) => inline }.desugar
|
||||
_ <- foldedFields.toNel.traverse { case (n, (vm, _)) =>
|
||||
Exports[S].resolved(s"$name.$n", vm)
|
||||
}
|
||||
} yield {
|
||||
(
|
||||
varModel,
|
||||
Inline(
|
||||
valsInline.flattenValues,
|
||||
Chain.one(SeqModel.wrap(valsInline.predo))
|
||||
)
|
||||
)
|
||||
|
@ -22,6 +22,8 @@ import org.scalatest.flatspec.AnyFlatSpec
|
||||
import org.scalatest.matchers.should.Matchers
|
||||
|
||||
import scala.collection.immutable.SortedMap
|
||||
import aqua.raw.value.ApplyBinaryOpRaw
|
||||
import aqua.raw.value.CallArrowRaw
|
||||
|
||||
class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
|
||||
@ -127,9 +129,8 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
"raw value inliner" should "desugarize a single non-recursive raw value" in {
|
||||
// x[y]
|
||||
valueToModel[InliningState](`raw x[y]`)
|
||||
.run(InliningState(noNames = Set("x", "y")))
|
||||
.value
|
||||
._2 should be(
|
||||
.runA(InliningState(noNames = Set("x", "y")))
|
||||
.value shouldBe (
|
||||
VarModel(
|
||||
"x",
|
||||
ArrayType(ScalarType.string),
|
||||
@ -143,13 +144,12 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
import aqua.model.inline.state.Mangler.Simple
|
||||
// a.field1.field2
|
||||
valueToModel[InliningState](`raw res.c`)
|
||||
.run(
|
||||
.runA(
|
||||
InliningState(resolvedExports =
|
||||
Map("res" -> VarModel("a", aType, Chain.one(IntoFieldModel("b", bType))))
|
||||
)
|
||||
)
|
||||
.value
|
||||
._2 should be(
|
||||
.value shouldBe (
|
||||
VarModel(
|
||||
"a",
|
||||
aType,
|
||||
@ -158,14 +158,13 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
)
|
||||
}
|
||||
|
||||
"raw value inliner" should "desugarize a single recursive raw value" in {
|
||||
it should "desugarize a single recursive raw value" in {
|
||||
// x[ys!]
|
||||
val (resVal, resTree) = valueToModel[InliningState](
|
||||
`raw x[ys[0]]`
|
||||
)
|
||||
.run(InliningState(noNames = Set("x", "ys")))
|
||||
.runA(InliningState(noNames = Set("x", "ys")))
|
||||
.value
|
||||
._2
|
||||
|
||||
resVal should be(
|
||||
VarModel(
|
||||
@ -189,10 +188,10 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
) should be(true)
|
||||
}
|
||||
|
||||
"raw value inliner" should "desugarize properties with functors x[ys[ys.length]][2] and make proper flattener tags" in {
|
||||
it should "desugarize properties with functors x[ys[ys.length]][2] and make proper flattener tags" in {
|
||||
val (resVal, resTree) = valueToModel[InliningState](
|
||||
`x[xs[ys.length]][xss[yss.length]]`
|
||||
).run(InliningState(noNames = Set("x", "ys", "xs", "yss", "xss"))).value._2
|
||||
).runA(InliningState(noNames = Set("x", "ys", "xs", "yss", "xss"))).value
|
||||
|
||||
resVal should be(
|
||||
VarModel(
|
||||
@ -263,13 +262,12 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
) should be(true)
|
||||
}
|
||||
|
||||
"raw value inliner" should "desugarize x[ys[0]][ys[1]] and make proper flattener tags" in {
|
||||
it should "desugarize x[ys[0]][ys[1]] and make proper flattener tags" in {
|
||||
val (resVal, resTree) = valueToModel[InliningState](
|
||||
`raw x[ys[0]][ys[1]]`
|
||||
)
|
||||
.run(InliningState(noNames = Set("x", "ys")))
|
||||
.runA(InliningState(noNames = Set("x", "ys")))
|
||||
.value
|
||||
._2
|
||||
|
||||
resVal should be(
|
||||
VarModel(
|
||||
@ -306,16 +304,15 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
) should be(true)
|
||||
}
|
||||
|
||||
"raw value inliner" should "desugarize stream with gate" in {
|
||||
it should "desugarize stream with gate" in {
|
||||
val streamWithProps =
|
||||
VarRaw("x", StreamType(ScalarType.string)).withProperty(
|
||||
IntoIndexRaw(ysVarRaw(1), ScalarType.string)
|
||||
)
|
||||
|
||||
val (resVal, resTree) = valueToModel[InliningState](streamWithProps)
|
||||
.run(InliningState(noNames = Set("x", "ys")))
|
||||
.runA(InliningState(noNames = Set("x", "ys")))
|
||||
.value
|
||||
._2
|
||||
|
||||
resVal should be(
|
||||
VarModel(
|
||||
@ -328,28 +325,23 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
)
|
||||
}
|
||||
|
||||
"raw value inliner" should "desugarize stream with length" in {
|
||||
it should "desugarize stream with length" in {
|
||||
val streamWithProps =
|
||||
VarRaw("x", StreamType(ScalarType.string)).withProperty(
|
||||
FunctorRaw("length", ScalarType.u32)
|
||||
)
|
||||
|
||||
val (resVal, resTree) = valueToModel[InliningState](streamWithProps)
|
||||
.run(InliningState(noNames = Set("x", "ys")))
|
||||
.runA(InliningState(noNames = Set("x", "ys")))
|
||||
.value
|
||||
._2
|
||||
|
||||
// println(resVal)
|
||||
// println(resTree)
|
||||
}
|
||||
|
||||
"raw value inliner" should "desugarize a recursive lambda value" in {
|
||||
it should "desugarize a recursive lambda value" in {
|
||||
val (resVal, resTree) = valueToModel[InliningState](
|
||||
`raw x[zs[ys[0]]][ys[1]]`
|
||||
)
|
||||
.run(InliningState(noNames = Set("x", "ys", "zs")))
|
||||
.runA(InliningState(noNames = Set("x", "ys", "zs")))
|
||||
.value
|
||||
._2
|
||||
|
||||
// This is x[zs-0][ys-0]
|
||||
// zs-0 should be zs[ys[0]], which should be already flattened
|
||||
@ -396,5 +388,4 @@ class RawValueInlinerSpec extends AnyFlatSpec with Matchers {
|
||||
)
|
||||
) should be(true)
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -179,6 +179,56 @@ case class AbilityRaw(fieldsAndArrows: NonEmptyMap[String, ValueRaw], abilityTyp
    copy(fieldsAndArrows = fieldsAndArrows.map(_.renameVars(map)))
}

case class ApplyBinaryOpRaw(
  op: ApplyBinaryOpRaw.Op,
  left: ValueRaw,
  right: ValueRaw
) extends ValueRaw {

  // Only boolean operations are supported for now
  override def baseType: Type = ScalarType.bool

  override def map(f: ValueRaw => ValueRaw): ValueRaw =
    f(copy(left = f(left), right = f(right)))

  override def varNames: Set[String] = left.varNames ++ right.varNames

  override def renameVars(map: Map[String, String]): ValueRaw =
    copy(left = left.renameVars(map), right = right.renameVars(map))
}

object ApplyBinaryOpRaw {

  enum Op {
    case And
    case Or
  }
}

case class ApplyUnaryOpRaw(
  op: ApplyUnaryOpRaw.Op,
  value: ValueRaw
) extends ValueRaw {

  // Only boolean operations are supported for now
  override def baseType: Type = ScalarType.bool

  override def map(f: ValueRaw => ValueRaw): ValueRaw =
    f(copy(value = f(value)))

  override def varNames: Set[String] = value.varNames

  override def renameVars(map: Map[String, String]): ValueRaw =
    copy(value = value.renameVars(map))
}

object ApplyUnaryOpRaw {

  enum Op {
    case Not
  }
}
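For orientation, here is a hypothetical construction of the raw tree for an expression like a && !b (the variable names are illustrative; VarRaw built from a name and a type is how variables appear elsewhere in this codebase):

  import aqua.raw.value.{ApplyBinaryOpRaw, ApplyUnaryOpRaw, VarRaw}
  import aqua.types.ScalarType

  // a && !b, assuming `a` and `b` are boolean variables already in scope
  val notB = ApplyUnaryOpRaw(ApplyUnaryOpRaw.Op.Not, VarRaw("b", ScalarType.bool))
  val expr = ApplyBinaryOpRaw(ApplyBinaryOpRaw.Op.And, VarRaw("a", ScalarType.bool), notB)
  // Both nodes report baseType == ScalarType.bool, per the definitions above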
|
||||
|
||||
case class CallArrowRaw(
|
||||
// TODO: ability should hold a type, not name
|
||||
ability: Option[String],
|
||||
|
@@ -62,6 +62,19 @@ case class LiteralModel(value: String, `type`: Type) extends ValueModel {

object LiteralModel {

  /**
   * Used to match bool literals in pattern matching
   */
  object Bool {

    def unapply(lm: LiteralModel): Option[Boolean] =
      lm match {
        case LiteralModel("true", ScalarType.bool | LiteralType.bool) => true.some
        case LiteralModel("false", ScalarType.bool | LiteralType.bool) => false.some
        case _ => none
      }
  }

  // AquaVM will return empty string for
  // %last_error%.$.error_code if there is no %last_error%
  val emptyErrorCode = quote("")
@ -71,6 +84,8 @@ object LiteralModel {
  def quote(str: String): LiteralModel = LiteralModel(s"\"$str\"", LiteralType.string)

  def number(n: Int): LiteralModel = LiteralModel(n.toString, LiteralType.forInt(n))

  def bool(b: Boolean): LiteralModel = LiteralModel(b.toString.toLowerCase, LiteralType.bool)
}
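To make the role of the new Bool extractor and bool constructor concrete, here is a small hypothetical helper (foldOr is not part of this commit) sketching the kind of compile-time folding ApplyBinaryOpRawInliner performs with them:

  // Hypothetical sketch: fold `left || right` when the left operand is a boolean literal
  def foldOr(left: ValueModel, right: ValueModel): Option[ValueModel] =
    left match {
      case LiteralModel.Bool(true)  => Some(LiteralModel.bool(true)) // true || x is always true
      case LiteralModel.Bool(false) => Some(right)                   // false || x reduces to x
      case _                        => None                          // not a literal: emit the full xor/match tree
    }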
|
||||
|
||||
sealed trait PropertyModel {
|
||||
|
@ -3,17 +3,19 @@ package aqua.parser.lexer
|
||||
import aqua.parser.Expr
|
||||
import aqua.parser.head.FilenameExpr
|
||||
import aqua.parser.lexer.Token.*
|
||||
import aqua.parser.lexer.ValueToken.{initPeerId, literal}
|
||||
import aqua.parser.lift.LiftParser
|
||||
import aqua.parser.lift.LiftParser.*
|
||||
import aqua.types.LiteralType
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
|
||||
|
||||
import cats.parse.{Numbers, Parser as P, Parser0 as P0}
|
||||
import cats.syntax.comonad.*
|
||||
import cats.syntax.functor.*
|
||||
import cats.{~>, Comonad, Functor}
|
||||
import cats.data.{NonEmptyList, NonEmptyMap}
|
||||
import aqua.parser.lift.Span
|
||||
import aqua.parser.lift.Span.{P0ToSpan, PToSpan, S}
|
||||
import cats.syntax.foldable.*
|
||||
import cats.arrow.FunctionK
|
||||
|
||||
sealed trait ValueToken[F[_]] extends Token[F] {
|
||||
def mapK[K[_]: Comonad](fk: F ~> K): ValueToken[K]
|
||||
@ -156,29 +158,60 @@ case class InfixToken[F[_]: Comonad](
|
||||
|
||||
object InfixToken {
|
||||
|
||||
import ValueToken._
|
||||
enum BoolOp(val symbol: String):
|
||||
case Or extends BoolOp("||")
|
||||
case And extends BoolOp("&&")
|
||||
|
||||
enum MathOp(val symbol: String):
|
||||
case Pow extends MathOp("**")
|
||||
case Mul extends MathOp("*")
|
||||
case Div extends MathOp("/")
|
||||
case Rem extends MathOp("%")
|
||||
case Add extends MathOp("+")
|
||||
case Sub extends MathOp("-")
|
||||
|
||||
enum CmpOp(val symbol: String):
|
||||
case Gt extends CmpOp(">")
|
||||
case Gte extends CmpOp(">=")
|
||||
case Lt extends CmpOp("<")
|
||||
case Lte extends CmpOp("<=")
|
||||
|
||||
enum Op(val symbol: String):
|
||||
case Pow extends Op("**")
|
||||
case Mul extends Op("*")
|
||||
case Div extends Op("/")
|
||||
case Rem extends Op("%")
|
||||
case Add extends Op("+")
|
||||
case Sub extends Op("-")
|
||||
case Gt extends Op(">")
|
||||
case Gte extends Op(">=")
|
||||
case Lt extends Op("<")
|
||||
case Lte extends Op("<=")
|
||||
/**
|
||||
* Scala3 does not support nested enums with fields
|
||||
* so this type acrobatics is used to enable exhaustive matching check
|
||||
*/
|
||||
case Math(mathOp: MathOp) extends Op(mathOp.symbol)
|
||||
case Cmp(cmpOp: CmpOp) extends Op(cmpOp.symbol)
|
||||
case Bool(boolOp: BoolOp) extends Op(boolOp.symbol)
|
||||
|
||||
def p: P[Unit] = P.string(symbol)
|
||||
|
||||
object Op {
|
||||
val math: List[Op] = List(Pow, Mul, Div, Rem, Add, Sub)
|
||||
val compare: List[Op] = List(Gt, Gte, Lt, Lte)
|
||||
val Pow = Math(MathOp.Pow)
|
||||
val Mul = Math(MathOp.Mul)
|
||||
val Div = Math(MathOp.Div)
|
||||
val Rem = Math(MathOp.Rem)
|
||||
val Add = Math(MathOp.Add)
|
||||
val Sub = Math(MathOp.Sub)
|
||||
|
||||
val math = MathOp.values.map(Math(_)).toList
|
||||
|
||||
val Gt = Cmp(CmpOp.Gt)
|
||||
val Gte = Cmp(CmpOp.Gte)
|
||||
val Lt = Cmp(CmpOp.Lt)
|
||||
val Lte = Cmp(CmpOp.Lte)
|
||||
|
||||
val cmp = CmpOp.values.map(Cmp(_)).toList
|
||||
|
||||
val And = Bool(BoolOp.And)
|
||||
val Or = Bool(BoolOp.Or)
|
||||
|
||||
val bool = BoolOp.values.map(Bool(_)).toList
|
||||
}
|
||||
|
||||
private def opsParser(ops: List[Op]): P[(Span, Op)] =
|
||||
P.oneOf(ops.map(op => op.p.lift.map(s => s.as(op))))
|
||||
P.oneOf(ops.map(op => op.p.lift.map(_.as(op))))
|
||||
|
||||
// Parse left-associative operations `basic (OP basic)*`.
|
||||
// We use this form to avoid left recursion.
|
||||
@ -209,26 +242,8 @@ object InfixToken {
|
||||
vt
|
||||
}
|
||||
|
||||
def brackets(basic: P[ValueToken[Span.S]]): P[ValueToken[Span.S]] =
|
||||
basic.between(`(`, `)`).backtrack
|
||||
|
||||
// One element of math expression
|
||||
val atom: P[ValueToken[S]] = P.oneOf(
|
||||
literal.backtrack ::
|
||||
initPeerId.backtrack ::
|
||||
P.defer(
|
||||
CollectionToken.collection
|
||||
) ::
|
||||
P.defer(NamedValueToken.dataValue).backtrack ::
|
||||
P.defer(CallArrowToken.callArrow).backtrack ::
|
||||
P.defer(abProperty).backtrack ::
|
||||
P.defer(brackets(InfixToken.mathExpr)).backtrack ::
|
||||
varProperty ::
|
||||
Nil
|
||||
)
|
||||
|
||||
private val pow: P[ValueToken[Span.S]] =
|
||||
infixParserRight(atom, Op.Pow :: Nil)
|
||||
infixParserRight(P.defer(ValueToken.atom), Op.Pow :: Nil)
|
||||
|
||||
private val mult: P[ValueToken[Span.S]] =
|
||||
infixParserLeft(pow, Op.Mul :: Op.Div :: Op.Rem :: Nil)
|
||||
@@ -245,6 +260,12 @@ object InfixToken {
Op.Gte :: Op.Lte :: Op.Gt :: Op.Lt :: Nil
)

private val and: P[ValueToken[Span.S]] =
infixParserLeft(compare, Op.And :: Nil)

private val or: P[ValueToken[Span.S]] =
infixParserLeft(and, Op.Or :: Nil)

/**
* The math expression parser.
*
@@ -291,9 +312,17 @@ object InfixToken {
*
* The grammar below expresses the operator precedence and associativity we expect from math expressions:
*
* -- Comparison is the entry point because it has the lowest priority.
* -- Logical OR is the entry point because it has the lowest priority.
* mathExpr
* -> cmpExpr
* -> orExpr
*
* -- Logical OR is left associative.
* orExpr
* -> andExpr OR_OP andExpr
*
* -- Logical AND is left associative.
* andExpr
* -> cmpExpr AND_OP cmpExpr
*
* -- Comparison isn't an associative operation so it's not a recursive definition.
* cmpExpr
@@ -326,19 +355,51 @@ object InfixToken {
* | ...
* | ( mathExpr )
*/
val mathExpr: P[ValueToken[Span.S]] = compare
val value: P[ValueToken[Span.S]] = or
}
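A quick, hedged usage sketch of the new entry point (result shape shown without spans, only approximately):

// With `or` as the entry point, `&&` binds tighter than `||`,
// so "a || b && c" groups as a || (b && c).
val parsed = ValueToken.`value`.parseAll("a || b && c")
// expected shape, roughly: Right(InfixToken(a, InfixToken(b, c, And), Or))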

case class PrefixToken[F[_]: Comonad](
operand: ValueToken[F],
prefix: F[PrefixToken.Op]
) extends ValueToken[F] {

def op: PrefixToken.Op = prefix.extract
override def as[T](v: T): F[T] = prefix.as(v)

override def mapK[K[_]: Comonad](fk: FunctionK[F, K]): ValueToken[K] =
copy(operand.mapK(fk), fk(prefix))
}

object PrefixToken {

enum Op(val symbol: String) {
case Not extends Op("!")

def p: P[Unit] = P.string(symbol)
}

private def parseOps(ops: List[Op]): P[S[Op]] =
P.oneOf(ops.map(op => op.p.lift.map(_.as(op))))

private def parsePrefix(basic: P[ValueToken[S]], ops: List[Op]) =
(parseOps(ops).surroundedBy(`/s*`) ~ basic).map { case (op, vt) =>
PrefixToken(vt, op)
}

val value: P[ValueToken[Span.S]] =
parsePrefix(P.defer(ValueToken.atom), Op.Not :: Nil)
}

object ValueToken {

val varProperty: P[VarToken[Span.S]] =
(Name.dotted ~ PropertyOp.ops.?).map { case (n, l) ⇒
VarToken(n, l.fold[List[PropertyOp[Span.S]]](Nil)(_.toList))
VarToken(n, l.foldMap(_.toList))
}

val abProperty: P[VarToken[Span.S]] =
(Name.cl ~ PropertyOp.ops.?).map { case (n, l) ⇒
VarToken(n, l.fold[List[PropertyOp[Span.S]]](Nil)(_.toList))
VarToken(n, l.foldMap(_.toList))
}

val bool: P[LiteralToken[Span.S]] =
@@ -375,8 +436,25 @@ object ValueToken {
val literal: P[LiteralToken[Span.S]] =
P.oneOf(bool.backtrack :: float.backtrack :: num.backtrack :: string :: Nil)

private def brackets(basic: P[ValueToken[Span.S]]): P[ValueToken[Span.S]] =
basic.between(`(`, `)`).backtrack

// Basic element of math expression
val atom: P[ValueToken[S]] = P.oneOf(
literal.backtrack ::
initPeerId.backtrack ::
P.defer(CollectionToken.collection).backtrack ::
P.defer(NamedValueToken.dataValue).backtrack ::
P.defer(CallArrowToken.callArrow).backtrack ::
P.defer(abProperty).backtrack ::
P.defer(PrefixToken.value).backtrack ::
P.defer(brackets(InfixToken.value)).backtrack ::
varProperty ::
Nil
)

// One of entry points for parsing the whole math expression
val `value`: P[ValueToken[Span.S]] =
P.defer(InfixToken.mathExpr)
P.defer(InfixToken.value)

}
@@ -2,18 +2,22 @@ package aqua.parser

import aqua.AquaSpec
import aqua.parser.expr.func.IfExpr
import aqua.parser.lexer.InfixToken.Op
import aqua.parser.lexer.{EqOp, InfixToken, LiteralToken, ValueToken}
import aqua.parser.lexer.InfixToken.Op as InfixOp
import aqua.parser.lexer.PrefixToken.Op as PrefixOp
import aqua.parser.lexer.*
import aqua.parser.lexer.InfixToken.Op.*
import aqua.parser.lexer.PrefixToken.Op.*
import aqua.parser.lift.Span
import aqua.types.LiteralType

import cats.syntax.comonad.*
import cats.{~>, Comonad, Id}
import cats.parse.{Numbers, Parser as P, Parser0 as P0}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import cats.parse.{Numbers, Parser as P, Parser0 as P0}
import org.scalatest.Inside

class InfixTokenSpec extends AnyFlatSpec with Matchers with AquaSpec {
class ValueTokenComplexSpec extends AnyFlatSpec with Matchers with Inside with AquaSpec {

def spanToId: Span.S ~> Id = new (Span.S ~> Id) {

@@ -24,9 +28,20 @@ class InfixTokenSpec extends AnyFlatSpec with Matchers with AquaSpec {

import AquaSpec._

private def variable(name: String): ValueToken[Id] =
VarToken(Name(name), Nil)

private def func(name: String, args: List[ValueToken[Id]]): ValueToken[Id] =
CallArrowToken(None, Name(name), args)

private def literal(n: Int): ValueToken[Id] = toNumber(n)

private def infixToken(left: ValueToken[Id], right: ValueToken[Id], op: Op) =
private def literalBool(b: Boolean): ValueToken[Id] = toBool(b)

private def prefixToken(value: ValueToken[Id], op: PrefixOp) =
PrefixToken[Id](value, op)

private def infixToken(left: ValueToken[Id], right: ValueToken[Id], op: InfixOp) =
InfixToken[Id](left, right, op)

private def mul(left: ValueToken[Id], right: ValueToken[Id]): ValueToken[Id] =
@@ -257,9 +272,14 @@ class InfixTokenSpec extends AnyFlatSpec with Matchers with AquaSpec {
}

"complex cmp math expression " should "be parsed" in {
val test = (op: Op) => {
val vt =
ValueToken.`value`.parseAll(s"(1 + 2) ** 3 ${op.symbol} 4 - 5 * 6").right.get.mapK(spanToId)
val test = (op: InfixOp) => {
val vt = ValueToken.`value`
.parseAll(
s"(1 + 2) ** 3 ${op.symbol} 4 - 5 * 6"
)
.right
.get
.mapK(spanToId)
val left = pow(add(literal(1), literal(2)), literal(3))
val right = sub(literal(4), mul(literal(5), literal(6)))
val exp = infixToken(left, right, op)
@@ -273,4 +293,220 @@ class InfixTokenSpec extends AnyFlatSpec with Matchers with AquaSpec {
val vt = ValueToken.`value`.parseAll("(1 > 3)").right.get.mapK(spanToId)
vt shouldBe InfixToken(literal(1), literal(3), Gt)
}

"simple logical expression" should "be parsed" in {
val vtAnd = ValueToken.`value`.parseAll("true && false").map(_.mapK(spanToId))

inside(vtAnd) { case Right(vt) =>
vt shouldBe infixToken(literalBool(true), literalBool(false), And)
}

val vtOr = ValueToken.`value`.parseAll("false || true").map(_.mapK(spanToId))

inside(vtOr) { case Right(vt) =>
vt shouldBe infixToken(literalBool(false), literalBool(true), Or)
}

val vtAndOr = ValueToken.`value`.parseAll("false && true || false").map(_.mapK(spanToId))

inside(vtAndOr) { case Right(vt) =>
vt shouldBe infixToken(
infixToken(literalBool(false), literalBool(true), And),
literalBool(false),
Or
)
}

val vtOrAnd = ValueToken.`value`.parseAll("false || true && false").map(_.mapK(spanToId))

inside(vtOrAnd) { case Right(vt) =>
vt shouldBe infixToken(
literalBool(false),
infixToken(literalBool(true), literalBool(false), And),
Or
)
}

val vtOrNotAnd = ValueToken.`value`.parseAll("false || !true && false").map(_.mapK(spanToId))

inside(vtOrNotAnd) { case Right(vt) =>
vt shouldBe infixToken(
literalBool(false),
infixToken(
prefixToken(literalBool(true), Not),
literalBool(false),
And
),
Or
)
}
}

"logical expression with brackets" should "be parsed" in {
val vtAndOr = ValueToken.`value`.parseAll("false && (true || false)").map(_.mapK(spanToId))

inside(vtAndOr) { case Right(vt) =>
vt shouldBe infixToken(
literalBool(false),
infixToken(literalBool(true), literalBool(false), Or),
And
)
}

val vtOrAnd = ValueToken.`value`.parseAll("(false || true) && false").map(_.mapK(spanToId))

inside(vtOrAnd) { case Right(vt) =>
vt shouldBe infixToken(
infixToken(literalBool(false), literalBool(true), Or),
literalBool(false),
And
)
}

val vtNotAndOr = ValueToken.`value`.parseAll("!false && (true || false)").map(_.mapK(spanToId))

inside(vtNotAndOr) { case Right(vt) =>
vt shouldBe infixToken(
prefixToken(literalBool(false), Not),
infixToken(literalBool(true), literalBool(false), Or),
And
)
}

val vtNotOrAnd = ValueToken.`value`.parseAll("!(false || true) && false").map(_.mapK(spanToId))

inside(vtNotOrAnd) { case Right(vt) =>
vt shouldBe infixToken(
prefixToken(
infixToken(literalBool(false), literalBool(true), Or),
Not
),
literalBool(false),
And
)
}
}

"logical expression with math expressions" should "be parsed" in {
val vt1 = ValueToken.`value`.parseAll("1 < 2 + 3 || 3 % 2 > 1").map(_.mapK(spanToId))

inside(vt1) { case Right(vt) =>
vt shouldBe infixToken(
infixToken(
literal(1),
infixToken(literal(2), literal(3), Add),
Lt
),
infixToken(
infixToken(literal(3), literal(2), Rem),
literal(1),
Gt
),
Or
)
}

val vt2 = ValueToken.`value`.parseAll("1 - 2 > 3 && 3 ** 2 <= 1").map(_.mapK(spanToId))

inside(vt2) { case Right(vt) =>
vt shouldBe infixToken(
infixToken(
infixToken(literal(1), literal(2), Sub),
literal(3),
Gt
),
infixToken(
infixToken(literal(3), literal(2), Pow),
literal(1),
Lte
),
And
)
}

val vt3 = ValueToken.`value`.parseAll("!(1 - 2 > 3) && 3 ** 2 <= 1").map(_.mapK(spanToId))

inside(vt3) { case Right(vt) =>
vt shouldBe infixToken(
prefixToken(
infixToken(
infixToken(literal(1), literal(2), Sub),
literal(3),
Gt
),
Not
),
infixToken(
infixToken(literal(3), literal(2), Pow),
literal(1),
Lte
),
And
)
}
}

"logical expression with function calls and variables" should "be parsed" in {
val vt1 = ValueToken.`value`.parseAll("foo() || a + 1 < 2 && b").map(_.mapK(spanToId))

inside(vt1) { case Right(vt) =>
vt shouldBe infixToken(
func("foo", Nil),
infixToken(
infixToken(
infixToken(
variable("a"),
literal(1),
Add
),
literal(2),
Lt
),
variable("b"),
And
),
Or
)
}

val vt2 = ValueToken.`value`.parseAll("bar(a) < 2 && (b > 5 || c)").map(_.mapK(spanToId))

inside(vt2) { case Right(vt) =>
vt shouldBe infixToken(
infixToken(func("bar", List(variable("a"))), literal(2), Lt),
infixToken(
infixToken(
variable("b"),
literal(5),
Gt
),
variable("c"),
Or
),
And
)
}

val vt3 = ValueToken.`value`.parseAll("!baz(a) && (!(b > 4) || !c)").map(_.mapK(spanToId))

inside(vt3) { case Right(vt) =>
vt shouldBe infixToken(
prefixToken(func("baz", List(variable("a"))), Not),
infixToken(
prefixToken(
infixToken(
variable("b"),
literal(4),
Gt
),
Not
),
prefixToken(variable("c"), Not),
Or
),
And
)
}
}

}
@@ -32,7 +32,7 @@ class VarLambdaSpec extends AnyFlatSpec with Matchers with EitherValues {
}

"var lambda in value" should "parse" in {
val opsP = (s: String) => InfixToken.atom.parseAll(s).value.mapK(spanToId)
val opsP = (s: String) => ValueToken.atom.parseAll(s).value.mapK(spanToId)
opsP("some_val") should be(VarToken[Id](Name[Id]("some_val")))
opsP("SomeClass.SOME_CONST") should be(VarToken[Id](Name[Id]("SomeClass.SOME_CONST")))
}
@@ -1,12 +1,14 @@
package aqua.semantics.rules

import aqua.parser.lexer.*
import aqua.parser.lexer.InfixToken.Op
import aqua.parser.lexer.InfixToken.{BoolOp, CmpOp, MathOp, Op as InfOp}
import aqua.parser.lexer.PrefixToken.Op as PrefOp
import aqua.raw.value.*
import aqua.semantics.rules.abilities.AbilitiesAlgebra
import aqua.semantics.rules.names.NamesAlgebra
import aqua.semantics.rules.types.TypesAlgebra
import aqua.types.*

import cats.Monad
import cats.data.Chain
import cats.syntax.applicative.*
@@ -17,6 +19,7 @@ import cats.syntax.traverse.*
import cats.syntax.option.*
import cats.instances.list.*
import cats.data.{NonEmptyList, NonEmptyMap}
import cats.data.OptionT
import scribe.Logging

import scala.collection.immutable.SortedMap
@@ -174,29 +177,73 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
case ca: CallArrowToken[S] =>
callArrowToRaw(ca).map(_.widen[ValueRaw])

case pr @ PrefixToken(operand, _) =>
(for {
raw <- OptionT(
valueToRaw(operand)
)
typeCheck <- OptionT.liftF(
T.ensureTypeMatches(operand, ScalarType.bool, raw.`type`)
)
result <- OptionT.when(typeCheck)(
ApplyUnaryOpRaw(
op = pr.op match {
case PrefOp.Not => ApplyUnaryOpRaw.Op.Not
},
value = raw
)
)
} yield result).value

case it @ InfixToken(l, r, _) =>
(valueToRaw(l), valueToRaw(r)).flatMapN {
case (Some(leftRaw), Some(rightRaw)) =>
val lType = leftRaw.`type`
val rType = rightRaw.`type`
lazy val uType = lType `∪` rType

it.op match {
case InfOp.Bool(bop) =>
for {
leftChecked <- T.ensureTypeMatches(l, ScalarType.bool, lType)
rightChecked <- T.ensureTypeMatches(r, ScalarType.bool, rType)
} yield Option.when(
leftChecked && rightChecked
)(
ApplyBinaryOpRaw(
op = bop match {
case BoolOp.And => ApplyBinaryOpRaw.Op.And
case BoolOp.Or => ApplyBinaryOpRaw.Op.Or
},
left = leftRaw,
right = rightRaw
)
)
case op @ (InfOp.Math(_) | InfOp.Cmp(_)) =>
// Some type acrobatics to make
// compiler check exhaustive pattern matching
val iop = op match {
case InfOp.Math(op) => op
case InfOp.Cmp(op) => op
}

val hasFloat = List(lType, rType).exists(
_ acceptsValueOf LiteralType.float
)

// See https://github.com/fluencelabs/aqua-lib/blob/main/math.aqua
val (id, fn) = it.op match {
case Op.Add => ("math", "add")
case Op.Sub => ("math", "sub")
case Op.Mul if hasFloat => ("math", "fmul")
case Op.Mul => ("math", "mul")
case Op.Div => ("math", "div")
case Op.Rem => ("math", "rem")
case Op.Pow => ("math", "pow")
case Op.Gt => ("cmp", "gt")
case Op.Gte => ("cmp", "gte")
case Op.Lt => ("cmp", "lt")
case Op.Lte => ("cmp", "lte")
val (id, fn) = iop match {
case MathOp.Add => ("math", "add")
case MathOp.Sub => ("math", "sub")
case MathOp.Mul if hasFloat => ("math", "fmul")
case MathOp.Mul => ("math", "mul")
case MathOp.Div => ("math", "div")
case MathOp.Rem => ("math", "rem")
case MathOp.Pow => ("math", "pow")
case CmpOp.Gt => ("cmp", "gt")
case CmpOp.Gte => ("cmp", "gte")
case CmpOp.Lt => ("cmp", "lt")
case CmpOp.Lte => ("cmp", "lte")
}

/*
@@ -216,16 +263,16 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
} else uType

// Expected type sets of left and right operands, result type
val (leftExp, rightExp, resType) = it.op match {
case Op.Add | Op.Sub | Op.Div | Op.Rem =>
val (leftExp, rightExp, resType) = iop match {
case MathOp.Add | MathOp.Sub | MathOp.Div | MathOp.Rem =>
(ScalarType.integer, ScalarType.integer, uTypeBounded)
case Op.Pow =>
case MathOp.Pow =>
(ScalarType.integer, ScalarType.unsigned, uTypeBounded)
case Op.Mul if hasFloat =>
case MathOp.Mul if hasFloat =>
(ScalarType.float, ScalarType.float, ScalarType.i64)
case Op.Mul =>
case MathOp.Mul =>
(ScalarType.integer, ScalarType.integer, uTypeBounded)
case Op.Gt | Op.Lt | Op.Gte | Op.Lte =>
case CmpOp.Gt | CmpOp.Lt | CmpOp.Gte | CmpOp.Lte =>
(ScalarType.integer, ScalarType.integer, ScalarType.bool)
}

@@ -247,9 +294,9 @@ class ValuesAlgebra[S[_], Alg[_]: Monad](implicit
)
)

}
case _ => None.pure[Alg]
}

}
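For reference, the BoolOp-to-raw mapping introduced above can be read as a standalone function; this is an illustration of the new branch, not code taken from the diff:

// Illustration: how parser-level boolean operators map onto raw IR nodes.
import aqua.parser.lexer.InfixToken.BoolOp
import aqua.raw.value.ApplyBinaryOpRaw

def boolOpToRaw(op: BoolOp): ApplyBinaryOpRaw.Op = op match {
  case BoolOp.And => ApplyBinaryOpRaw.Op.And
  case BoolOp.Or => ApplyBinaryOpRaw.Op.Or
}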

// Generate CallArrowRaw for arrow in ability

@@ -14,10 +14,10 @@ import aqua.semantics.rules.definitions.DefinitionsInterpreter
import aqua.semantics.rules.types.TypesInterpreter
import aqua.semantics.rules.locations.LocationsAlgebra
import aqua.semantics.rules.locations.DummyLocationsInterpreter
import aqua.raw.value.LiteralRaw
import aqua.raw.value.{ApplyBinaryOpRaw, LiteralRaw}
import aqua.raw.RawContext
import aqua.types.{LiteralType, ScalarType, TopType, Type}
import aqua.parser.lexer.{InfixToken, LiteralToken, Name, ValueToken, VarToken}
import aqua.parser.lexer.{InfixToken, LiteralToken, Name, PrefixToken, ValueToken, VarToken}

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
@@ -27,6 +27,7 @@ import cats.data.State
import cats.syntax.functor.*
import cats.syntax.comonad.*
import monocle.syntax.all.*
import aqua.raw.value.ApplyUnaryOpRaw

class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {

@@ -203,4 +204,157 @@ class ValuesAlgebraSpec extends AnyFlatSpec with Matchers with Inside {
}
}
}

it should "handle ||, && on bool values" in {
val types = List(LiteralType.bool, ScalarType.bool)

allPairs(types).foreach { case (lt, rt) =>
InfixToken.BoolOp.values.foreach { op =>
val left = lt match {
case lt: LiteralType =>
literal("true", lt)
case _ =>
variable("left")
}
val right = rt match {
case rt: LiteralType =>
literal("false", rt)
case _ =>
variable("right")
}

val alg = algebra()

val state = genState(
vars = (
List("left" -> lt).filter(_ => lt != LiteralType.bool) ++
List("right" -> rt).filter(_ => rt != LiteralType.bool)
).toMap
)

val token = InfixToken[Id](left, right, InfixToken.Op.Bool(op))

val (st, res) = alg
.valueToRaw(token)
.run(state)
.value

inside(res) { case Some(ApplyBinaryOpRaw(bop, _, _)) =>
bop shouldBe (op match {
case InfixToken.BoolOp.And => ApplyBinaryOpRaw.Op.And
case InfixToken.BoolOp.Or => ApplyBinaryOpRaw.Op.Or
})
}
}
}
}
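The `allPairs` helper used in the test above is not shown in this hunk; a plausible sketch (assumed, not taken from the diff) that enumerates every (left, right) type combination:

// Sketch: cartesian product of a type list, so each bool-typed operand pair is exercised.
def allPairs[A](xs: List[A]): List[(A, A)] =
  for {
    x <- xs
    y <- xs
  } yield (x, y)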

it should "handle ! on bool values" in {
val types = List(LiteralType.bool, ScalarType.bool)

types.foreach { t =>
PrefixToken.Op.values.foreach { op =>
val value = t match {
case lt: LiteralType =>
literal("true", lt)
case _ =>
variable("val")
}

val alg = algebra()

val state = genState(
vars = List("val" -> t).filter(_ => t != LiteralType.bool).toMap
)

val token = PrefixToken[Id](value, op)

val (st, res) = alg
.valueToRaw(token)
.run(state)
.value

inside(res) { case Some(ApplyUnaryOpRaw(uop, _)) =>
uop shouldBe (op match {
case PrefixToken.Op.Not => ApplyUnaryOpRaw.Op.Not
})
}
}
}
}

it should "check type of logical operands (binary)" in {
val types = List(LiteralType.bool, ScalarType.bool).flatMap(t =>
List(t -> ScalarType.i8, ScalarType.i8 -> t)
)

types.foreach { case (lt, rt) =>
InfixToken.BoolOp.values.foreach { op =>
val left = lt match {
case lt: LiteralType =>
literal("true", lt)
case _ =>
variable("left")
}
val right = rt match {
case rt: LiteralType =>
literal("false", rt)
case _ =>
variable("right")
}

val alg = algebra()

val state = genState(
vars = (
List("left" -> lt).filter(_ => lt != LiteralType.bool) ++
List("right" -> rt).filter(_ => rt != LiteralType.bool)
).toMap
)

val token = InfixToken[Id](left, right, InfixToken.Op.Bool(op))

val (st, res) = alg
.valueToRaw(token)
.run(state)
.value

res shouldBe None
st.errors.exists(_.isInstanceOf[RulesViolated[Id]]) shouldBe true
}
}
}

it should "check type of logical operand (unary)" in {
val types = ScalarType.integer.toList :+ LiteralType.unsigned

types.foreach { t =>
PrefixToken.Op.values.foreach { op =>
val value = t match {
case lt: LiteralType =>
literal("42", lt)
case _ =>
variable("val")
}

val alg = algebra()

val state = genState(
vars = Map(
"value" -> t
).filter(_ => t != LiteralType.unsigned)
)

val token = PrefixToken[Id](value, op)

val (st, res) = alg
.valueToRaw(token)
.run(state)
.value

res shouldBe None
st.errors.exists(_.isInstanceOf[RulesViolated[Id]]) shouldBe true
}
}
}
}