Use custom logic for lambda instead of json path (#154)

Mike Voronov 2021-10-18 23:23:30 +03:00 committed by GitHub
parent 4251a36842
commit 1c55d34981
77 changed files with 3149 additions and 746 deletions

View File

@ -1,13 +1,21 @@
## Version 0.15.0 (2021-09-30)
## Version 0.16.0 (2021-10-18)
[PR 154](https://github.com/fluencelabs/aquavm/pull/154)
The json path crate has been removed and replaced with a custom lambda scheme that supports a subset of the json path functionality used by the Aqua compiler (a hypothetical usage sketch follows this changelog hunk). The flattening sign `!` is still allowed, but now does nothing.
[PR 150](https://github.com/fluencelabs/aquavm/pull/150), [PR 152](https://github.com/fluencelabs/aquavm/pull/152), [PR 153](https://github.com/fluencelabs/aquavm/pull/153) [PR 160](https://github.com/fluencelabs/aquavm/pull/160)
Some parts of the interpreter have been refactored to make it more modular.
[PR 144](https://github.com/fluencelabs/aquavm/pull/144)
The interpreter is now built with the `unwind` panic handler, and some other debug options were turned on.
## Version 0.15.0 (2021-10-04)
[PR 140](https://github.com/fluencelabs/aquavm/pull/130):
- the interpreter became async; it is now a pure function without any imports from a peer. Instead of calling the `call_service` import on a peer, it returns call requests in the outcome structure and receives their results in the `invoke` export.
- the data structure now includes a new field that tracks the last call request id, giving a peer more freedom.
- the AVM server was completely refactored to support the new interpreter model and to expose a new trait for storing data for a node.
[PR 144](https://github.com/fluencelabs/aquavm/pull/144)
The interpreter is now built with the `unwind` panic handler, and some other debug options were turned on.
[PR 139](https://github.com/fluencelabs/aquavm/pull/139)
Senders in `RequestSentBy` can now differ.
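A hypothetical sketch (not part of this commit) of the new accessor form described in the PR 154 entry above, driven through the `create_avm`, `set_variable_call_service`, and `call` test helpers that appear later in this diff; the peer id, variable name, and returned JSON are illustrative only:

```rust
use air_test_utils::prelude::*;
use serde_json::json;

// Sketch only: the call service below returns the same JSON object for every
// call, so `value` resolves to it, and the second call selects "zero" out of
// it with the lambda `.$.args.[0]` (the trailing `!` is accepted but is a no-op).
fn lambda_accessor_sketch() -> Result<RawAVMOutcome, String> {
    let mut vm = create_avm(
        set_variable_call_service(json!({ "args": ["zero", "one"] })),
        "peer_id",
    );
    let script = r#"
        (seq
            (call "peer_id" ("" "") [] value)
            (call "peer_id" ("" "") [value.$.args.[0]!])
        )
    "#;
    vm.call(script, "", "", "")
}
```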

Cargo.lock (generated) — 112 changes
View File

@ -13,11 +13,13 @@ dependencies = [
[[package]]
name = "air"
version = "0.15.0"
version = "0.16.0"
dependencies = [
"air-execution-info-collector",
"air-interpreter-data",
"air-interpreter-interface",
"air-lambda-ast",
"air-lambda-parser",
"air-log-targets",
"air-parser",
"air-test-utils",
@ -27,7 +29,6 @@ dependencies = [
"csv",
"env_logger",
"fluence-app-service",
"jsonpath_lib-fl",
"log",
"maplit",
"marine-rs-sdk",
@ -48,7 +49,7 @@ version = "0.1.0"
[[package]]
name = "air-interpreter"
version = "0.15.0"
version = "0.16.0"
dependencies = [
"air",
"air-log-targets",
@ -79,6 +80,32 @@ dependencies = [
"serde_json",
]
[[package]]
name = "air-lambda-ast"
version = "0.1.0"
dependencies = [
"non-empty-vec",
"serde",
"serde_json",
]
[[package]]
name = "air-lambda-parser"
version = "0.1.0"
dependencies = [
"air-lambda-ast",
"codespan",
"codespan-reporting",
"itertools 0.10.1",
"lalrpop",
"lalrpop-util",
"multimap",
"regex",
"serde",
"serde_json",
"thiserror",
]
[[package]]
name = "air-log-targets"
version = "0.1.0"
@ -87,6 +114,7 @@ version = "0.1.0"
name = "air-parser"
version = "0.7.1"
dependencies = [
"air-lambda-parser",
"codespan",
"codespan-reporting",
"criterion",
@ -139,12 +167,6 @@ version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61604a8f862e1d5c3229fdd78f8b02c68dcf73a4c4b05fd636d12240aaa242c1"
[[package]]
name = "array_tool"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f8cb5d814eb646a863c4f24978cff2880c4be96ad8cde2c0f0678732902e271"
[[package]]
name = "arrayref"
version = "0.3.6"
@ -301,9 +323,9 @@ dependencies = [
[[package]]
name = "cc"
version = "1.0.70"
version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0"
checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd"
[[package]]
name = "cfg-if"
@ -845,9 +867,9 @@ dependencies = [
[[package]]
name = "half"
version = "1.7.1"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3"
checksum = "ac5956d4e63858efaec57e0d6c1c2f6a41e1487f830314a324ccd7e2223a7ca0"
[[package]]
name = "hashbrown"
@ -993,19 +1015,6 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "jsonpath_lib-fl"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33dcf980221b25366e8f0df601cf0df6ffcc97242cbbe4d139a79a7f0de5107f"
dependencies = [
"array_tool",
"env_logger",
"log",
"serde",
"serde_json",
]
[[package]]
name = "lalrpop"
version = "0.19.6"
@ -1151,18 +1160,18 @@ dependencies = [
[[package]]
name = "marine-macro"
version = "0.6.13"
version = "0.6.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "679663e087698f1048f23fed9b51ed82f6fa75781d3747ce29ea2f3ad78a6534"
checksum = "94b05da94255c230b7bf139c39c04447677b5d238579bf38ca7b3fcdc3e04993"
dependencies = [
"marine-macro-impl",
]
[[package]]
name = "marine-macro-impl"
version = "0.6.13"
version = "0.6.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ba83fc29fec3b96374094a94396d3fe13c97468ffe196123b78555bdae1093e"
checksum = "7846a749f35186f923f71b3132a15add0735376e73978f41f552b92e9d43cbab"
dependencies = [
"proc-macro2",
"quote",
@ -1208,9 +1217,9 @@ dependencies = [
[[package]]
name = "marine-rs-sdk"
version = "0.6.13"
version = "0.6.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8866fc6f24b92342f15d2816298d3de6377b685df245e38a36ddcde993c8f1d5"
checksum = "5b349c8b5a09045a2a509f7ee5106a52c70ef6782febe9be6748044c6f8c477a"
dependencies = [
"marine-macro",
"marine-rs-sdk-main",
@ -1220,9 +1229,9 @@ dependencies = [
[[package]]
name = "marine-rs-sdk-main"
version = "0.6.13"
version = "0.6.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4980a0c01121844419c0146e776d24e35fdf7cb2e90a33d19ecf52756e400196"
checksum = "1e9d9b9ecd87f0dfa8ad9ee594e71f71d8b4f0b819d3b025632a1a6597063088"
dependencies = [
"log",
"marine-macro",
@ -1261,9 +1270,9 @@ dependencies = [
[[package]]
name = "marine-timestamp-macro"
version = "0.6.13"
version = "0.6.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5656745923b99d73f945e26cf191efa70e906c7f55b0d4c1fc176b4b8087e85b"
checksum = "cb6e81c03cc0c2f546680bf21e10f3514f69acfa4d4d84a8f2b17b2fcebe8ce9"
dependencies = [
"chrono",
"quote",
@ -1339,6 +1348,15 @@ dependencies = [
"version_check",
]
[[package]]
name = "non-empty-vec"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e411c9b40f7c0048d9d78283d94954defc6ae6d432184f4b8ef491ea87d9a882"
dependencies = [
"serde",
]
[[package]]
name = "num-integer"
version = "0.1.44"
@ -1556,9 +1574,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
version = "1.0.29"
version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d"
checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70"
dependencies = [
"unicode-xid",
]
@ -1821,7 +1839,6 @@ version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8"
dependencies = [
"indexmap",
"itoa",
"ryu",
"serde",
@ -1847,12 +1864,13 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "string_cache"
version = "0.8.1"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ddb1139b5353f96e429e1a5e19fbaf663bddedaa06d1dbd49f82e352601209a"
checksum = "923f0f39b6267d37d23ce71ae7235602134b250ace715dd2c90421998ddac0c6"
dependencies = [
"lazy_static",
"new_debug_unreachable",
"parking_lot 0.11.2",
"phf_shared",
"precomputed-hash",
]
@ -1883,9 +1901,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]]
name = "syn"
version = "1.0.78"
version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4eac2e6c19f5c3abc0c229bea31ff0b9b091c7b14990e8924b92902a303a0c0"
checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194"
dependencies = [
"proc-macro2",
"quote",
@ -1929,18 +1947,18 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.29"
version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "602eca064b2d83369e2b2f34b09c70b605402801927c65c11071ac911d299b88"
checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.29"
version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bad553cc2c78e8de258400763a647e80e6d1b31ee237275d756f6836d204494c"
checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
dependencies = [
"proc-macro2",
"quote",

View File

@ -7,6 +7,8 @@ members = [
"crates/air-lib/execution-info-collector",
"crates/air-lib/interpreter-data",
"crates/air-lib/interpreter-interface",
"crates/air-lib/lambda/ast",
"crates/air-lib/lambda/parser",
"crates/air-lib/log-targets",
"crates/air-lib/polyplets",
"crates/air-lib/test-utils",

View File

@ -1,6 +1,6 @@
[package]
name = "air-interpreter"
version = "0.15.0"
version = "0.16.0"
description = "Crate-wrapper for air"
authors = ["Fluence Labs"]
edition = "2018"

View File

@ -1,6 +1,6 @@
[package]
name = "air"
version = "0.15.0"
version = "0.16.0"
description = "Interpreter of AIR scripts intended to coordinate request flow in the Fluence network"
authors = ["Fluence Labs"]
edition = "2018"
@ -20,6 +20,8 @@ air-execution-info-collector = { path = "../crates/air-lib/execution-info-collec
air-interpreter-data = { path = "../crates/air-lib/interpreter-data" }
air-interpreter-interface = { path = "../crates/air-lib/interpreter-interface" }
air-log-targets = { path = "../crates/air-lib/log-targets" }
air-lambda-ast = { path = "../crates/air-lib/lambda/ast" }
air-lambda-parser = { path = "../crates/air-lib/lambda/parser" }
air-trace-handler = { path = "../crates/air-lib/trace-handler" }
polyplets = { path = "../crates/air-lib/polyplets" }
marine-rs-sdk = { version = "0.6.11", features = ["logger"] }
@ -27,8 +29,6 @@ marine-rs-sdk = { version = "0.6.11", features = ["logger"] }
serde = { version = "1.0.118", features = [ "derive", "rc" ] }
serde_json = "1.0.61"
jsonpath_lib-fl = "=0.3.7"
boolinator = "2.4.0"
log = "0.4.11"
thiserror = "1.0.23"

View File

@ -1,8 +1,4 @@
use air_test_utils::create_avm;
use air_test_utils::unit_call_service;
use air_test_utils::AVMError;
use air_test_utils::InterpreterOutcome;
use air_test_utils::AVM;
use air_test_utils::prelude::*;
use criterion::criterion_group;
use criterion::criterion_main;
@ -10,7 +6,7 @@ use criterion::Criterion;
use std::cell::RefCell;
thread_local!(static VM: RefCell<AVM> = RefCell::new(create_avm(unit_call_service(), "test_peer_id")));
thread_local!(static VM: RefCell<TestRunner> = RefCell::new(create_avm(unit_call_service(), "test_peer_id")));
thread_local!(static SCRIPT: String = String::from(
r#"
(call "test_peer_id" ("local_service_id" "local_fn_name") [] result_name)
@ -18,8 +14,8 @@ thread_local!(static SCRIPT: String = String::from(
)
);
fn current_peer_id_call() -> Result<InterpreterOutcome, AVMError> {
VM.with(|vm| SCRIPT.with(|script| vm.borrow_mut().call_with_prev_data("", script.clone(), "", "")))
fn current_peer_id_call() -> Result<RawAVMOutcome, String> {
VM.with(|vm| SCRIPT.with(|script| vm.borrow_mut().call(script, "", "", "")))
}
fn criterion_benchmark(c: &mut Criterion) {

View File

@ -1,39 +1,26 @@
use air_test_utils::create_avm;
use air_test_utils::unit_call_service;
use air_test_utils::AVMError;
use air_test_utils::CallServiceClosure;
use air_test_utils::IValue;
use air_test_utils::InterpreterOutcome;
use air_test_utils::NEVec;
use air_test_utils::AVM;
use air_test_utils::prelude::*;
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
use serde_json::json;
use std::cell::RefCell;
thread_local!(static RELAY_1_VM: RefCell<AVM> = RefCell::new(create_avm(unit_call_service(), "Relay1")));
thread_local!(static RELAY_2_VM: RefCell<AVM> = RefCell::new(create_avm(unit_call_service(), "Relay2")));
thread_local!(static REMOTE_VM: RefCell<AVM> = RefCell::new({
let members_call_service: CallServiceClosure = Box::new(|_, _| -> Option<IValue> {
Some(IValue::Record(
NEVec::new(vec![
IValue::S32(0),
IValue::String(String::from(r#"[["A", "Relay1"], ["B", "Relay2"]]"#)),
])
.unwrap(),
))
thread_local!(static RELAY_1_VM: RefCell<TestRunner> = RefCell::new(create_avm(unit_call_service(), "Relay1")));
thread_local!(static RELAY_2_VM: RefCell<TestRunner> = RefCell::new(create_avm(unit_call_service(), "Relay2")));
thread_local!(static REMOTE_VM: RefCell<TestRunner> = RefCell::new({
let members_call_service: CallServiceClosure = Box::new(|_| -> CallServiceResult {
CallServiceResult::ok(json!([["A", "Relay1"], ["B", "Relay2"]]))
});
create_avm(members_call_service, "Remote")
}));
thread_local!(static CLIENT_1_VM: RefCell<AVM> = RefCell::new(create_avm(unit_call_service(), "A")));
thread_local!(static CLIENT_2_VM: RefCell<AVM> = RefCell::new(create_avm(unit_call_service(), "B")));
thread_local!(static CLIENT_1_VM: RefCell<TestRunner> = RefCell::new(create_avm(unit_call_service(), "A")));
thread_local!(static CLIENT_2_VM: RefCell<TestRunner> = RefCell::new(create_avm(unit_call_service(), "B")));
fn chat_sent_message_benchmark() -> Result<InterpreterOutcome, AVMError> {
let script = String::from(
r#"
fn chat_sent_message_benchmark() -> Result<RawAVMOutcome, String> {
let script = r#"
(seq
(call "Relay1" ("identity" "") [] $void1)
(seq
@ -52,29 +39,26 @@ fn chat_sent_message_benchmark() -> Result<InterpreterOutcome, AVMError> {
)
)
)
"#,
);
"#;
let result = CLIENT_1_VM
.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", ""))
.unwrap();
let result = CLIENT_1_VM.with(|vm| vm.borrow_mut().call(script, "", "", "")).unwrap();
let result = RELAY_1_VM
.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", result.data))
.with(|vm| vm.borrow_mut().call(script, "", result.data, ""))
.unwrap();
let result = REMOTE_VM
.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", result.data))
.with(|vm| vm.borrow_mut().call(script, "", result.data, ""))
.unwrap();
let res_data = result.data.clone();
let res1 = RELAY_1_VM
.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", res_data))
.with(|vm| vm.borrow_mut().call(script, "", res_data, ""))
.unwrap();
CLIENT_1_VM
.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", res1.data))
.with(|vm| vm.borrow_mut().call(script, "", res1.data, ""))
.unwrap();
let res2 = RELAY_2_VM
.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", result.data))
.with(|vm| vm.borrow_mut().call(script, "", result.data, ""))
.unwrap();
CLIENT_2_VM.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", res2.data))
CLIENT_2_VM.with(|vm| vm.borrow_mut().call(script, "", res2.data, ""))
}
fn criterion_benchmark(c: &mut Criterion) {

View File

@ -1,11 +1,4 @@
use air_test_utils::create_avm;
use air_test_utils::set_variables_call_service;
use air_test_utils::AVMError;
use air_test_utils::CallServiceClosure;
use air_test_utils::IValue;
use air_test_utils::InterpreterOutcome;
use air_test_utils::NEVec;
use air_test_utils::AVM;
use air_test_utils::prelude::*;
use serde_json::json;
@ -15,33 +8,26 @@ use criterion::Criterion;
use std::cell::RefCell;
thread_local!(static VM: RefCell<AVM> = RefCell::new({
thread_local!(static VM: RefCell<TestRunner> = RefCell::new({
let add_module_response = String::from("add_module response");
let add_blueprint_response = String::from("add_blueprint response");
let create_response = String::from("create response");
let call_service: CallServiceClosure = Box::new(move |_, args| -> Option<IValue> {
let builtin_service = match &args[0] {
IValue::String(str) => str,
_ => unreachable!(),
};
let response = match builtin_service.as_str() {
let call_service: CallServiceClosure = Box::new(move |args| -> CallServiceResult {
let response = match args.service_id.as_str() {
"add_module" => add_module_response.clone(),
"add_blueprint" => add_blueprint_response.clone(),
"create" => create_response.clone(),
_ => String::from("unknown response"),
};
Some(IValue::Record(
NEVec::new(vec![IValue::S32(0), IValue::String(format!("\"{}\"", response))]).unwrap(),
))
CallServiceResult::ok(json!(response))
});
create_avm(call_service, "A")
}));
thread_local!(static SET_VARIABLES_VM: RefCell<AVM> = RefCell::new({
thread_local!(static SET_VARIABLES_VM: RefCell<TestRunner> = RefCell::new({
let module = "greeting";
let module_config = json!(
{
@ -60,17 +46,16 @@ thread_local!(static SET_VARIABLES_VM: RefCell<AVM> = RefCell::new({
let blueprint = json!({ "name": "blueprint", "dependencies": [module]});
let variables_mapping = maplit::hashmap!(
String::from("module_bytes") => module_bytes.to_string(),
String::from("module_config") => module_config.to_string(),
String::from("blueprint") => blueprint.to_string(),
String::from("module_bytes") => json!(module_bytes),
String::from("module_config") => json!(module_config),
String::from("blueprint") => json!(blueprint),
);
create_avm(set_variables_call_service(variables_mapping), "set_variables")
}));
fn create_service_benchmark() -> Result<InterpreterOutcome, AVMError> {
let script = String::from(
r#"
fn create_service_benchmark() -> Result<RawAVMOutcome, String> {
let script = r#"
(seq
(seq
(seq
@ -89,13 +74,12 @@ fn create_service_benchmark() -> Result<InterpreterOutcome, AVMError> {
)
)
)
)"#,
);
)"#;
let result = SET_VARIABLES_VM
.with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", ""))
.with(|vm| vm.borrow_mut().call(script, "", "", ""))
.unwrap();
VM.with(|vm| vm.borrow_mut().call_with_prev_data("", script, "", result.data))
VM.with(|vm| vm.borrow_mut().call(script, "", result.data, ""))
}
fn criterion_benchmark(c: &mut Criterion) {

View File

@ -24,7 +24,7 @@ use super::ExecutionResult;
use super::TraceHandler;
use crate::execution_step::air::ResolvedCallResult;
use crate::execution_step::boxed_value::Variable;
use crate::execution_step::utils::apply_json_path;
use crate::execution_step::utils::apply_lambda;
use crate::trace_to_exec_err;
use crate::JValue;
use crate::SecurityTetraplet;
@ -33,7 +33,7 @@ use utils::*;
use air_parser::ast::ApArgument;
use air_parser::ast::AstVariable;
use air_parser::ast::JsonPath;
use air_parser::ast::VariableWithLambda;
use air_parser::ast::{Ap, LastErrorPath};
use air_trace_handler::MergerApResult;
@ -84,7 +84,7 @@ fn save_result<'ctx>(
fn should_touch_trace(ap: &Ap<'_>) -> bool {
match (&ap.argument, &ap.result) {
(_, AstVariable::Stream(_)) => true,
(ApArgument::JsonPath(json_path), _) => match &json_path.variable {
(ApArgument::VariableWithLambda(vl), _) => match &vl.variable {
AstVariable::Scalar(_) => false,
AstVariable::Stream(_) => true,
},

View File

@ -24,7 +24,7 @@ pub(super) fn apply_to_arg(
) -> ExecutionResult<ResolvedCallResult> {
let result = match argument {
ApArgument::ScalarVariable(scalar_name) => apply_scalar(scalar_name, exec_ctx, trace_ctx, should_touch_trace)?,
ApArgument::JsonPath(json_arg) => apply_json_argument(json_arg, exec_ctx, trace_ctx)?,
ApArgument::VariableWithLambda(vl) => apply_json_argument(vl, exec_ctx, trace_ctx)?,
ApArgument::LastError(error_path) => apply_last_error(error_path, exec_ctx, trace_ctx)?,
ApArgument::Literal(value) => apply_const(value.to_string(), exec_ctx, trace_ctx),
ApArgument::Number(value) => apply_const(value, exec_ctx, trace_ctx),
@ -89,12 +89,12 @@ fn apply_last_error(
}
fn apply_json_argument(
json_arg: &JsonPath<'_>,
vl: &VariableWithLambda<'_>,
exec_ctx: &ExecutionCtx<'_>,
trace_ctx: &TraceHandler,
) -> ExecutionResult<ResolvedCallResult> {
let variable = Variable::from_ast(&json_arg.variable);
let (jvalue, mut tetraplets) = apply_json_path(variable, json_arg.path, json_arg.should_flatten, exec_ctx)?;
let variable = Variable::from_ast(&vl.variable);
let (jvalue, mut tetraplets) = apply_lambda(variable, &vl.lambda, exec_ctx)?;
let tetraplet = tetraplets
.pop()

View File

@ -20,7 +20,7 @@ use super::ExecutionResult;
use crate::exec_err;
use crate::JValue;
use air_parser::ast::{CallInstrValue, FunctionPart, PeerPart};
use air_parser::ast::{AstVariable, CallInstrValue, FunctionPart, PeerPart};
use polyplets::ResolvedTriplet;
/// Triplet represents a location of the executable code in the network.
@ -87,13 +87,10 @@ fn resolve_to_string<'i>(value: &CallInstrValue<'i>, ctx: &ExecutionCtx<'i>) ->
let jvalue = resolved.into_jvalue();
jvalue_to_string(jvalue)?
}
CallInstrValue::JsonPath(json_path) => {
// this is checked on the parsing stage
debug_assert!(json_path.should_flatten);
let resolved = resolve_ast_variable(&json_path.variable, ctx)?;
let resolved = resolved.apply_json_path(json_path.path)?;
vec_to_string(resolved, json_path.path)?
CallInstrValue::VariableWithLambda(vl) => {
let resolved = resolve_ast_variable(&vl.variable, ctx)?;
let resolved = resolved.apply_lambda(&vl.lambda)?;
vec_to_string(resolved, &vl.variable)?
}
};
@ -109,13 +106,13 @@ fn jvalue_to_string(jvalue: JValue) -> ExecutionResult<String> {
}
}
fn vec_to_string(values: Vec<&JValue>, json_path: &str) -> ExecutionResult<String> {
fn vec_to_string(values: Vec<&JValue>, variable: &AstVariable<'_>) -> ExecutionResult<String> {
if values.is_empty() {
return exec_err!(ExecutionError::VariableNotFound(json_path.to_string()));
}
if values.len() != 1 {
return exec_err!(ExecutionError::MultipleValuesInJsonPath(json_path.to_string()));
let variable_name = match variable {
AstVariable::Stream(name) => name,
AstVariable::Scalar(name) => name,
};
return exec_err!(ExecutionError::VariableNotFound(variable_name.to_string()));
}
jvalue_to_string(values[0].clone())

View File

@ -61,17 +61,12 @@ pub(crate) fn are_matchable_eq<'ctx>(
Ok(left_value == right_value)
}
(JsonPath(lhs), JsonPath(rhs)) => {
// TODO: improve comparison
if lhs.should_flatten != rhs.should_flatten {
return Ok(false);
}
(VariableWithLambda(lhs), VariableWithLambda(rhs)) => {
let left_jvaluable = resolve_ast_variable(&lhs.variable, exec_ctx)?;
let left_value = left_jvaluable.apply_json_path(lhs.path)?;
let left_value = left_jvaluable.apply_lambda(&lhs.lambda)?;
let right_jvaluable = resolve_ast_variable(&rhs.variable, exec_ctx)?;
let right_value = right_jvaluable.apply_json_path(rhs.path)?;
let right_value = right_jvaluable.apply_lambda(&rhs.lambda)?;
Ok(left_value == right_value)
}
@ -116,23 +111,14 @@ fn compare_matchable<'ctx>(
let jvalue = jvaluable.as_jvalue();
Ok(comparator(jvalue))
}
JsonPath(json_path) => {
let jvaluable = resolve_ast_variable(&json_path.variable, exec_ctx)?;
let jvalues = jvaluable.apply_json_path(json_path.path)?;
VariableWithLambda(vl) => {
let jvaluable = resolve_ast_variable(&vl.variable, exec_ctx)?;
let jvalues = jvaluable.apply_lambda(&vl.lambda)?;
let jvalue = if json_path.should_flatten {
if jvalues.len() != 1 {
return Ok(false);
}
Cow::Borrowed(jvalues[0])
} else {
let jvalue = jvalues.into_iter().cloned().collect::<Vec<_>>();
let jvalue = JValue::Array(jvalue);
Cow::Owned(jvalue)
};
Ok(comparator(jvalue))
Ok(comparator(Cow::Owned(jvalue)))
}
}
}

View File

@ -18,12 +18,11 @@ use super::*;
use crate::exec_err;
use crate::execution_step::RSecurityTetraplet;
use crate::JValue;
use crate::LambdaAST;
use air_parser::ast;
use jsonpath_lib::select;
use std::ops::Deref;
use std::rc::Rc;
// TODO: refactor this file after switching to boxed value
@ -46,11 +45,9 @@ pub(crate) fn construct_scalar_iterable_value<'ctx>(
) -> ExecutionResult<FoldIterableScalar> {
match ast_iterable {
ast::IterableScalarValue::ScalarVariable(scalar_name) => create_scalar_iterable(exec_ctx, scalar_name),
ast::IterableScalarValue::JsonPath {
scalar_name,
path,
should_flatten,
} => create_scalar_json_path_iterable(exec_ctx, scalar_name, path, *should_flatten),
ast::IterableScalarValue::VariableWithLambda { scalar_name, lambda } => {
create_scalar_lambda_iterable(exec_ctx, scalar_name, lambda)
}
}
}
@ -124,79 +121,58 @@ fn from_call_result(call_result: ResolvedCallResult) -> ExecutionResult<FoldIter
Ok(iterable)
}
fn create_scalar_json_path_iterable<'ctx>(
fn create_scalar_lambda_iterable<'ctx>(
exec_ctx: &ExecutionCtx<'ctx>,
scalar_name: &str,
json_path: &str,
should_flatten: bool,
lambda: &LambdaAST<'_>,
) -> ExecutionResult<FoldIterableScalar> {
use crate::execution_step::lambda_applier::select;
match exec_ctx.scalars.get(scalar_name) {
Some(Scalar::JValueRef(variable)) => {
let jvalues = apply_json_path(&variable.result, json_path)?;
from_jvalues(jvalues, variable.tetraplet.clone(), json_path, should_flatten)
let jvalues = select(&variable.result, lambda.iter())?;
from_jvalue(jvalues, variable.tetraplet.clone(), lambda)
}
Some(Scalar::JValueFoldCursor(fold_state)) => {
let iterable_value = fold_state.iterable.peek().unwrap();
let jvalues = iterable_value.apply_json_path(json_path)?;
let jvalues = iterable_value.apply_lambda(lambda)?;
let tetraplet = as_tetraplet(&iterable_value);
from_jvalues(jvalues, tetraplet, json_path, should_flatten)
from_jvalue(jvalues[0], tetraplet, lambda)
}
_ => return exec_err!(ExecutionError::VariableNotFound(scalar_name.to_string())),
}
}
fn apply_json_path<'jvalue, 'str>(
jvalue: &'jvalue JValue,
json_path: &'str str,
) -> ExecutionResult<Vec<&'jvalue JValue>> {
use ExecutionError::JValueJsonPathError;
select(jvalue, json_path).map_err(|e| Rc::new(JValueJsonPathError(jvalue.clone(), json_path.to_string(), e)))
}
/// Applies json_path to provided jvalues and construct IterableValue from the result and given triplet.
fn from_jvalues(
jvalues: Vec<&JValue>,
/// Construct IterableValue from the result and given triplet.
fn from_jvalue(
jvalue: &JValue,
tetraplet: RSecurityTetraplet,
json_path: &str,
should_flatten: bool,
lambda: &LambdaAST<'_>,
) -> ExecutionResult<FoldIterableScalar> {
let jvalues = construct_iterable_jvalues(jvalues, should_flatten)?;
let formatted_lambda_ast = air_lambda_ast::format_ast(lambda);
tetraplet.borrow_mut().add_lambda(&formatted_lambda_ast);
if jvalues.is_empty() {
let iterable = match jvalue {
JValue::Array(array) => array,
_ => {
return exec_err!(ExecutionError::FoldIteratesOverNonArray(
jvalue.clone(),
formatted_lambda_ast
))
}
};
if iterable.is_empty() {
return Ok(FoldIterableScalar::Empty);
}
tetraplet.borrow_mut().add_json_path(json_path);
let foldable = IterableJsonPathResult::init(jvalues, tetraplet);
let iterable = iterable.to_vec();
let foldable = IterableLambdaResult::init(iterable, tetraplet);
let iterable = FoldIterableScalar::Scalar(Box::new(foldable));
Ok(iterable)
}
fn construct_iterable_jvalues(jvalues: Vec<&JValue>, should_flatten: bool) -> ExecutionResult<Vec<JValue>> {
if !should_flatten {
let jvalues = jvalues.into_iter().cloned().collect();
return Ok(jvalues);
}
if jvalues.len() != 1 {
let jvalues = jvalues.into_iter().cloned().collect();
let jvalue = JValue::Array(jvalues);
return exec_err!(ExecutionError::FlatteningError(jvalue));
}
match jvalues[0] {
JValue::Array(values) => Ok(values.clone()),
_ => {
let jvalues = jvalues.into_iter().cloned().collect();
let jvalue = JValue::Array(jvalues);
exec_err!(ExecutionError::FlatteningError(jvalue))
}
}
}
fn as_tetraplet(iterable: &IterableItem<'_>) -> RSecurityTetraplet {
use IterableItem::*;

View File

@ -19,7 +19,7 @@ mod resolved_call;
mod vec_json_path_result;
mod vec_resolved_call;
pub(crate) use json_path_result::IterableJsonPathResult;
pub(crate) use json_path_result::IterableLambdaResult;
pub(crate) use resolved_call::IterableResolvedCall;
pub(crate) use vec_resolved_call::IterableVecResolvedCall;

View File

@ -21,16 +21,16 @@ use crate::foldable_next;
use crate::foldable_prev;
use crate::JValue;
/// Used for iterating over a result of applied to a JValue json path.
/// Used for iterating over a result of applied to a JValue lambda.
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) struct IterableJsonPathResult {
pub(crate) struct IterableLambdaResult {
pub(crate) jvalues: Vec<JValue>,
// consider adding index for each tetraplet
pub(crate) tetraplet: RSecurityTetraplet,
pub(crate) cursor: usize,
}
impl IterableJsonPathResult {
impl IterableLambdaResult {
pub(crate) fn init(jvalues: Vec<JValue>, tetraplet: RSecurityTetraplet) -> Self {
Self {
jvalues,
@ -40,7 +40,7 @@ impl IterableJsonPathResult {
}
}
impl<'ctx> Iterable<'ctx> for IterableJsonPathResult {
impl<'ctx> Iterable<'ctx> for IterableLambdaResult {
type Item = IterableItem<'ctx>;
fn next(&mut self) -> bool {

View File

@ -21,7 +21,7 @@ use crate::foldable_next;
use crate::foldable_prev;
use crate::JValue;
/// Used for iterating over a result of applied to an stream json path.
/// Used for iterating over a result of applied to a stream lambda.
#[derive(Clone, Debug, Eq, PartialEq)]
pub(crate) struct IterableVecJsonPathResult {
pub(crate) jvalues: Vec<JValue>,

View File

@ -24,8 +24,10 @@ use super::iterable::IterableItem;
use super::ExecutionError;
use super::ExecutionResult;
use super::ResolvedCallResult;
use crate::execution_step::lambda_applier::*;
use crate::execution_step::SecurityTetraplets;
use crate::JValue;
use crate::LambdaAST;
pub(crate) use stream::StreamJvaluableIngredients;
@ -33,11 +35,14 @@ use std::borrow::Cow;
/// Represent a value that could be transform to a JValue with or without tetraplets.
pub(crate) trait JValuable {
/// Applies json path to the internal value, produces JValue.
fn apply_json_path(&self, json_path: &str) -> ExecutionResult<Vec<&JValue>>;
/// Applies lambda to the internal value, produces JValue.
fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult<Vec<&JValue>>;
/// Applies json path to the internal value, produces JValue with tetraplet.
fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)>;
/// Applies lambda to the internal value, produces JValue with tetraplet.
fn apply_lambda_with_tetraplets(
&self,
lambda: &LambdaAST<'_>,
) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)>;
/// Return internal value as borrowed if it's possible, owned otherwise.
fn as_jvalue(&self) -> Cow<'_, JValue>;

View File

@ -14,45 +14,37 @@
* limitations under the License.
*/
use super::ExecutionError::GenerationStreamJsonPathError;
use super::select_from_stream;
use super::ExecutionResult;
use super::JValuable;
use super::ResolvedCallResult;
use crate::execution_step::SecurityTetraplets;
use crate::JValue;
use crate::LambdaAST;
use jsonpath_lib::select_with_iter;
use air_lambda_ast::format_ast;
use std::borrow::Cow;
use std::ops::Deref;
impl JValuable for std::cell::Ref<'_, Vec<ResolvedCallResult>> {
fn apply_json_path(&self, json_path: &str) -> ExecutionResult<Vec<&JValue>> {
let acc_iter = self.iter().map(|r| r.result.deref());
let (selected_values, _) = select_with_iter(acc_iter, json_path).map_err(|e| {
GenerationStreamJsonPathError(self.iter().cloned().collect::<Vec<_>>(), json_path.to_string(), e)
})?;
Ok(selected_values)
fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult<Vec<&JValue>> {
let stream_iter = self.iter().map(|r| r.result.deref());
let select_result = select_from_stream(stream_iter, lambda)?;
Ok(vec![select_result.result])
}
fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
let acc_iter = self.iter().map(|r| r.result.deref());
fn apply_lambda_with_tetraplets(
&self,
lambda: &LambdaAST<'_>,
) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
let stream_iter = self.iter().map(|r| r.result.deref());
let select_result = select_from_stream(stream_iter, lambda)?;
let (selected_values, tetraplet_indices) = select_with_iter(acc_iter, json_path).map_err(|e| {
GenerationStreamJsonPathError(self.iter().cloned().collect::<Vec<_>>(), json_path.to_string(), e)
})?;
let tetraplet = self[select_result.tetraplet_idx].tetraplet.clone();
tetraplet.borrow_mut().add_lambda(&format_ast(lambda));
let tetraplets = tetraplet_indices
.into_iter()
.map(|id| {
let tetraplet = self[id].tetraplet.clone();
tetraplet.borrow_mut().add_json_path(json_path);
tetraplet
})
.collect::<Vec<_>>();
Ok((selected_values, tetraplets))
Ok((vec![select_result.result], vec![tetraplet]))
}
fn as_jvalue(&self) -> Cow<'_, JValue> {

View File

@ -17,6 +17,7 @@
use super::ExecutionError;
use super::ExecutionResult;
use super::JValuable;
use super::LambdaAST;
use crate::exec_err;
use crate::execution_step::SecurityTetraplets;
use crate::JValue;
@ -24,14 +25,17 @@ use crate::JValue;
use std::borrow::Cow;
impl JValuable for () {
fn apply_json_path(&self, json_path: &str) -> ExecutionResult<Vec<&JValue>> {
// applying json path to an empty stream will produce a join behaviour
exec_err!(ExecutionError::EmptyStreamJsonPathError(json_path.to_string()))
fn apply_lambda(&self, _lambda: &LambdaAST<'_>) -> ExecutionResult<Vec<&JValue>> {
// applying lambda to an empty stream will produce a join behaviour
exec_err!(ExecutionError::EmptyStreamLambdaError)
}
fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
// applying json path to an empty stream will produce a join behaviour
exec_err!(ExecutionError::EmptyStreamJsonPathError(json_path.to_string()))
fn apply_lambda_with_tetraplets(
&self,
_lambda: &LambdaAST<'_>,
) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
// applying lambda to an empty stream will produce a join behaviour
exec_err!(ExecutionError::EmptyStreamLambdaError)
}
fn as_jvalue(&self) -> Cow<'_, JValue> {

View File

@ -14,20 +14,19 @@
* limitations under the License.
*/
use super::ExecutionError::JValueJsonPathError as JsonPathError;
use super::select;
use super::ExecutionResult;
use super::IterableItem;
use super::JValuable;
use super::LambdaAST;
use crate::execution_step::SecurityTetraplets;
use crate::JValue;
use jsonpath_lib::select;
use std::borrow::Cow;
use std::ops::Deref;
impl<'ctx> JValuable for IterableItem<'ctx> {
fn apply_json_path(&self, json_path: &str) -> ExecutionResult<Vec<&JValue>> {
fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult<Vec<&JValue>> {
use super::IterableItem::*;
let jvalue = match self {
@ -36,12 +35,14 @@ impl<'ctx> JValuable for IterableItem<'ctx> {
RcValue((jvalue, ..)) => jvalue.deref(),
};
let selected_jvalues =
select(jvalue, json_path).map_err(|e| JsonPathError(jvalue.clone(), String::from(json_path), e))?;
Ok(selected_jvalues)
let selected_value = select(jvalue, lambda.iter())?;
Ok(vec![selected_value])
}
fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
fn apply_lambda_with_tetraplets(
&self,
lambda: &LambdaAST<'_>,
) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
use super::IterableItem::*;
let (jvalue, tetraplet) = match self {
@ -50,9 +51,8 @@ impl<'ctx> JValuable for IterableItem<'ctx> {
RcValue((jvalue, tetraplet, _)) => (jvalue.deref(), tetraplet),
};
let selected_jvalues =
select(jvalue, json_path).map_err(|e| JsonPathError(jvalue.clone(), String::from(json_path), e))?;
Ok((selected_jvalues, vec![tetraplet.clone()]))
let selected_value = select(jvalue, lambda.iter())?;
Ok((vec![selected_value], vec![tetraplet.clone()]))
}
fn as_jvalue(&self) -> Cow<'_, JValue> {

View File

@ -14,37 +14,34 @@
* limitations under the License.
*/
use super::select;
use super::ExecutionResult;
use super::JValuable;
use super::LambdaAST;
use super::ResolvedCallResult;
use crate::execution_step::SecurityTetraplets;
use crate::JValue;
use jsonpath_lib::select;
use air_lambda_ast::format_ast;
use std::borrow::Cow;
use std::ops::Deref;
impl JValuable for ResolvedCallResult {
fn apply_json_path(&self, json_path: &str) -> ExecutionResult<Vec<&JValue>> {
use super::ExecutionError::JValueJsonPathError as JsonPathError;
let selected_jvalues = select(&self.result, json_path)
.map_err(|e| JsonPathError(self.result.deref().clone(), String::from(json_path), e))?;
Ok(selected_jvalues)
fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult<Vec<&JValue>> {
let selected_value = select(&self.result, lambda.iter())?;
Ok(vec![selected_value])
}
fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
use super::ExecutionError::JValueJsonPathError as JsonPathError;
is_json_path_allowed(&self.result)?;
let selected_jvalues = select(&self.result, json_path)
.map_err(|e| JsonPathError(self.result.deref().clone(), String::from(json_path), e))?;
fn apply_lambda_with_tetraplets(
&self,
lambda: &LambdaAST<'_>,
) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
let selected_value = select(&self.result, lambda.iter())?;
let tetraplet = self.tetraplet.clone();
tetraplet.borrow_mut().add_json_path(json_path);
tetraplet.borrow_mut().add_lambda(&format_ast(lambda));
Ok((selected_jvalues, vec![tetraplet]))
Ok((vec![selected_value], vec![tetraplet]))
}
fn as_jvalue(&self) -> Cow<'_, JValue> {
@ -59,14 +56,3 @@ impl JValuable for ResolvedCallResult {
vec![self.tetraplet.clone()]
}
}
fn is_json_path_allowed(value: &JValue) -> ExecutionResult<()> {
use super::ExecutionError;
use crate::exec_err;
match value {
JValue::Array(_) => Ok(()),
JValue::Object(_) => Ok(()),
value => exec_err!(ExecutionError::JsonPathVariableTypeError(value.clone())),
}
}

View File

@ -14,7 +14,7 @@
* limitations under the License.
*/
use super::ExecutionError::StreamJsonPathError;
use super::select_from_stream;
use super::ExecutionResult;
use super::JValuable;
use crate::exec_err;
@ -22,8 +22,9 @@ use crate::execution_step::boxed_value::Generation;
use crate::execution_step::boxed_value::Stream;
use crate::execution_step::SecurityTetraplets;
use crate::JValue;
use crate::LambdaAST;
use jsonpath_lib::select_with_iter;
use air_lambda_ast::format_ast;
use std::borrow::Cow;
use std::ops::Deref;
@ -37,31 +38,26 @@ pub(crate) struct StreamJvaluableIngredients<'stream> {
// TODO: this will be deleted soon, because it would be impossible to use streams without
// canonicalization as an arg of a call
impl JValuable for StreamJvaluableIngredients<'_> {
fn apply_json_path(&self, json_path: &str) -> ExecutionResult<Vec<&JValue>> {
fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult<Vec<&JValue>> {
let iter = self.iter()?.map(|v| v.result.deref());
let select_result = select_from_stream(iter, lambda)?;
let (selected_values, _) = select_with_iter(iter, json_path)
.map_err(|e| StreamJsonPathError(self.stream.deref().clone(), json_path.to_string(), e))?;
Ok(selected_values)
Ok(vec![select_result.result])
}
fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
fn apply_lambda_with_tetraplets(
&self,
lambda: &LambdaAST<'_>,
) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> {
let iter = self.iter()?.map(|v| v.result.deref());
let select_result = select_from_stream(iter, lambda)?;
let (selected_values, tetraplet_indices) = select_with_iter(iter, json_path)
.map_err(|e| StreamJsonPathError(self.stream.deref().clone(), json_path.to_string(), e))?;
let mut tetraplets = Vec::with_capacity(tetraplet_indices.len());
for idx in tetraplet_indices.iter() {
let resolved_call = self.iter()?.nth(*idx).unwrap();
// unwrap is safe here because each value has a tetraplet and a lambda always returns a valid index
let resolved_call = self.iter()?.nth(select_result.tetraplet_idx).unwrap();
let tetraplet = resolved_call.tetraplet.clone();
tetraplet.borrow_mut().add_json_path(json_path);
tetraplets.push(tetraplet);
}
tetraplet.borrow_mut().add_lambda(&format_ast(lambda));
Ok((selected_values, tetraplets))
Ok((vec![select_result.result], vec![tetraplet]))
}
fn as_jvalue(&self) -> Cow<'_, JValue> {

View File

@ -20,14 +20,13 @@ mod joinable;
pub(crate) use catchable::Catchable;
pub(crate) use joinable::Joinable;
use super::ResolvedCallResult;
use super::Stream;
use crate::execution_step::lambda_applier::LambdaError;
use crate::JValue;
use air_interpreter_interface::CallResults;
use air_trace_handler::MergerApResult;
use air_trace_handler::TraceHandlerError;
use jsonpath_lib::JsonPathError;
use strum::IntoEnumIterator;
use strum_macros::EnumDiscriminants;
use strum_macros::EnumIter;
@ -55,21 +54,13 @@ pub(crate) enum ExecutionError {
#[error("multiple variables found for name '{0}' in data")]
MultipleVariablesFound(String),
/// An error occurred while trying to apply json path to this JValue.
#[error("variable with path '{1}' not found in '{0}' with an error: '{2}'")]
JValueJsonPathError(JValue, String, JsonPathError),
/// An error occurred while trying to apply lambda to a value.
#[error(transparent)]
LambdaApplierError(#[from] LambdaError),
/// An error occurred while trying to apply json path to this stream generation with JValue's.
#[error("variable with path '{1}' not found in '{0:?}' with error: '{2}'")]
GenerationStreamJsonPathError(Vec<ResolvedCallResult>, String, JsonPathError),
/// An error occurred while trying to apply json path to this stream with JValue's.
#[error("variable with path '{1}' not found in '{0:?}' with error: '{2}'")]
StreamJsonPathError(Stream, String, JsonPathError),
/// An error occurred while trying to apply json path to an empty stream.
#[error("json path {0} is applied to an empty stream")]
EmptyStreamJsonPathError(String),
/// An error occurred while trying to apply lambda to an empty stream.
#[error("lambda is applied to an empty stream")]
EmptyStreamLambdaError,
/// Provided JValue has incompatible with target type.
#[error("expected JValue type '{1}', but got '{0}' JValue")]
@ -79,10 +70,6 @@ pub(crate) enum ExecutionError {
#[error("expected AValue type '{1}', but got '{0}' AValue")]
IncompatibleAValueType(String, String),
/// Multiple values found for such json path.
#[error("multiple variables found for this json path '{0}'")]
MultipleValuesInJsonPath(String),
/// Fold state wasn't found for such iterator name.
#[error("fold state not found for this iterable '{0}'")]
FoldStateNotFound(String),
@ -91,6 +78,10 @@ pub(crate) enum ExecutionError {
#[error("multiple fold states found for iterable '{0}'")]
MultipleFoldStates(String),
/// A fold instruction must iterate over array value.
#[error("lambda '{1}' returned non-array value '{0}' for fold instruction")]
FoldIteratesOverNonArray(JValue, String),
/// Errors encountered while shadowing non-scalar values.
#[error("variable with name '{0}' can't be shadowed, shadowing isn't supported for iterables")]
IterableShadowing(String),
@ -103,17 +94,6 @@ pub(crate) enum ExecutionError {
#[error("mismatch is used without corresponding xor")]
MismatchWithoutXorError,
/// This error type is produced by a mismatch to notify xor that compared values aren't equal.
#[error("jvalue '{0}' can't be flattened, to be flattened a jvalue should have an array type and consist of zero or one values")]
FlatteningError(JValue),
/// Json path is applied to scalar that have inappropriate type.
#[error(
"json path can't be applied to scalar '{0}',\
it could be applied only to streams and variables of array and object types"
)]
JsonPathVariableTypeError(JValue),
/// Errors bubbled from a trace handler.
#[error(transparent)]
TraceError(#[from] TraceHandlerError),
@ -134,6 +114,12 @@ pub(crate) enum ExecutionError {
CallResultsNotEmpty(CallResults),
}
impl From<LambdaError> for Rc<ExecutionError> {
fn from(e: LambdaError) -> Self {
Rc::new(ExecutionError::LambdaApplierError(e))
}
}
/// This macro is needed because it's impossible to implement
/// From<TraceHandlerError> for Rc<ExecutionError> due to the orphan rule.
#[macro_export]
@ -171,12 +157,12 @@ impl Joinable for ExecutionError {
log_join!(" waiting for an argument with name '{}'", var_name);
true
}
StreamJsonPathError(stream, json_path, _) => {
log_join!(" waiting for an argument with path '{}' on stream '{:?}'", json_path, stream);
LambdaApplierError(LambdaError::StreamNotHaveEnoughValues { stream_size, idx }) => {
log_join!(" waiting for an argument with idx '{}' on stream with size '{}'", idx, stream_size);
true
}
EmptyStreamJsonPathError(json_path) => {
log_join!(" waiting on empty stream for path '{}'", json_path);
EmptyStreamLambdaError => {
log_join!(" waiting on empty stream for path ");
true
}

View File

@ -0,0 +1,84 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::utils::*;
use super::LambdaError;
use super::LambdaResult;
use crate::JValue;
use crate::LambdaAST;
use air_lambda_parser::ValueAccessor;
pub(crate) struct StreamSelectResult<'value> {
pub(crate) result: &'value JValue,
pub(crate) tetraplet_idx: usize,
}
pub(crate) fn select_from_stream<'value>(
stream: impl ExactSizeIterator<Item = &'value JValue> + 'value,
lambda: &LambdaAST<'_>,
) -> LambdaResult<StreamSelectResult<'value>> {
use ValueAccessor::*;
let (prefix, body) = lambda.split_first();
let idx = match prefix {
ArrayAccess { idx } => *idx,
FieldAccess { field_name } => {
return Err(LambdaError::FieldAccessorAppliedToStream {
field_name: field_name.to_string(),
})
}
_ => unreachable!("should not execute if parsing succeeded. QED."),
};
let stream_size = stream.len();
let value = stream
.peekable()
.nth(idx as usize)
.ok_or(LambdaError::StreamNotHaveEnoughValues { stream_size, idx })?;
let result = select(value, body.iter())?;
let select_result = StreamSelectResult::new(result, idx);
Ok(select_result)
}
pub(crate) fn select<'value, 'algebra>(
mut value: &'value JValue,
lambda: impl Iterator<Item = &'algebra ValueAccessor<'algebra>>,
) -> LambdaResult<&'value JValue> {
for value_algebra in lambda {
match value_algebra {
ValueAccessor::ArrayAccess { idx } => {
value = try_jvalue_with_idx(value, *idx)?;
}
ValueAccessor::FieldAccess { field_name } => {
value = try_jvalue_with_field_name(value, *field_name)?;
}
ValueAccessor::Error => unreachable!("should not execute if parsing succeeded. QED."),
}
}
Ok(value)
}
impl<'value> StreamSelectResult<'value> {
pub(self) fn new(result: &'value JValue, tetraplet_idx: u32) -> Self {
Self {
result,
tetraplet_idx: tetraplet_idx as usize,
}
}
}
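A minimal, hypothetical usage sketch (not part of this commit) for the `select` helper above, assuming it runs inside this crate where `select` and `ValueAccessor` are visible:

```rust
#[cfg(test)]
mod select_sketch {
    use super::select;
    use air_lambda_parser::ValueAccessor;
    use serde_json::json;

    #[test]
    fn field_then_index() {
        // Equivalent of the AIR accessor chain `.$.args.[1]`.
        let value = json!({ "args": ["first", "second"] });
        let lambda = [
            ValueAccessor::FieldAccess { field_name: "args" },
            ValueAccessor::ArrayAccess { idx: 1 },
        ];
        let selected = select(&value, lambda.iter()).expect("both accessors should match");
        assert_eq!(selected, &json!("second"));
    }
}
```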

View File

@ -0,0 +1,40 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::JValue;
use thiserror::Error as ThisError;
#[derive(Debug, Clone, ThisError)]
pub(crate) enum LambdaError {
#[error("lambda is applied to a stream that have only '{stream_size}' elements, but '{idx}' requested")]
StreamNotHaveEnoughValues { stream_size: usize, idx: u32 },
#[error("field algebra (with field name = '{field_name}') can't be applied to a stream")]
FieldAccessorAppliedToStream { field_name: String },
#[error("value '{value}' is not an array-type to match array algebra with idx = '{idx}'")]
ArrayAccessorNotMatchValue { value: JValue, idx: u32 },
#[error("value '{value}' does not contain element for idx = '{idx}'")]
ValueNotContainSuchArrayIdx { value: JValue, idx: u32 },
#[error("value '{value}' is not an map-type to match field algebra with field_name = '{field_name}'")]
FieldAccessorNotMatchValue { value: JValue, field_name: String },
#[error("value '{value}' does not contain element with field name = '{field_name}'")]
JValueNotContainSuchField { value: JValue, field_name: String },
}

View File

@ -0,0 +1,25 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
mod applier;
mod errors;
mod utils;
pub(crate) type LambdaResult<T> = std::result::Result<T, LambdaError>;
pub(crate) use applier::select;
pub(crate) use applier::select_from_stream;
pub(crate) use errors::LambdaError;

View File

@ -0,0 +1,54 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::LambdaError;
use super::LambdaResult;
use crate::JValue;
pub(super) fn try_jvalue_with_idx(jvalue: &JValue, idx: u32) -> LambdaResult<&JValue> {
match jvalue {
JValue::Array(values) => values
.get(idx as usize)
.ok_or_else(|| LambdaError::ValueNotContainSuchArrayIdx {
value: jvalue.clone(),
idx,
}),
_ => Err(LambdaError::ArrayAccessorNotMatchValue {
value: jvalue.clone(),
idx,
}),
}
}
pub(super) fn try_jvalue_with_field_name<'value>(
jvalue: &'value JValue,
field_name: &str,
) -> LambdaResult<&'value JValue> {
match jvalue {
JValue::Object(values_map) => {
values_map
.get(field_name)
.ok_or_else(|| LambdaError::JValueNotContainSuchField {
value: jvalue.clone(),
field_name: field_name.to_string(),
})
}
_ => Err(LambdaError::FieldAccessorNotMatchValue {
value: jvalue.clone(),
field_name: field_name.to_string(),
}),
}
}

View File

@ -18,6 +18,7 @@ mod air;
mod boxed_value;
mod errors;
pub(crate) mod execution_context;
mod lambda_applier;
mod utils;
pub(super) use self::air::ExecutableInstruction;

View File

@ -22,11 +22,13 @@ use crate::execution_step::execution_context::LastErrorWithTetraplet;
use crate::execution_step::ExecutionError;
use crate::execution_step::ExecutionResult;
use crate::JValue;
use crate::LambdaAST;
use crate::SecurityTetraplet;
use air_parser::ast::AstVariable;
use air_parser::ast::CallInstrArgValue;
use air_parser::ast::LastErrorPath;
use serde_json::json;
use std::cell::RefCell;
use std::rc::Rc;
@ -47,9 +49,9 @@ pub(crate) fn resolve_to_args<'i>(
let variable = Variable::from_ast(variable);
prepare_variable(variable, ctx)
}
CallInstrArgValue::JsonPath(json_path) => {
let variable = Variable::from_ast(&json_path.variable);
apply_json_path(variable, json_path.path, json_path.should_flatten, ctx)
CallInstrArgValue::VariableWithLambda(var_with_lambda) => {
let variable = Variable::from_ast(&var_with_lambda.variable);
apply_lambda(variable, &var_with_lambda.lambda, ctx)
}
}
}
@ -126,30 +128,16 @@ pub(crate) fn resolve_ast_variable<'ctx, 'i>(
resolve_variable(variable, ctx)
}
pub(crate) fn apply_json_path<'i>(
pub(crate) fn apply_lambda<'i>(
variable: Variable<'_>,
json_path: &str,
should_flatten: bool,
lambda: &LambdaAST<'i>,
ctx: &ExecutionCtx<'i>,
) -> ExecutionResult<(JValue, SecurityTetraplets)> {
let resolved = resolve_variable(variable, ctx)?;
let (jvalue, tetraplets) = resolved.apply_json_path_with_tetraplets(json_path)?;
let (jvalue, tetraplets) = resolved.apply_lambda_with_tetraplets(lambda)?;
let jvalue = if should_flatten {
match jvalue.len() {
0 => JValue::Array(vec![]),
1 => jvalue[0].clone(),
_ => {
let jvalue = jvalue.into_iter().cloned().collect::<Vec<_>>();
return crate::exec_err!(ExecutionError::FlatteningError(JValue::Array(jvalue)));
}
}
} else {
let jvalue = jvalue.into_iter().cloned().collect::<Vec<_>>();
JValue::Array(jvalue)
};
Ok((jvalue, tetraplets))
// it's known that apply_lambda_with_tetraplets returns vec of one value
Ok((jvalue[0].clone(), tetraplets))
}
/// Constructs jvaluable result from scalars by name.

View File

@ -53,3 +53,4 @@ pub mod parser {
}
pub(crate) type JValue = serde_json::Value;
use air_lambda_parser::LambdaAST;

View File

@ -157,7 +157,7 @@ fn inner_fold_with_same_iterator() {
let result = call_vm!(vm, "", script, "", "");
assert_eq!(result.ret_code, 1012);
assert_eq!(result.ret_code, 1009);
}
#[test]
@ -239,7 +239,7 @@ fn fold_with_join() {
}
#[test]
fn json_path() {
fn lambda() {
let mut vm = create_avm(echo_call_service(), "A");
let mut set_variable_vm = create_avm(
set_variable_call_service(json!({ "array": ["1","2","3","4","5"] })),

View File

@ -192,11 +192,11 @@ fn match_without_xor() {
let result = call_vm!(set_variable_vm, "", &script, "", "");
let result = call_vm!(vm, "", &script, "", result.data);
assert_eq!(result.ret_code, 1014);
assert_eq!(result.ret_code, 1012);
let result = call_vm!(vm, "", script, "", result.data);
assert_eq!(result.ret_code, 1014);
assert_eq!(result.ret_code, 1012);
}
#[test]

View File

@ -143,11 +143,11 @@ fn mismatch_without_xor() {
let result = call_vm!(set_variable_vm, "asd", &script, "", "");
let result = call_vm!(vm, "asd", &script, "", result.data);
assert_eq!(result.ret_code, 1015);
assert_eq!(result.ret_code, 1013);
let result = call_vm!(vm, "asd", script, "", result.data);
assert_eq!(result.ret_code, 1015);
assert_eq!(result.ret_code, 1013);
}
#[test]

View File

@ -64,7 +64,7 @@ fn flattening_scalar_arrays() {
(call "{0}" ("" "") [] scalar_array)
(fold scalar_array.$.iterable! v
(seq
(call v.$.peer_id! (v.$.service_id! v.$.function_name!) [v.$.args[0]! v.$.args[1]!])
(call v.$.peer_id! (v.$.service_id! v.$.function_name!) [v.$.args.[0]! v.$.args.[1]!])
(next v)
)
)
@ -155,7 +155,7 @@ fn flattening_empty_values() {
r#"
(seq
(call "{0}" ("" "") [] $stream)
(call "{1}" ("" "") [$stream.$.args!]) ; here $stream.$.args returns an empty array
(call "{1}" ("" "") [$stream.$.[1]!]) ; here $stream.$.[1] returns an empty array
)
"#,
set_variable_peer_id, local_peer_id

View File

@ -172,7 +172,7 @@ fn dont_wait_on_json_path_on_scalars() {
assert_eq!(array_result.ret_code, 1004);
assert_eq!(
array_result.error_message,
r#"variable with path '$.[5]' not found in '[1,2,3,4,5]' with an error: 'json value not set'"#
r#"value '[1,2,3,4,5]' does not contain element for idx = '5'"#
);
let script = format!(
@ -191,6 +191,6 @@ fn dont_wait_on_json_path_on_scalars() {
assert_eq!(object_result.ret_code, 1004);
assert_eq!(
object_result.error_message,
r#"variable with path '$.non_exist_path' not found in '{"err_msg":"","is_authenticated":1,"ret_code":0}' with an error: 'json value not set'"#
r#"value '{"err_msg":"","is_authenticated":1,"ret_code":0}' does not contain element with field name = 'non_exist_path'"#
);
}

View File

@ -37,5 +37,5 @@ fn json_path_not_allowed_for_non_objects_and_arrays() {
let result = checked_call_vm!(set_variable_vm, "asd", &script, "", "");
let result = call_vm!(local_vm, "asd", script, "", result.data);
assert_eq!(result.ret_code, 1017);
assert_eq!(result.ret_code, 1004);
}

View File

@ -144,7 +144,7 @@ fn fold_json_path() {
};
let first_arg_tetraplet = SecurityTetraplet {
triplet: first_arg_triplet,
json_path: String::from("$.args"),
json_path: String::from(".args"),
};
let second_arg_triplet = ResolvedTriplet {

View File

@ -140,7 +140,7 @@ fn par_early_exit() {
];
let setter_3_malicious_data = raw_data_from_trace(setter_3_malicious_trace);
let init_result_3 = call_vm!(init, "", &script, init_result_2.data.clone(), setter_3_malicious_data);
assert_eq!(init_result_3.ret_code, 1018);
assert_eq!(init_result_3.ret_code, 1014);
let actual_trace = trace_from_result(&init_result_3);
let expected_trace = trace_from_result(&init_result_2);

View File

@ -13,6 +13,8 @@ categories = ["wasm"]
lalrpop = "0.19.6"
[dependencies]
air-lambda-parser = { path = "../lambda/parser" }
lalrpop-util = "0.19.6"
regex = "1.5.4"
codespan = "0.11.1"


@ -25,6 +25,7 @@ use criterion::Criterion;
use air_parser::AIRLexer;
use air_parser::AIRParser;
use air_parser::VariableValidator;
const SOURCE_CODE_BAD: &'static str = r#"(seq
(seq
@ -75,22 +76,6 @@ fn create_parser(c: &mut Criterion) {
c.bench_function("create_parser", move |b| b.iter(move || AIRParser::new()));
}
fn clone_parser(c: &mut Criterion) {
let parser = AIRParser::new();
c.bench_function("clone_parser", move |b| {
let parser = parser.clone();
b.iter(move || parser.clone())
});
}
fn clone_parser_rc(c: &mut Criterion) {
let parser = Rc::new(AIRParser::new());
c.bench_function("clone_parser_rc", move |b| {
let parser = parser.clone();
b.iter(move || parser.clone())
});
}
fn parse(c: &mut Criterion) {
let parser = Rc::new(AIRParser::new());
c.bench_function(
@ -98,11 +83,12 @@ fn parse(c: &mut Criterion) {
move |b| {
let parser = parser.clone();
b.iter(move || {
let mut validator = VariableValidator::new();
let lexer = AIRLexer::new(SOURCE_CODE_GOOD);
parser
.clone()
.parse("", &mut Vec::new(), lexer)
.parse("", &mut Vec::new(), &mut validator, lexer)
.expect("success")
})
},
@ -116,8 +102,12 @@ fn parse_to_fail(c: &mut Criterion) {
move |b| {
let parser = parser.clone();
b.iter(move || {
let mut validator = VariableValidator::new();
let lexer = AIRLexer::new(SOURCE_CODE_BAD);
parser.clone().parse("", &mut Vec::new(), lexer)
parser
.clone()
.parse("", &mut Vec::new(), &mut validator, lexer)
})
},
);
@ -138,11 +128,12 @@ fn parse_deep(c: &mut Criterion) {
let parser = parser.clone();
let code = &source_code[*i];
b.iter(move || {
let mut validator = VariableValidator::new();
let lexer = AIRLexer::new(code);
parser
.clone()
.parse("", &mut Vec::new(), lexer)
.parse("", &mut Vec::new(), &mut validator, lexer)
.expect("success")
});
},
@ -152,18 +143,20 @@ fn parse_deep(c: &mut Criterion) {
fn parse_dashboard_script(c: &mut Criterion) {
let parser = Rc::new(AIRParser::new());
const DASHBOARD_SCRIPT: &str = include_str!("../../../air/tests/scripts/dashboard.clj");
const DASHBOARD_SCRIPT: &str =
include_str!("../../../../air/tests/test_module/integration/scripts/dashboard.clj");
c.bench_function(
format!("parse {} bytes", DASHBOARD_SCRIPT.len()).as_str(),
move |b| {
let parser = parser.clone();
b.iter(move || {
let mut validator = VariableValidator::new();
let lexer = AIRLexer::new(DASHBOARD_SCRIPT);
parser
.clone()
.parse("", &mut Vec::new(), lexer)
.parse("", &mut Vec::new(), &mut validator, lexer)
.expect("success")
})
},


@ -30,7 +30,11 @@ pub use parser::ast;
pub use parser::parse;
pub use parser::AIRLexer;
pub use parser::AIRParser;
pub use parser::VariableValidator;
#[cfg(test)]
#[macro_use]
extern crate fstrings;
use air_lambda_parser::parse as parse_lambda;
use air_lambda_parser::LambdaAST;


@ -1,11 +1,11 @@
use crate::parser::ast::*;
use crate::parser::air_parser::make_flattened_error;
use crate::parser::air_parser::make_stream_iterable_error;
use crate::parser::ParserError;
use crate::parser::VariableValidator;
use crate::parser::Span;
use crate::parser::lexer::Token;
use air_lambda_parser::LambdaAST;
use lalrpop_util::ErrorRecovery;
use std::rc::Rc;
@ -26,17 +26,11 @@ Instr: Box<Instruction<'input>> = {
},
<left: @L> "(" ap <arg:ApArgument> <res:Output> ")" <right: @R> => {
if let ApArgument::JsonPath(json_path) = &arg {
if let AstVariable::Stream(_) = &json_path.variable {
let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten);
if let ApArgument::VariableWithLambda(vl) = &arg {
if let AstVariable::Stream(_) = &vl.variable {
let token = Token::VariableWithLambda(vl.variable.clone(), vl.lambda.clone());
errors.push(make_stream_iterable_error(left, token, right));
};
// Due the json path constraints json path should be flattened in a apply arguments.
if !json_path.should_flatten {
let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten);
errors.push(make_flattened_error(left, token, right));
}
}
let apply = Ap::new(arg, res);
@ -124,17 +118,11 @@ CallInstrValue: CallInstrValue<'input> = {
<l:Literal> => CallInstrValue::Literal(l),
<a:Alphanumeric> => CallInstrValue::Variable(AstVariable::Scalar(a)),
<s:Stream> => CallInstrValue::Variable(AstVariable::Stream(s)),
<left: @L> <j:JsonPath> <right: @R> => {
let variable = j.0;
let path = j.1;
let should_flatten = j.2;
// Due the json path constraints json path should be flattened in a call triplet.
if !should_flatten {
let token = Token::VariableWithJsonPath(variable.clone(), path, should_flatten);
errors.push(make_flattened_error(left, token, right));
}
<left: @L> <vl:VariableWithLambda> <right: @R> => {
let variable = vl.0;
let lambda = vl.1;
CallInstrValue::JsonPath(JsonPath::new(variable, path, should_flatten))
CallInstrValue::VariableWithLambda(VariableWithLambda::new(variable, lambda))
},
InitPeerId => CallInstrValue::InitPeerId,
}
@ -145,7 +133,7 @@ CallInstrArgValue: CallInstrArgValue<'input> = {
<s:Literal> => CallInstrArgValue::Literal(s),
<v:Alphanumeric> => CallInstrArgValue::Variable(AstVariable::Scalar(v)),
<v:Stream> => CallInstrArgValue::Variable(AstVariable::Stream(v)),
<j:JsonPath> => CallInstrArgValue::JsonPath(JsonPath::new(j.0, j.1, j.2)),
<vl:VariableWithLambda> => CallInstrArgValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)),
<n:Number> => CallInstrArgValue::Number(n),
<b:Boolean> => CallInstrArgValue::Boolean(b),
InitPeerId => CallInstrArgValue::InitPeerId,
@ -155,7 +143,7 @@ CallInstrArgValue: CallInstrArgValue<'input> = {
ApArgument: ApArgument<'input> = {
<a:Alphanumeric> => ApArgument::ScalarVariable(a),
<j:JsonPath> => ApArgument::JsonPath(JsonPath::new(j.0, j.1, j.2)),
<vl:VariableWithLambda> => ApArgument::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)),
<n:Number> => ApArgument::Number(n),
<b:Boolean> => ApArgument::Boolean(b),
EmptyArray => ApArgument::EmptyArray,
@ -165,22 +153,21 @@ ApArgument: ApArgument<'input> = {
ScalarIterable: IterableScalarValue<'input> = {
<v:Alphanumeric> => IterableScalarValue::ScalarVariable(v),
<l:@L> <j:JsonPath> <r:@R> => {
<l:@L> <vl:VariableWithLambda> <r:@R> => {
use crate::parser::air::AstVariable::*;
let variable = j.0;
let path = j.1;
let should_flatten = j.2;
let variable = vl.0;
let lambda = vl.1;
let scalar_name = match variable {
Stream(name) => {
let token = Token::VariableWithJsonPath(variable, path, should_flatten);
let token = Token::VariableWithLambda(variable, lambda.clone());
errors.push(make_stream_iterable_error(l, token, r));
name
}
Scalar(name) => name,
};
IterableScalarValue::JsonPath { scalar_name, path, should_flatten }
IterableScalarValue::VariableWithLambda { scalar_name, lambda }
}
}
@ -192,7 +179,7 @@ Matchable: MatchableValue<'input> = {
<b:Boolean> => MatchableValue::Boolean(b),
<n:Number> => MatchableValue::Number(n),
EmptyArray => MatchableValue::EmptyArray,
<j:JsonPath> => MatchableValue::JsonPath(JsonPath::new(j.0, j.1, j.2)),
<vl:VariableWithLambda> => MatchableValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)),
}
extern {
@ -209,7 +196,7 @@ extern {
Literal => Token::StringLiteral(<&'input str>),
Alphanumeric => Token::Alphanumeric(<&'input str>),
Stream => Token::Stream(<&'input str>),
JsonPath => Token::VariableWithJsonPath(<AstVariable<'input>>, <&'input str>, <bool>),
VariableWithLambda => Token::VariableWithLambda(<AstVariable<'input>>, <LambdaAST<'input>>),
Number => Token::Number(<Number>),
Boolean => Token::Boolean(<bool>),


@ -1,12 +1,12 @@
// auto-generated: "lalrpop 0.19.6"
// sha3: bcd19a478f6a51d6d816bf63f4b9bd31c8eb721de974cf77626c92ea437d66
// sha3: dce2765d82d72fa2e95c6d8a8a3bd84838971ba5ae153715dd724be731cecdba
use crate::parser::ast::*;
use crate::parser::air_parser::make_flattened_error;
use crate::parser::air_parser::make_stream_iterable_error;
use crate::parser::ParserError;
use crate::parser::VariableValidator;
use crate::parser::Span;
use crate::parser::lexer::Token;
use air_lambda_parser::LambdaAST;
use lalrpop_util::ErrorRecovery;
use std::rc::Rc;
#[allow(unused_extern_crates)]
@ -21,12 +21,12 @@ mod __parse__AIR {
#![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports, unused_parens)]
use crate::parser::ast::*;
use crate::parser::air_parser::make_flattened_error;
use crate::parser::air_parser::make_stream_iterable_error;
use crate::parser::ParserError;
use crate::parser::VariableValidator;
use crate::parser::Span;
use crate::parser::lexer::Token;
use air_lambda_parser::LambdaAST;
use lalrpop_util::ErrorRecovery;
use std::rc::Rc;
#[allow(unused_extern_crates)]
@ -42,9 +42,9 @@ mod __parse__AIR {
Variant0(Token<'input>),
Variant1(&'input str),
Variant2(bool),
Variant3((AstVariable<'input>, &'input str, bool)),
Variant4(LastErrorPath),
Variant5(Number),
Variant3(LastErrorPath),
Variant4(Number),
Variant5((AstVariable<'input>, LambdaAST<'input>)),
Variant6(__lalrpop_util::ErrorRecovery<usize, Token<'input>, ParserError>),
Variant7(CallInstrArgValue<'input>),
Variant8(alloc::vec::Vec<CallInstrArgValue<'input>>),
@ -64,15 +64,15 @@ mod __parse__AIR {
// State 0
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32,
// State 1
0, 0, 0, 0, 35, 36, 37, 0, 38, 39, 40, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 35, 36, 37, 0, 38, 39, 40, 0, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 2
12, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
12, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 3
0, 0, 0, 0, 50, 0, 0, 0, 51, 0, 0, 0, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 50, 0, 0, 0, 0, 0, 0, 51, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 4
0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 5
0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 6
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32,
// State 7
@ -80,15 +80,15 @@ mod __parse__AIR {
// State 8
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32,
// State 9
0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 10
19, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
19, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 11
0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 12
0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 13
0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 14
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32,
// State 15
@ -98,9 +98,9 @@ mod __parse__AIR {
// State 17
0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 18
0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 19
0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 20
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32,
// State 21
@ -110,11 +110,11 @@ mod __parse__AIR {
// State 23
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32,
// State 24
0, 83, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 83, 0, 0, 64, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 25
0, 0, 0, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 26
0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 27
0, 0, 0, 104, 87, 88, 89, 90, 91, 92, 93, 94, 95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 28
@ -130,57 +130,57 @@ mod __parse__AIR {
// State 33
0, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 34
0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 35
0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 36
0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 37
0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 38
0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 39
0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -11, 0, 0, 0, 0, 0, 0, -11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 40
0, 0, 0, 0, -11, 0, 0, 0, 0, 0, 0, 0, -11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 41
-61, 0, 0, 0, -61, 0, 0, -61, -61, 0, -61, 0, -61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-61, 0, 0, 0, -61, 0, 0, -61, 0, -61, 0, -61, -61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 42
-62, 0, 0, 0, -62, 0, 0, -62, -62, 0, -62, 0, -62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-62, 0, 0, 0, -62, 0, 0, -62, 0, -62, 0, -62, -62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 43
-29, -29, -29, 0, -29, 0, 0, -29, -29, 0, -29, 0, -29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-29, -29, -29, 0, -29, 0, 0, -29, 0, -29, 0, -29, -29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 44
-32, -32, -32, 0, -32, 0, 0, -32, -32, 0, -32, 0, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-32, -32, -32, 0, -32, 0, 0, -32, 0, -32, 0, -32, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 45
-31, -31, -31, 0, -31, 0, 0, -31, -31, 0, -31, 0, -31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-28, -28, -28, 0, -28, 0, 0, -28, 0, -28, 0, -28, -28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 46
-28, -28, -28, 0, -28, 0, 0, -28, -28, 0, -28, 0, -28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-30, -30, -30, 0, -30, 0, 0, -30, 0, -30, 0, -30, -30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 47
-30, -30, -30, 0, -30, 0, 0, -30, -30, 0, -30, 0, -30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-31, -31, -31, 0, -31, 0, 0, -31, 0, -31, 0, -31, -31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 48
0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 49
0, 0, 0, 0, -64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 50
0, 0, 0, 0, -65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 51
0, 0, 0, 0, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 51
0, 0, 0, 0, -65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 52
-50, 0, 0, 0, -50, -50, -50, -50, -50, 0, -50, -50, -50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -50,
-50, 0, 0, 0, -50, -50, -50, -50, 0, -50, -50, -50, -50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -50,
// State 53
-53, 0, 0, 0, -53, -53, -53, -53, -53, 0, -53, -53, -53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -53,
-53, 0, 0, 0, -53, -53, -53, -53, 0, -53, -53, -53, -53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -53,
// State 54
-55, 0, 0, 0, -55, -55, -55, -55, -55, 0, -55, -55, -55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -55,
-55, 0, 0, 0, -55, -55, -55, -55, 0, -55, -55, -55, -55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -55,
// State 55
-49, 0, 0, 0, -49, -49, -49, -49, -49, 0, -49, -49, -49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -49,
-49, 0, 0, 0, -49, -49, -49, -49, 0, -49, -49, -49, -49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -49,
// State 56
-56, 0, 0, 0, -56, -56, -56, -56, -56, 0, -56, -56, -56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -56,
-52, 0, 0, 0, -52, -52, -52, -52, 0, -52, -52, -52, -52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -52,
// State 57
-52, 0, 0, 0, -52, -52, -52, -52, -52, 0, -52, -52, -52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -52,
-54, 0, 0, 0, -54, -54, -54, -54, 0, -54, -54, -54, -54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -54,
// State 58
-54, 0, 0, 0, -54, -54, -54, -54, -54, 0, -54, -54, -54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -54,
-51, 0, 0, 0, -51, -51, -51, -51, 0, -51, -51, -51, -51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -51,
// State 59
-51, 0, 0, 0, -51, -51, -51, -51, -51, 0, -51, -51, -51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -51,
-56, 0, 0, 0, -56, -56, -56, -56, 0, -56, -56, -56, -56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -56,
// State 60
0, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 61
@ -206,7 +206,7 @@ mod __parse__AIR {
// State 71
-38, -38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -38,
// State 72
0, -66, 0, 0, -66, 0, 0, -66, -66, 0, -66, 0, -66, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, -66, 0, 0, -66, 0, 0, -66, 0, -66, 0, -66, -66, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 73
0, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 74
@ -232,7 +232,7 @@ mod __parse__AIR {
// State 84
0, 0, 0, -16, -16, -16, -16, -16, -16, -16, -16, -16, -16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 85
0, -17, 0, 0, -17, 0, 0, 0, 0, 0, 0, 0, -17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, -17, 0, 0, -17, 0, 0, 0, 0, 0, 0, -17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 86
0, 0, 0, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 87
@ -242,19 +242,19 @@ mod __parse__AIR {
// State 89
0, 0, 0, -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 90
0, 0, 0, -22, -22, -22, -22, -22, -22, -22, -22, -22, -22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 91
0, 0, 0, -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 92
// State 91
0, 0, 0, -19, -19, -19, -19, -19, -19, -19, -19, -19, -19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 93
// State 92
0, 0, 0, -23, -23, -23, -23, -23, -23, -23, -23, -23, -23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 94
// State 93
0, 0, 0, -21, -21, -21, -21, -21, -21, -21, -21, -21, -21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 94
0, 0, 0, -22, -22, -22, -22, -22, -22, -22, -22, -22, -22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 95
0, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 96
-63, 0, 0, 0, -63, 0, 0, -63, -63, 0, -63, 0, -63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-63, 0, 0, 0, -63, 0, 0, -63, 0, -63, 0, -63, -63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 97
-42, -42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -42,
// State 98
@ -268,7 +268,7 @@ mod __parse__AIR {
// State 102
0, 0, 0, -5, -5, -5, -5, -5, -5, -5, -5, -5, -5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 103
0, -18, 0, 0, -18, 0, 0, 0, 0, 0, 0, 0, -18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, -18, 0, 0, -18, 0, 0, 0, 0, 0, 0, -18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
// State 104
0, 0, -34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];
@ -554,11 +554,11 @@ mod __parse__AIR {
r###"Boolean"###,
r###"EmptyArray"###,
r###"InitPeerId"###,
r###"JsonPath"###,
r###"LastError"###,
r###"Literal"###,
r###"Number"###,
r###"Stream"###,
r###"VariableWithLambda"###,
r###"ap"###,
r###"call"###,
r###"fold"###,
@ -698,11 +698,11 @@ mod __parse__AIR {
Token::Boolean(_) if true => Some(5),
Token::SquareBrackets if true => Some(6),
Token::InitPeerId if true => Some(7),
Token::VariableWithJsonPath(_, _, _) if true => Some(8),
Token::LastError(_) if true => Some(9),
Token::StringLiteral(_) if true => Some(10),
Token::Number(_) if true => Some(11),
Token::Stream(_) if true => Some(12),
Token::LastError(_) if true => Some(8),
Token::StringLiteral(_) if true => Some(9),
Token::Number(_) if true => Some(10),
Token::Stream(_) if true => Some(11),
Token::VariableWithLambda(_, _) if true => Some(12),
Token::Ap if true => Some(13),
Token::Call if true => Some(14),
Token::Fold if true => Some(15),
@ -728,7 +728,7 @@ mod __parse__AIR {
{
match __token_index {
0 | 1 | 2 | 3 | 6 | 7 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 => __Symbol::Variant0(__token),
4 | 10 | 12 => match __token {
4 | 9 | 11 => match __token {
Token::Alphanumeric(__tok0) | Token::StringLiteral(__tok0) | Token::Stream(__tok0) if true => __Symbol::Variant1(__tok0),
_ => unreachable!(),
},
@ -737,15 +737,15 @@ mod __parse__AIR {
_ => unreachable!(),
},
8 => match __token {
Token::VariableWithJsonPath(__tok0, __tok1, __tok2) if true => __Symbol::Variant3((__tok0, __tok1, __tok2)),
Token::LastError(__tok0) if true => __Symbol::Variant3(__tok0),
_ => unreachable!(),
},
9 => match __token {
Token::LastError(__tok0) if true => __Symbol::Variant4(__tok0),
10 => match __token {
Token::Number(__tok0) if true => __Symbol::Variant4(__tok0),
_ => unreachable!(),
},
11 => match __token {
Token::Number(__tok0) if true => __Symbol::Variant5(__tok0),
12 => match __token {
Token::VariableWithLambda(__tok0, __tok1) if true => __Symbol::Variant5((__tok0, __tok1)),
_ => unreachable!(),
},
_ => unreachable!(),
@ -1476,14 +1476,14 @@ mod __parse__AIR {
fn __symbol_type_mismatch() -> ! {
panic!("symbol type mismatch")
}
fn __pop_Variant3<
fn __pop_Variant5<
'input,
>(
__symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>
) -> (usize, (AstVariable<'input>, &'input str, bool), usize)
) -> (usize, (AstVariable<'input>, LambdaAST<'input>), usize)
{
match __symbols.pop() {
Some((__l, __Symbol::Variant3(__v), __r)) => (__l, __v, __r),
Some((__l, __Symbol::Variant5(__v), __r)) => (__l, __v, __r),
_ => __symbol_type_mismatch()
}
}
@ -1564,14 +1564,14 @@ mod __parse__AIR {
_ => __symbol_type_mismatch()
}
}
fn __pop_Variant4<
fn __pop_Variant3<
'input,
>(
__symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>
) -> (usize, LastErrorPath, usize)
{
match __symbols.pop() {
Some((__l, __Symbol::Variant4(__v), __r)) => (__l, __v, __r),
Some((__l, __Symbol::Variant3(__v), __r)) => (__l, __v, __r),
_ => __symbol_type_mismatch()
}
}
@ -1586,14 +1586,14 @@ mod __parse__AIR {
_ => __symbol_type_mismatch()
}
}
fn __pop_Variant5<
fn __pop_Variant4<
'input,
>(
__symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>
) -> (usize, Number, usize)
{
match __symbols.pop() {
Some((__l, __Symbol::Variant5(__v), __r)) => (__l, __v, __r),
Some((__l, __Symbol::Variant4(__v), __r)) => (__l, __v, __r),
_ => __symbol_type_mismatch()
}
}
@ -1897,8 +1897,8 @@ mod __parse__AIR {
_: core::marker::PhantomData<(&'err (), &'input (), &'v ())>,
) -> (usize, usize)
{
// ApArgument = JsonPath => ActionFn(40);
let __sym0 = __pop_Variant3(__symbols);
// ApArgument = VariableWithLambda => ActionFn(40);
let __sym0 = __pop_Variant5(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action40::<>(input, errors, validator, __sym0);
@ -1919,7 +1919,7 @@ mod __parse__AIR {
) -> (usize, usize)
{
// ApArgument = Number => ActionFn(41);
let __sym0 = __pop_Variant5(__symbols);
let __sym0 = __pop_Variant4(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action41::<>(input, errors, validator, __sym0);
@ -2003,7 +2003,7 @@ mod __parse__AIR {
) -> (usize, usize)
{
// ApArgument = LastError => ActionFn(45);
let __sym0 = __pop_Variant4(__symbols);
let __sym0 = __pop_Variant3(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action45::<>(input, errors, validator, __sym0);
@ -2154,8 +2154,8 @@ mod __parse__AIR {
_: core::marker::PhantomData<(&'err (), &'input (), &'v ())>,
) -> (usize, usize)
{
// CallInstrArgValue = JsonPath => ActionFn(33);
let __sym0 = __pop_Variant3(__symbols);
// CallInstrArgValue = VariableWithLambda => ActionFn(33);
let __sym0 = __pop_Variant5(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action33::<>(input, errors, validator, __sym0);
@ -2176,7 +2176,7 @@ mod __parse__AIR {
) -> (usize, usize)
{
// CallInstrArgValue = Number => ActionFn(34);
let __sym0 = __pop_Variant5(__symbols);
let __sym0 = __pop_Variant4(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action34::<>(input, errors, validator, __sym0);
@ -2260,7 +2260,7 @@ mod __parse__AIR {
) -> (usize, usize)
{
// CallInstrArgValue = LastError => ActionFn(38);
let __sym0 = __pop_Variant4(__symbols);
let __sym0 = __pop_Variant3(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action38::<>(input, errors, validator, __sym0);
@ -2343,8 +2343,8 @@ mod __parse__AIR {
_: core::marker::PhantomData<(&'err (), &'input (), &'v ())>,
) -> (usize, usize)
{
// CallInstrValue = JsonPath => ActionFn(78);
let __sym0 = __pop_Variant3(__symbols);
// CallInstrValue = VariableWithLambda => ActionFn(78);
let __sym0 = __pop_Variant5(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action78::<>(input, errors, validator, __sym0);
@ -2895,7 +2895,7 @@ mod __parse__AIR {
) -> (usize, usize)
{
// Matchable = Number => ActionFn(53);
let __sym0 = __pop_Variant5(__symbols);
let __sym0 = __pop_Variant4(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action53::<>(input, errors, validator, __sym0);
@ -2936,8 +2936,8 @@ mod __parse__AIR {
_: core::marker::PhantomData<(&'err (), &'input (), &'v ())>,
) -> (usize, usize)
{
// Matchable = JsonPath => ActionFn(55);
let __sym0 = __pop_Variant3(__symbols);
// Matchable = VariableWithLambda => ActionFn(55);
let __sym0 = __pop_Variant5(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action55::<>(input, errors, validator, __sym0);
@ -3128,8 +3128,8 @@ mod __parse__AIR {
_: core::marker::PhantomData<(&'err (), &'input (), &'v ())>,
) -> (usize, usize)
{
// ScalarIterable = JsonPath => ActionFn(86);
let __sym0 = __pop_Variant3(__symbols);
// ScalarIterable = VariableWithLambda => ActionFn(86);
let __sym0 = __pop_Variant5(__symbols);
let __start = __sym0.0.clone();
let __end = __sym0.2.clone();
let __nt = super::__action86::<>(input, errors, validator, __sym0);
@ -3240,17 +3240,11 @@ fn __action3<
) -> Box<Instruction<'input>>
{
{
if let ApArgument::JsonPath(json_path) = &arg {
if let AstVariable::Stream(_) = &json_path.variable {
let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten);
if let ApArgument::VariableWithLambda(vl) = &arg {
if let AstVariable::Stream(_) = &vl.variable {
let token = Token::VariableWithLambda(vl.variable.clone(), vl.lambda.clone());
errors.push(make_stream_iterable_error(left, token, right));
};
// Due the json path constraints json path should be flattened in a apply arguments.
if !json_path.should_flatten {
let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten);
errors.push(make_flattened_error(left, token, right));
}
}
let apply = Ap::new(arg, res);
@ -3703,21 +3697,15 @@ fn __action27<
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
(_, left, _): (usize, usize, usize),
(_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize),
(_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
(_, right, _): (usize, usize, usize),
) -> CallInstrValue<'input>
{
{
let variable = j.0;
let path = j.1;
let should_flatten = j.2;
// Due the json path constraints json path should be flattened in a call triplet.
if !should_flatten {
let token = Token::VariableWithJsonPath(variable.clone(), path, should_flatten);
errors.push(make_flattened_error(left, token, right));
}
let variable = vl.0;
let lambda = vl.1;
CallInstrValue::JsonPath(JsonPath::new(variable, path, should_flatten))
CallInstrValue::VariableWithLambda(VariableWithLambda::new(variable, lambda))
}
}
@ -3805,10 +3793,10 @@ fn __action33<
input: &'input str,
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
(_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize),
(_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
) -> CallInstrArgValue<'input>
{
CallInstrArgValue::JsonPath(JsonPath::new(j.0, j.1, j.2))
CallInstrArgValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1))
}
#[allow(unused_variables)]
@ -3910,10 +3898,10 @@ fn __action40<
input: &'input str,
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
(_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize),
(_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
) -> ApArgument<'input>
{
ApArgument::JsonPath(JsonPath::new(j.0, j.1, j.2))
ApArgument::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1))
}
#[allow(unused_variables)]
@ -4016,26 +4004,25 @@ fn __action47<
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
(_, l, _): (usize, usize, usize),
(_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize),
(_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
(_, r, _): (usize, usize, usize),
) -> IterableScalarValue<'input>
{
{
use crate::parser::air::AstVariable::*;
let variable = j.0;
let path = j.1;
let should_flatten = j.2;
let variable = vl.0;
let lambda = vl.1;
let scalar_name = match variable {
Stream(name) => {
let token = Token::VariableWithJsonPath(variable, path, should_flatten);
let token = Token::VariableWithLambda(variable, lambda.clone());
errors.push(make_stream_iterable_error(l, token, r));
name
}
Scalar(name) => name,
};
IterableScalarValue::JsonPath { scalar_name, path, should_flatten }
IterableScalarValue::VariableWithLambda { scalar_name, lambda }
}
}
@ -4153,10 +4140,10 @@ fn __action55<
input: &'input str,
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
(_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize),
(_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
) -> MatchableValue<'input>
{
MatchableValue::JsonPath(JsonPath::new(j.0, j.1, j.2))
MatchableValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1))
}
#[allow(unused_variables)]
@ -4434,7 +4421,7 @@ fn __action69<
input: &'input str,
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
__0: (usize, (AstVariable<'input>, &'input str, bool), usize),
__0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
__1: (usize, usize, usize),
) -> CallInstrValue<'input>
{
@ -4764,7 +4751,7 @@ fn __action77<
input: &'input str,
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
__0: (usize, (AstVariable<'input>, &'input str, bool), usize),
__0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
__1: (usize, usize, usize),
) -> IterableScalarValue<'input>
{
@ -4797,7 +4784,7 @@ fn __action78<
input: &'input str,
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
__0: (usize, (AstVariable<'input>, &'input str, bool), usize),
__0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
) -> CallInstrValue<'input>
{
let __start0 = __0.2.clone();
@ -5111,7 +5098,7 @@ fn __action86<
input: &'input str,
errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, ParserError>>,
validator: &'v mut VariableValidator<'input>,
__0: (usize, (AstVariable<'input>, &'input str, bool), usize),
__0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize),
) -> IterableScalarValue<'input>
{
let __start0 = __0.2.clone();


@ -51,11 +51,11 @@ pub fn parse(air_script: &str) -> Result<Box<Instruction<'_>>, String> {
match result {
Ok(r) if errors.is_empty() => Ok(r),
Ok(_) => Err(report_errors(file_id, files, errors)),
Err(err) => Err(report_errors(
Err(error) => Err(report_errors(
file_id,
files,
vec![ErrorRecovery {
error: err,
error,
dropped_tokens: vec![],
}],
)),
@ -124,10 +124,7 @@ fn parser_error_to_label(file_id: usize, error: ParserError) -> Label<usize> {
match error {
LexerError(error) => lexical_error_to_label(file_id, error),
CallArgsNotFlattened(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
JsonPathAppliedToStream(start, end) => {
LambdaAppliedToStream(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
UndefinedIterable(start, end, _) => {
@ -157,7 +154,7 @@ fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label<usize> {
EmptyVariableOrConst(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
InvalidJsonPath(start, end) => {
InvalidLambda(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
UnallowedCharInNumber(start, end) => {
@ -169,6 +166,9 @@ fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label<usize> {
ParseFloatError(start, end, _) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
LambdaParserError(start, end, _) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
LastErrorPathError(start, end, _) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
@ -195,18 +195,10 @@ macro_rules! make_user_error(
}}
);
pub(super) fn make_flattened_error(
start_pos: usize,
token: Token<'_>,
end_pos: usize,
) -> ErrorRecovery<usize, Token<'_>, ParserError> {
make_user_error!(CallArgsNotFlattened, start_pos, token, end_pos)
}
pub(super) fn make_stream_iterable_error(
start_pos: usize,
token: Token<'_>,
end_pos: usize,
) -> ErrorRecovery<usize, Token<'_>, ParserError> {
make_user_error!(JsonPathAppliedToStream, start_pos, token, end_pos)
make_user_error!(LambdaAppliedToStream, start_pos, token, end_pos)
}


@ -21,6 +21,7 @@ pub use crate::parser::lexer::AstVariable;
pub use crate::parser::lexer::LastErrorPath;
pub use crate::parser::lexer::Number;
use air_lambda_parser::LambdaAST;
use serde::Deserialize;
use serde::Serialize;
@ -66,7 +67,7 @@ pub struct Call<'i> {
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub enum ApArgument<'i> {
ScalarVariable(&'i str),
JsonPath(JsonPath<'i>),
VariableWithLambda(VariableWithLambda<'i>),
Number(Number),
Boolean(bool),
Literal(&'i str),
@ -85,7 +86,7 @@ pub enum CallInstrValue<'i> {
InitPeerId,
Literal(&'i str),
Variable(AstVariable<'i>),
JsonPath(JsonPath<'i>),
VariableWithLambda(VariableWithLambda<'i>),
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
@ -97,16 +98,15 @@ pub enum CallInstrArgValue<'i> {
Boolean(bool),
EmptyArray, // only empty arrays are allowed now
Variable(AstVariable<'i>),
JsonPath(JsonPath<'i>),
VariableWithLambda(VariableWithLambda<'i>),
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub enum IterableScalarValue<'i> {
ScalarVariable(&'i str),
JsonPath {
VariableWithLambda {
scalar_name: &'i str,
path: &'i str,
should_flatten: bool,
lambda: LambdaAST<'i>,
},
}
@ -118,7 +118,7 @@ pub enum MatchableValue<'i> {
Boolean(bool),
EmptyArray,
Variable(AstVariable<'i>),
JsonPath(JsonPath<'i>),
VariableWithLambda(VariableWithLambda<'i>),
}
#[derive(Serialize, Debug, PartialEq, Clone)]
@ -171,8 +171,8 @@ pub struct Next<'i>(pub &'i str);
pub struct Null;
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub struct JsonPath<'i> {
pub struct VariableWithLambda<'i> {
pub variable: AstVariable<'i>,
pub path: &'i str,
pub should_flatten: bool,
#[serde(borrow)]
pub lambda: LambdaAST<'i>,
}


@ -15,6 +15,7 @@
*/
use super::*;
use air_lambda_parser::ValueAccessor;
impl<'i> Ap<'i> {
pub fn new(argument: ApArgument<'i>, result: AstVariable<'i>) -> Self {
@ -22,12 +23,25 @@ impl<'i> Ap<'i> {
}
}
impl<'i> JsonPath<'i> {
pub fn new(variable: AstVariable<'i>, path: &'i str, should_flatten: bool) -> Self {
Self {
variable,
path,
should_flatten,
impl<'i> VariableWithLambda<'i> {
pub fn new(variable: AstVariable<'i>, lambda: LambdaAST<'i>) -> Self {
Self { variable, lambda }
}
// This function is unsafe: the lambda must be non-empty. It is used only in tests.
pub fn from_raw_algebras(variable: AstVariable<'i>, lambda: Vec<ValueAccessor<'i>>) -> Self {
let lambda = unsafe { LambdaAST::new_unchecked(lambda) };
Self { variable, lambda }
}
}
impl<'i> IterableScalarValue<'i> {
// This function is unsafe: the lambda must be non-empty. It is used only in tests.
pub fn new_vl(scalar_name: &'i str, lambda: Vec<ValueAccessor<'i>>) -> Self {
let lambda = unsafe { LambdaAST::new_unchecked(lambda) };
Self::VariableWithLambda {
scalar_name,
lambda,
}
}
}


@ -24,13 +24,13 @@ impl fmt::Display for CallInstrArgValue<'_> {
match self {
InitPeerId => write!(f, "%init_peer_id%"),
LastError(json_path) => write!(f, "%last_error%{}", json_path),
LastError(error_path) => write!(f, "%last_error%{}", error_path),
Literal(str) => write!(f, r#""{}""#, str),
Number(number) => write!(f, "{}", number),
Boolean(bool) => write!(f, "{}", bool),
EmptyArray => write!(f, "[]"),
Variable(str) => write!(f, "{}", str),
JsonPath(json_path) => write!(f, "{}", json_path),
VariableWithLambda(vl) => write!(f, "{}", vl),
}
}
}
@ -43,7 +43,7 @@ impl fmt::Display for CallInstrValue<'_> {
InitPeerId => write!(f, "%init_peer_id%"),
Literal(str) => write!(f, r#""{}""#, str),
Variable(str) => write!(f, "{}", str),
JsonPath(json_path) => write!(f, "{}", json_path),
VariableWithLambda(vl) => write!(f, "{}", vl),
}
}
}
@ -54,17 +54,10 @@ impl fmt::Display for IterableScalarValue<'_> {
match self {
ScalarVariable(str) => write!(f, "{}", str),
JsonPath {
VariableWithLambda {
scalar_name,
path,
should_flatten,
} => write!(
f,
"{}.{}{}",
scalar_name,
path,
maybe_flatten_char(*should_flatten)
),
lambda,
} => write!(f, "{}.$.{:?}", scalar_name, lambda),
}
}
}
@ -80,7 +73,7 @@ impl fmt::Display for MatchableValue<'_> {
Boolean(bool) => write!(f, "{}", bool),
EmptyArray => write!(f, "[]"),
Variable(str) => write!(f, "{}", str),
JsonPath(json_path) => write!(f, "{}", json_path),
VariableWithLambda(vl) => write!(f, "{}", vl),
}
}
}
@ -164,7 +157,7 @@ impl fmt::Display for ApArgument<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ApArgument::ScalarVariable(name) => write!(f, "{}", name),
ApArgument::JsonPath(json_path) => write!(f, "{}", json_path),
ApArgument::VariableWithLambda(vl) => write!(f, "{}", vl),
ApArgument::LastError(error_path) => write!(f, "{}", error_path),
ApArgument::Number(value) => write!(f, "{}", value),
ApArgument::Boolean(value) => write!(f, "{}", value),
@ -228,22 +221,8 @@ impl fmt::Display for Next<'_> {
}
}
impl fmt::Display for JsonPath<'_> {
impl fmt::Display for VariableWithLambda<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}.{}{}",
self.variable,
self.path,
maybe_flatten_char(self.should_flatten)
)
}
}
fn maybe_flatten_char(should_flatten: bool) -> &'static str {
if should_flatten {
"!"
} else {
""
write!(f, "{}.$.{:?}", self.variable, self.lambda,)
}
}


@ -22,13 +22,8 @@ pub enum ParserError {
#[error(transparent)]
LexerError(#[from] LexerError),
#[error(
"while using json path in this position, result should be flattened, add ! at the end"
)]
CallArgsNotFlattened(usize, usize),
#[error("json path can't be applied to streams in this position")]
JsonPathAppliedToStream(usize, usize),
#[error("lambda can't be applied to streams in this position")]
LambdaAppliedToStream(usize, usize),
#[error("variable '{2}' wasn't defined")]
UndefinedVariable(usize, usize, String),


@ -215,7 +215,7 @@ fn parse_last_error(input: &str, start_pos: usize) -> LexerResult<Token<'_>> {
let last_error_size = LAST_ERROR.len();
let last_error_path = match &input[last_error_size..] {
"" => LastErrorPath::None,
// The second option with ! is needed for compatibility with flattening in "standard" json path used in AIR.
// The second option with ! is needed for compatibility with flattening in "standard" lambda used in AIR.
// However, the version without ! just returns an error, because the opposite is unsound.
".$.instruction" | ".$.instruction!" => LastErrorPath::Instruction,
".$.msg" | ".$.msg!" => LastErrorPath::Message,


@ -18,6 +18,7 @@ use super::AstVariable;
use super::LexerError;
use super::LexerResult;
use super::Token;
use crate::LambdaAST;
use std::convert::TryInto;
use std::iter::Peekable;
@ -210,7 +211,7 @@ impl<'input> CallVariableParser<'input> {
fn try_parse_as_json_path(&mut self) -> LexerResult<()> {
if !self.json_path_allowed_char() && !self.try_parse_as_flattening() {
let error_pos = self.pos_in_string_to_parse();
return Err(LexerError::InvalidJsonPath(error_pos, error_pos));
return Err(LexerError::InvalidLambda(error_pos, error_pos));
}
Ok(())
@ -281,6 +282,22 @@ impl<'input> CallVariableParser<'input> {
}
}
fn try_to_variable_and_lambda(
&self,
pos: usize,
) -> LexerResult<(&'input str, LambdaAST<'input>)> {
// +2 to ignore ".$" prefix
let lambda = crate::parse_lambda(&self.string_to_parse[pos + 2..]).map_err(|e| {
LexerError::LambdaParserError(
self.start_pos + pos,
self.start_pos + self.string_to_parse.len(),
e.to_string(),
)
})?;
Ok((&self.string_to_parse[0..pos], lambda))
}
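
For orientation, a small worked example of the slicing above (the input string and dot position are hypothetical, but follow the `first_dot_met_pos` convention used by this lexer):

    fn main() {
        // for "value.$.field[1]" the first dot sits at index 5,
        // so the variable part is "value" and the lambda parser receives ".field[1]"
        let input = "value.$.field[1]";
        let pos: usize = 5;
        assert_eq!(&input[..pos], "value");
        // +2 skips the ".$" prefix, as in try_to_variable_and_lambda above
        assert_eq!(&input[pos + 2..], ".field[1]");
    }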
fn to_token(&self) -> LexerResult<Token<'input>> {
use super::token::UnparsedNumber;
@ -303,29 +320,12 @@ impl<'input> CallVariableParser<'input> {
}
}
(false, true) => {
let json_path_start_pos = self.state.first_dot_met_pos.unwrap();
let should_flatten = self.state.flattening_met;
let (variable, json_path) =
to_variable_and_path(self.string_to_parse, json_path_start_pos, should_flatten);
let lambda_start_pos = self.state.first_dot_met_pos.unwrap();
let (variable, lambda) = self.try_to_variable_and_lambda(lambda_start_pos)?;
let variable = self.to_variable(variable);
Ok(Token::VariableWithJsonPath(
variable,
json_path,
should_flatten,
))
Ok(Token::VariableWithLambda(variable, lambda))
}
}
}
}
fn to_variable_and_path(str: &str, pos: usize, should_flatten: bool) -> (&str, &str) {
let json_path = if should_flatten {
// -1 to not include the flattening symbol ! to the resulted json path
&str[pos + 1..str.len() - 1]
} else {
&str[pos + 1..]
};
(&str[0..pos], json_path)
}


@ -36,8 +36,8 @@ pub enum LexerError {
#[error("this variable or constant shouldn't have empty name")]
EmptyVariableOrConst(usize, usize),
#[error("invalid character in json path")]
InvalidJsonPath(usize, usize),
#[error("invalid character in lambda")]
InvalidLambda(usize, usize),
#[error("a digit could contain only digits or one dot")]
UnallowedCharInNumber(usize, usize),
@ -48,13 +48,17 @@ pub enum LexerError {
#[error("{2}")]
ParseFloatError(usize, usize, #[source] ParseFloatError),
// TODO: use LambdaParserError directly here (it'll require introducing a lifetime)
#[error("{2}")]
LambdaParserError(usize, usize, String),
#[error("{2} is an incorrect path for %last_error%, only .$.instruction, .$.msg, and .$.peer_id are allowed")]
LastErrorPathError(usize, usize, String),
#[error("this float is too big, a float could contain less than 12 digits")]
TooBigFloat(usize, usize),
#[error("leading dot without any symbols before - please write 0 if it's float or variable name if it's json path")]
#[error("leading dot without any symbols before - please write 0 if it's float or variable name if it's a lambda")]
LeadingDot(usize, usize),
}


@ -22,6 +22,8 @@ use super::LexerError;
use super::Number;
use super::Token;
use air_lambda_parser::{LambdaAST, ValueAccessor};
fn run_lexer(input: &str) -> Vec<Spanned<Token<'_>, usize, LexerError>> {
let lexer = AIRLexer::new(input);
lexer.collect()
@ -262,34 +264,38 @@ fn too_big_float_number() {
}
#[test]
fn json_path() {
// this json path contains all allowed in json path characters
const JSON_PATH: &str = r#"value.$[$@[]():?.*,"]"#;
fn lambda() {
// this lambda contains all characters allowed in a lambda
const LAMBDA: &str = r#"value.$.field[1]"#;
let variable = AstVariable::Scalar("value");
lexer_test(
JSON_PATH,
LAMBDA,
Single(Ok((
0,
Token::VariableWithJsonPath(variable, r#"$[$@[]():?.*,"]"#, false),
JSON_PATH.len(),
Token::VariableWithLambda(variable, unsafe {
LambdaAST::new_unchecked(vec![
ValueAccessor::FieldAccess {
field_name: "field",
},
ValueAccessor::ArrayAccess { idx: 1 },
])
}),
LAMBDA.len(),
))),
);
}
#[test]
fn json_path_numbers() {
const JSON_PATH: &str = r#"12345.$[$@[]():?.*,"]"#;
fn lambda_path_numbers() {
const LAMBDA: &str = r#"12345.$[$@[]():?.*,"]"#;
lexer_test(LAMBDA, Single(Err(LexerError::UnallowedCharInNumber(6, 6))));
const LAMBDA1: &str = r#"+12345.$[$@[]():?.*,"]"#;
lexer_test(
JSON_PATH,
Single(Err(LexerError::UnallowedCharInNumber(6, 6))),
);
const JSON_PATH1: &str = r#"+12345.$[$@[]():?.*,"]"#;
lexer_test(
JSON_PATH1,
LAMBDA1,
Single(Err(LexerError::UnallowedCharInNumber(7, 7))),
);
}
@ -320,7 +326,7 @@ fn unclosed_quote() {
#[test]
fn bad_value() {
// value contains ! that only allowed at the end of a json path
// value contains ! which is only allowed at the end of a lambda expression
const INVALID_VALUE: &str = r#"val!ue.$[$@[]():?.*,"\]"#;
lexer_test(
@ -328,31 +334,25 @@ fn bad_value() {
Single(Err(LexerError::IsNotAlphanumeric(3, 3))),
);
// value contains ! that only allowed at the end of a json path
// value contains ! which is only allowed at the end of a lambda expression
const INVALID_VALUE2: &str = r#"value.$![$@[]():?.*,"\]"#;
lexer_test(
INVALID_VALUE2,
Single(Err(LexerError::InvalidJsonPath(7, 7))),
);
lexer_test(INVALID_VALUE2, Single(Err(LexerError::InvalidLambda(7, 7))));
}
#[test]
fn invalid_json_path() {
const INVALID_JSON_PATH: &str = r#"value.$%"#;
fn invalid_lambda() {
const INVALID_LAMBDA: &str = r#"value.$%"#;
lexer_test(
INVALID_JSON_PATH,
Single(Err(LexerError::InvalidJsonPath(7, 7))),
);
lexer_test(INVALID_LAMBDA, Single(Err(LexerError::InvalidLambda(7, 7))));
}
#[test]
fn invalid_json_path_numbers() {
// this json path contains all allowed in json path charactes
const JSON_PATH: &str = r#"+12345$[$@[]():?.*,"!]"#;
fn invalid_lambda_numbers() {
// this lambda contains all characters allowed in a lambda
const LAMBDA: &str = r#"+12345$[$@[]():?.*,"!]"#;
lexer_test(JSON_PATH, Single(Err(LexerError::IsNotAlphanumeric(6, 6))));
lexer_test(LAMBDA, Single(Err(LexerError::IsNotAlphanumeric(6, 6))));
}
#[test]


@ -18,6 +18,7 @@ mod traits;
use super::LexerError;
use super::LexerResult;
use crate::LambdaAST;
use serde::Deserialize;
use serde::Serialize;
@ -33,7 +34,7 @@ pub enum Token<'input> {
StringLiteral(&'input str),
Alphanumeric(&'input str),
Stream(&'input str),
VariableWithJsonPath(AstVariable<'input>, &'input str, bool),
VariableWithLambda(AstVariable<'input>, LambdaAST<'input>),
Number(Number),
Boolean(bool),


@ -32,9 +32,9 @@ pub mod tests;
pub use self::air_parser::parse;
pub use air::AIRParser;
pub use lexer::AIRLexer;
pub use validator::VariableValidator;
use errors::ParserError;
use validator::VariableValidator;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Span {


@ -25,6 +25,8 @@ use ast::CallInstrArgValue;
use ast::CallInstrValue;
use ast::Instruction;
use air_lambda_parser::ValueAccessor;
use fstrings::f;
use lalrpop_util::ParseError;
use std::rc::Rc;
@ -140,11 +142,12 @@ fn parse_json_path() {
"#;
let instruction = parse(source_code);
let expected = Instruction::Call(Call {
peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new(
peer_part: PeerPk(CallInstrValue::VariableWithLambda(
ast::VariableWithLambda::from_raw_algebras(
Scalar("id"),
"$.a",
true,
))),
vec![ValueAccessor::FieldAccess { field_name: "a" }],
),
)),
function_part: FuncName(CallInstrValue::Literal("f")),
args: Rc::new(vec![
CallInstrArgValue::Literal("hello"),
@ -289,9 +292,9 @@ fn parse_undefined_stream_without_json_path() {
}
#[test]
fn parse_undefined_stream_with_json_path() {
fn parse_undefined_stream_with_lambda() {
let source_code = r#"
(call "" "" [$stream.$json_path])
(call "" "" [$stream.$.json_path])
"#;
let lexer = crate::AIRLexer::new(source_code);
@ -324,27 +327,37 @@ fn parse_json_path_complex() {
let source_code = r#"
(seq
(call m.$.[1]! "f" [] void)
(call m.$.abc["c"].cde[a][0].cde["bcd"]! "f" [] void)
(call m.$.abc[0].cde[1][0].cde[1]! "f" [] void)
)
"#;
let instruction = parse(source_code);
let expected = seq(
Instruction::Call(Call {
peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new(
peer_part: PeerPk(CallInstrValue::VariableWithLambda(
ast::VariableWithLambda::from_raw_algebras(
Scalar("m"),
"$.[1]",
true,
))),
vec![ValueAccessor::ArrayAccess { idx: 1 }],
),
)),
function_part: FuncName(CallInstrValue::Literal("f")),
args: Rc::new(vec![]),
output: Variable(Scalar("void")),
}),
Instruction::Call(Call {
peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new(
peer_part: PeerPk(CallInstrValue::VariableWithLambda(
ast::VariableWithLambda::from_raw_algebras(
Scalar("m"),
r#"$.abc["c"].cde[a][0].cde["bcd"]"#,
true,
))),
vec![
ValueAccessor::FieldAccess { field_name: "abc" },
ValueAccessor::ArrayAccess { idx: 0 },
ValueAccessor::FieldAccess { field_name: "cde" },
ValueAccessor::ArrayAccess { idx: 1 },
ValueAccessor::ArrayAccess { idx: 0 },
ValueAccessor::FieldAccess { field_name: "cde" },
ValueAccessor::ArrayAccess { idx: 1 },
],
),
)),
function_part: FuncName(CallInstrValue::Literal("f")),
args: Rc::new(vec![]),
output: Variable(Scalar("void")),
@ -360,26 +373,37 @@ fn json_path_square_braces() {
use ast::PeerPart::*;
let source_code = r#"
(call u.$["peer_id"]! ("return" "") [u.$["peer_id"].cde[0]["abc"].abc u.$["name"]] $void)
(call u.$.peer_id! ("return" "") [u.$[1].cde[0][0].abc u.$.name] $void)
"#;
let instruction = parse(source_code);
let expected = Instruction::Call(Call {
peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new(
peer_part: PeerPk(CallInstrValue::VariableWithLambda(
ast::VariableWithLambda::from_raw_algebras(
Scalar("u"),
r#"$["peer_id"]"#,
true,
))),
vec![ValueAccessor::FieldAccess {
field_name: "peer_id",
}],
),
)),
function_part: ServiceIdWithFuncName(
CallInstrValue::Literal("return"),
CallInstrValue::Literal(""),
),
args: Rc::new(vec![
CallInstrArgValue::JsonPath(ast::JsonPath::new(
CallInstrArgValue::VariableWithLambda(ast::VariableWithLambda::from_raw_algebras(
Scalar("u"),
r#"$["peer_id"].cde[0]["abc"].abc"#,
false,
vec![
ValueAccessor::ArrayAccess { idx: 1 },
ValueAccessor::FieldAccess { field_name: "cde" },
ValueAccessor::ArrayAccess { idx: 0 },
ValueAccessor::ArrayAccess { idx: 0 },
ValueAccessor::FieldAccess { field_name: "abc" },
],
)),
CallInstrArgValue::VariableWithLambda(ast::VariableWithLambda::from_raw_algebras(
Scalar("u"),
vec![ValueAccessor::FieldAccess { field_name: "name" }],
)),
CallInstrArgValue::JsonPath(ast::JsonPath::new(Scalar("u"), r#"$["name"]"#, false)),
]),
output: Variable(Stream("$void")),
});
@ -886,20 +910,19 @@ fn ap_with_last_error() {
#[test]
fn fold_json_path() {
use ast::FoldScalar;
use ast::IterableScalarValue::*;
use ast::IterableScalarValue;
let source_code = r#"
; comment
(fold members.$.["users"] m (null)) ;;; comment
(fold members.$.[123321] m (null)) ;;; comment
;;; comment
"#;
let instruction = parse(source_code);
let expected = Instruction::FoldScalar(FoldScalar {
iterable: JsonPath {
scalar_name: "members",
path: "$.[\"users\"]",
should_flatten: false,
},
iterable: IterableScalarValue::new_vl(
"members",
vec![ValueAccessor::ArrayAccess { idx: 123321 }],
),
iterator: "m",
instruction: Rc::new(null()),
});
@ -925,20 +948,24 @@ fn fold_on_stream() {
#[test]
fn comments() {
use ast::FoldScalar;
use ast::IterableScalarValue::*;
use ast::IterableScalarValue;
let source_code = r#"
; comment
(fold members.$.["users"] m (null)) ;;; comment ;;?()()
(fold members.$.field[1] m (null)) ;;; comment ;;?()()
;;; comme;?!.$. nt[][][][()()()null;$::!
"#;
let instruction = parse(source_code);
let expected = Instruction::FoldScalar(FoldScalar {
iterable: JsonPath {
scalar_name: "members",
path: "$.[\"users\"]",
should_flatten: false,
iterable: IterableScalarValue::new_vl(
"members",
vec![
ValueAccessor::FieldAccess {
field_name: "field",
},
ValueAccessor::ArrayAccess { idx: 1 },
],
),
iterator: "m",
instruction: Rc::new(null()),
});


@ -50,7 +50,7 @@ pub struct VariableValidator<'i> {
}
impl<'i> VariableValidator<'i> {
pub(super) fn new() -> Self {
pub fn new() -> Self {
<_>::default()
}
@ -96,7 +96,7 @@ impl<'i> VariableValidator<'i> {
pub(super) fn met_ap(&mut self, ap: &Ap<'i>, span: Span) {
match &ap.argument {
ApArgument::ScalarVariable(name) => self.met_variable(&AstVariable::Scalar(name), span),
ApArgument::JsonPath(json_path) => self.met_variable(&json_path.variable, span),
ApArgument::VariableWithLambda(vl) => self.met_variable(&vl.variable, span),
ApArgument::Number(_)
| ApArgument::Boolean(_)
| ApArgument::Literal(_)
@ -151,7 +151,7 @@ impl<'i> VariableValidator<'i> {
fn met_instr_value(&mut self, instr_value: &CallInstrValue<'i>, span: Span) {
match instr_value {
CallInstrValue::JsonPath(json_path) => self.met_variable(&json_path.variable, span),
CallInstrValue::VariableWithLambda(vl) => self.met_variable(&vl.variable, span),
CallInstrValue::Variable(variable) => self.met_variable(variable, span),
_ => {}
}
@ -159,7 +159,7 @@ impl<'i> VariableValidator<'i> {
fn met_instr_arg_value(&mut self, instr_arg_value: &CallInstrArgValue<'i>, span: Span) {
match instr_arg_value {
CallInstrArgValue::JsonPath(json_path) => self.met_variable(&json_path.variable, span),
CallInstrArgValue::VariableWithLambda(vl) => self.met_variable(&vl.variable, span),
CallInstrArgValue::Variable(variable) => {
// skipping streams here allows treating non-defined streams as empty arrays
if let AstVariable::Scalar(_) = variable {
@ -224,7 +224,7 @@ impl<'i> VariableValidator<'i> {
| MatchableValue::Literal(_)
| MatchableValue::EmptyArray => {}
MatchableValue::Variable(variable) => self.met_variable(variable, span),
MatchableValue::JsonPath(json_path) => self.met_variable(&json_path.variable, span),
MatchableValue::VariableWithLambda(vl) => self.met_variable(&vl.variable, span),
}
}
@ -242,7 +242,7 @@ impl<'i> VariableValidator<'i> {
fn met_iterable_value(&mut self, iterable_value: &IterableScalarValue<'i>, span: Span) {
match iterable_value {
IterableScalarValue::JsonPath { scalar_name, .. } => {
IterableScalarValue::VariableWithLambda { scalar_name, .. } => {
self.met_variable(&AstVariable::Scalar(scalar_name), span)
}
IterableScalarValue::ScalarVariable(name) => {


@ -0,0 +1,20 @@
[package]
name = "air-lambda-ast"
description = "Parser of AIR value algebra values"
version = "0.1.0"
authors = ["Fluence Labs"]
edition = "2018"
license = "Apache-2.0"
publish = false
keywords = ["fluence", "air", "lambda", "ast"]
categories = ["wasm"]
[lib]
name = "air_lambda_ast"
path = "src/lib.rs"
[dependencies]
non-empty-vec = { version = "0.2.0", features = ["serde"] }
serde = { version = "1.0.118", features = ["rc", "derive"] }
serde_json = "1.0.61"


@ -0,0 +1,3 @@
## AIR lambda AST
AIR supports lambda expressions that can be applied both to scalars and streams. This crate defines an AST for such expressions; it has an array-like structure, because such a structure is easier to handle.
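
For illustration, here is a minimal sketch (hypothetical values, using only the `ValueAccessor` type introduced by this crate) of how a lambda such as `.abc.[0].cde` maps onto that array-like structure:

    use air_lambda_ast::ValueAccessor;

    fn main() {
        // the lambda `.abc.[0].cde` as an ordered list of accessors
        let accessors = [
            ValueAccessor::FieldAccess { field_name: "abc" },
            ValueAccessor::ArrayAccess { idx: 0 },
            ValueAccessor::FieldAccess { field_name: "cde" },
        ];
        assert_eq!(accessors.len(), 3);
    }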


@ -0,0 +1,36 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
mod traits;
use non_empty_vec::NonEmpty;
use serde::Deserialize;
use serde::Serialize;
pub type LambdaAST<'input> = NonEmpty<ValueAccessor<'input>>;
#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
pub enum ValueAccessor<'input> {
// (.)?[$idx]
ArrayAccess { idx: u32 },
// .field
FieldAccess { field_name: &'input str },
// needed to allow the parser to catch all errors from a lambda expression without stopping
// on the very first one; this variant is guaranteed not to be present in a lambda, though.
Error,
}

View File

@ -0,0 +1,31 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::*;
use std::fmt;
impl fmt::Display for ValueAccessor<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use ValueAccessor::*;
match self {
ArrayAccess { idx } => write!(f, ".[{}]", idx),
FieldAccess { field_name } => write!(f, ".{}", field_name),
Error => write!(f, "a parser error occurred while parsing lambda expression"),
}
}
}

View File

@ -0,0 +1,38 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#![deny(
dead_code,
nonstandard_style,
unused_imports,
unused_mut,
unused_variables,
unused_unsafe,
unreachable_patterns
)]
mod ast;
pub use ast::*;
pub fn format_ast(lambda_ast: &LambdaAST<'_>) -> String {
let mut formatted_ast = String::new();
for algebra in lambda_ast.iter() {
formatted_ast.push_str(&algebra.to_string());
}
formatted_ast
}
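// A minimal usage sketch of `format_ast` (illustrative only, not part of the
// original file); it builds the AST with `new_unchecked`, the same way the
// parser crate does.
#[test]
fn format_ast_sketch() {
    let accessors = vec![
        ValueAccessor::ArrayAccess { idx: 1 },
        ValueAccessor::FieldAccess { field_name: "peer_id" },
    ];
    // SAFETY: the vector above is non-empty.
    let ast: LambdaAST<'_> = unsafe { LambdaAST::new_unchecked(accessors) };
    assert_eq!(format_ast(&ast), ".[1].peer_id");
}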

View File

@ -0,0 +1,33 @@
[package]
name = "air-lambda-parser"
description = "Parser of an AIR lambda"
version = "0.1.0"
authors = ["Fluence Labs"]
edition = "2018"
license = "Apache-2.0"
publish = false
keywords = ["fluence", "air", "parser", "lalrpop"]
categories = ["wasm"]
[lib]
name = "air_lambda_parser"
path = "src/lib.rs"
[build-dependencies]
lalrpop = "0.19.6"
[dependencies]
air-lambda-ast = { path = "../ast" }
lalrpop-util = "0.19.6"
regex = "1.5.4"
codespan = "0.11.1"
codespan-reporting = "0.11.1"
multimap = "0.8.3"
# TODO: hide serde behind a feature
serde = { version = "1.0.118", features = ["rc", "derive"] }
serde_json = "1.0.61"
itertools = "0.10.0"
thiserror = "1.0.23"

View File

@ -0,0 +1,24 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
extern crate lalrpop;
fn main() {
lalrpop::Configuration::new()
.generate_in_source_tree()
.process()
.unwrap();
}

View File

@ -0,0 +1,36 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#![deny(
dead_code,
nonstandard_style,
unused_imports,
unused_mut,
unused_variables,
unused_unsafe,
unreachable_patterns
)]
mod parser;
pub use parser::parse;
pub use parser::AlgebraLexer;
pub use parser::LambdaParser;
pub use parser::LambdaParserError;
pub use parser::LexerError;
pub use air_lambda_ast::LambdaAST;
pub use air_lambda_ast::ValueAccessor;

View File

@ -0,0 +1,51 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::parser::lexer::LexerError;
use crate::parser::lexer::Token;
use lalrpop_util::ErrorRecovery;
use lalrpop_util::ParseError;
use thiserror::Error as ThisError;
#[derive(ThisError, Debug, Clone, PartialEq, Eq)]
pub enum LambdaParserError<'input> {
#[error(transparent)]
LexerError(#[from] LexerError),
#[error("provided lambda expression doesn't contain any algebras")]
EmptyLambda,
#[error("{0:?}")]
ParseError(ParseError<usize, Token<'input>, LexerError>),
#[error("{0:?}")]
RecoveryErrors(Vec<ErrorRecovery<usize, Token<'input>, LexerError>>),
}
impl<'input> From<ParseError<usize, Token<'input>, LexerError>> for LambdaParserError<'input> {
fn from(e: ParseError<usize, Token<'input>, LexerError>) -> Self {
Self::ParseError(e)
}
}
impl<'input> From<Vec<ErrorRecovery<usize, Token<'input>, LexerError>>>
for LambdaParserError<'input>
{
fn from(errors: Vec<ErrorRecovery<usize, Token<'input>, LexerError>>) -> Self {
Self::RecoveryErrors(errors)
}
}

View File

@ -0,0 +1,52 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::lexer::AlgebraLexer;
use super::va_lambda;
use super::LambdaParserError;
use super::LambdaParserResult;
use crate::LambdaAST;
use crate::ValueAccessor;
use va_lambda::LambdaParser;
// The parser is cached in a thread local to reuse its internal regexes, which are expensive to instantiate
// See also https://github.com/lalrpop/lalrpop/issues/269
thread_local!(static PARSER: LambdaParser = LambdaParser::new());
/// Parse a lambda expression to `LambdaAST`.
pub fn parse(lambda: &str) -> LambdaParserResult<'_, LambdaAST> {
PARSER.with(|parser| {
let mut errors = Vec::new();
let lexer = AlgebraLexer::new(lambda);
let result = parser.parse(lambda, &mut errors, lexer);
match result {
Ok(algebras) if errors.is_empty() => try_to_lambda(algebras),
Ok(_) => Err(errors.into()),
Err(e) => Err(e.into()),
}
})
}
fn try_to_lambda(algebras: Vec<ValueAccessor>) -> LambdaParserResult<'_, LambdaAST> {
if algebras.is_empty() {
return Err(LambdaParserError::EmptyLambda);
}
let ast = unsafe { LambdaAST::new_unchecked(algebras) };
Ok(ast)
}
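// An illustrative usage sketch (not part of the original file); the lambda
// below is hypothetical, but uses only the syntax the grammar defines.
#[test]
fn parse_usage_sketch() {
    let ast = parse(".peers.[0].peer_id").expect("a well-formed lambda should parse");
    assert_eq!(ast.iter().count(), 3);
}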

View File

@ -0,0 +1,108 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::errors::LexerError;
use super::token::Token;
use crate::parser::lexer::is_air_alphanumeric;
use std::iter::Peekable;
use std::str::CharIndices;
const ARRAY_IDX_BASE: u32 = 10;
pub type Spanned<Token, Loc, Error> = Result<(Loc, Token, Loc), Error>;
pub struct AlgebraLexer<'input> {
input: &'input str,
chars: Peekable<CharIndices<'input>>,
}
impl<'input> Iterator for AlgebraLexer<'input> {
type Item = Spanned<Token<'input>, usize, LexerError>;
fn next(&mut self) -> Option<Self::Item> {
self.next_token()
}
}
impl<'input> AlgebraLexer<'input> {
pub fn new(input: &'input str) -> Self {
Self {
input,
chars: input.char_indices().peekable(),
}
}
pub fn next_token(&mut self) -> Option<Spanned<Token<'input>, usize, LexerError>> {
self.chars.next().map(|(start_pos, ch)| match ch {
'[' => Ok((start_pos, Token::OpenSquareBracket, start_pos + 1)),
']' => Ok((start_pos, Token::CloseSquareBracket, start_pos + 1)),
'.' => Ok((start_pos, Token::Selector, start_pos + 1)),
d if d.is_digit(ARRAY_IDX_BASE) => self.tokenize_arrays_idx(start_pos),
s if is_air_alphanumeric(s) => self.tokenize_field_name(start_pos),
'!' => Ok((start_pos, Token::FlatteningSign, start_pos + 1)),
_ => Err(LexerError::UnexpectedSymbol(start_pos, start_pos + 1)),
})
}
fn tokenize_arrays_idx(
&mut self,
start_pos: usize,
) -> Spanned<Token<'input>, usize, LexerError> {
let array_idx = self.tokenize_until(start_pos, |ch| ch.is_digit(ARRAY_IDX_BASE));
match array_idx
.parse::<u32>()
.map_err(|e| LexerError::ParseIntError(start_pos, start_pos + array_idx.len(), e))
{
Ok(idx) => Ok((start_pos, Token::ArrayIdx(idx), start_pos + array_idx.len())),
Err(e) => Err(e),
}
}
fn tokenize_field_name(
&mut self,
start_pos: usize,
) -> Spanned<Token<'input>, usize, LexerError> {
let field_name = self.tokenize_until(start_pos, is_air_alphanumeric);
Ok((
start_pos,
Token::FieldName(field_name),
start_pos + field_name.len(),
))
}
fn tokenize_until(
&mut self,
start_pos: usize,
condition: impl Fn(char) -> bool,
) -> &'input str {
let mut end_pos = start_pos;
while let Some((pos, ch)) = self.chars.peek() {
if !condition(*ch) {
break;
}
end_pos = *pos;
self.chars.next();
}
&self.input[start_pos..end_pos + 1]
}
}

View File

@ -0,0 +1,28 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use thiserror::Error as ThisError;
use std::num::ParseIntError;
#[derive(ThisError, Debug, Clone, PartialEq, Eq)]
pub enum LexerError {
#[error("unexpected symbol for value algebra")]
UnexpectedSymbol(usize, usize),
#[error("{2}")]
ParseIntError(usize, usize, #[source] ParseIntError),
}

View File

@ -0,0 +1,29 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
mod algebra_lexer;
mod errors;
mod token;
mod utils;
#[cfg(test)]
mod tests;
pub use algebra_lexer::AlgebraLexer;
pub use errors::LexerError;
pub use token::Token;
pub(self) use utils::is_air_alphanumeric;

View File

@ -0,0 +1,52 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::algebra_lexer::Spanned;
use super::AlgebraLexer;
use super::LexerError;
use super::Token;
fn run_lexer(input: &str) -> Vec<Spanned<Token<'_>, usize, LexerError>> {
let lexer = AlgebraLexer::new(input);
lexer.collect()
}
#[test]
fn array_access() {
let array_access: &str = ".[0]";
let actual = run_lexer(array_access);
let expected = vec![
Spanned::Ok((0, Token::Selector, 1)),
Spanned::Ok((1, Token::OpenSquareBracket, 2)),
Spanned::Ok((2, Token::ArrayIdx(0), 3)),
Spanned::Ok((3, Token::CloseSquareBracket, 4)),
];
assert_eq!(actual, expected);
}
#[test]
fn field_access() {
let field_name = "some_field_name";
let field_access = format!(".{}", field_name);
let actual = run_lexer(&field_access);
let expected = vec![
Spanned::Ok((0, Token::Selector, 1)),
Spanned::Ok((1, Token::FieldName(field_name), 1 + field_name.len())),
];
assert_eq!(actual, expected);
}
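// An illustrative sketch (not part of the original test suite): an unsupported
// symbol is reported as a lexer error carrying its span.
#[test]
fn unexpected_symbol() {
    let actual = run_lexer("@");
    let expected = vec![Spanned::Err(LexerError::UnexpectedSymbol(0, 1))];
    assert_eq!(actual, expected);
}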

View File

@ -0,0 +1,33 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use serde::Deserialize;
use serde::Serialize;
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum Token<'input> {
// .
Selector,
OpenSquareBracket,
CloseSquareBracket,
ArrayIdx(u32),
FieldName(&'input str),
// !
FlatteningSign,
}

View File

@ -0,0 +1,20 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TODO: extract this into a separate crate
pub(super) fn is_air_alphanumeric(ch: char) -> bool {
ch.is_alphanumeric() || ch == '_' || ch == '-'
}

View File

@ -0,0 +1,42 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
pub mod lambda_parser;
mod lexer;
// va_lambda is auto-generated, so exclude it from `cargo fmt -- --check` and `cargo clippy`
#[rustfmt::skip]
#[allow(clippy::all)]
mod va_lambda;
mod errors;
#[cfg(test)]
pub mod tests;
pub type LambdaParserResult<'input, T> = std::result::Result<T, LambdaParserError<'input>>;
pub use errors::LambdaParserError;
pub use lambda_parser::parse;
pub use lexer::AlgebraLexer;
pub use lexer::LexerError;
pub use va_lambda::LambdaParser;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Span {
pub left: usize,
pub right: usize,
}

View File

@ -0,0 +1,134 @@
/*
* Copyright 2021 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::parser::LambdaParser;
use crate::ValueAccessor;
thread_local!(static TEST_PARSER: LambdaParser = LambdaParser::new());
fn parse(source_code: &str) -> Vec<ValueAccessor<'_>> {
TEST_PARSER.with(|parser| {
let mut errors = Vec::new();
let lexer = crate::parser::AlgebraLexer::new(source_code);
parser
.parse(source_code, &mut errors, lexer)
.expect("parsing should be successful")
})
}
#[test]
fn field_access() {
let field_name = "some_field_name";
let lambda = format!(".{}", field_name);
let actual = parse(&lambda);
let expected = vec![ValueAccessor::FieldAccess { field_name }];
assert_eq!(actual, expected);
}
#[test]
fn field_access_with_flattening() {
let field_name = "some_field_name";
let lambda = format!(".{}!", field_name);
let actual = parse(&lambda);
let expected = vec![ValueAccessor::FieldAccess { field_name }];
assert_eq!(actual, expected);
}
#[test]
fn array_access() {
let idx = 0;
let lambda = format!(".[{}]", idx);
let actual = parse(&lambda);
let expected = vec![ValueAccessor::ArrayAccess { idx }];
assert_eq!(actual, expected);
}
#[test]
fn array_access_with_flattening() {
let idx = 0;
let lambda = format!(".[{}]!", idx);
let actual = parse(&lambda);
let expected = vec![ValueAccessor::ArrayAccess { idx }];
assert_eq!(actual, expected);
}
#[test]
fn field_array_access() {
let field_name = "some_field_name";
let idx = 1;
let lambda = format!(".{}.[{}]", field_name, idx);
let actual = parse(&lambda);
let expected = vec![
ValueAccessor::FieldAccess { field_name },
ValueAccessor::ArrayAccess { idx },
];
assert_eq!(actual, expected);
}
#[test]
fn field_array_access_without_dot() {
let field_name = "some_field_name";
let idx = 1;
let lambda = format!(".{}[{}]", field_name, idx);
let actual = parse(&lambda);
let expected = vec![
ValueAccessor::FieldAccess { field_name },
ValueAccessor::ArrayAccess { idx },
];
assert_eq!(actual, expected);
}
#[test]
fn array_field_access() {
let field_name = "some_field_name";
let idx = 1;
let lambda = format!(".[{}].{}", idx, field_name);
let actual = parse(&lambda);
let expected = vec![
ValueAccessor::ArrayAccess { idx },
ValueAccessor::FieldAccess { field_name },
];
assert_eq!(actual, expected);
}
#[test]
fn many_array_field_access() {
let field_name_1 = "some_field_name_1";
let field_name_2 = "some_field_name_2";
let idx_1 = 1;
let idx_2 = u32::MAX;
let lambda = format!(".[{}].{}.[{}].{}", idx_1, field_name_1, idx_2, field_name_2);
let actual = parse(&lambda);
let expected = vec![
ValueAccessor::ArrayAccess { idx: idx_1 },
ValueAccessor::FieldAccess {
field_name: field_name_1,
},
ValueAccessor::ArrayAccess { idx: idx_2 },
ValueAccessor::FieldAccess {
field_name: field_name_2,
},
];
assert_eq!(actual, expected);
}

View File

@ -0,0 +1,39 @@
use crate::ValueAccessor;
use crate::parser::lexer::LexerError;
use crate::parser::lexer::Token;
use lalrpop_util::ErrorRecovery;
// the input matters here only to introduce the lifetime used by Token
grammar<'err, 'input>(input: &'input str, errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, LexerError>>);
pub Lambda: Vec<ValueAccessor<'input>> = <ValueAccessor*> => <>;
ValueAccessor: ValueAccessor<'input> = {
<maybe_dot_selector:"."?> "[" <idx: array_idx> "]" <maybe_flatten_sign:"!"?> => {
ValueAccessor::ArrayAccess { idx }
},
"." <field_name: field_name> <maybe_flatten_sign:"!"?> => {
ValueAccessor::FieldAccess { field_name }
},
! => { errors.push(<>); ValueAccessor::Error },
}
extern {
type Location = usize;
type Error = LexerError;
enum Token<'input> {
"." => Token::Selector,
"[" => Token::OpenSquareBracket,
"]" => Token::CloseSquareBracket,
array_idx => Token::ArrayIdx(<u32>),
field_name => Token::FieldName(<&'input str>),
"!" => Token::FlatteningSign,
}
}

File diff suppressed because it is too large

View File

@ -27,6 +27,7 @@ pub struct SecurityTetraplet {
pub triplet: ResolvedTriplet,
/// Value was produced by applying this `json_path` to the output from `call_service`.
// TODO: it is no longer a json path, so this field should be renamed to `lambda`
pub json_path: String,
}
@ -55,7 +56,7 @@ impl SecurityTetraplet {
}
}
pub fn add_json_path(&mut self, json_path: &str) {
pub fn add_lambda(&mut self, json_path: &str) {
self.json_path.push_str(json_path)
}
}