diff --git a/CHANGELOG.md b/CHANGELOG.md
index f8221e7b..8a538aca 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,13 +1,21 @@
-## Version 0.15.0 (2021-09-30)
+## Version 0.16.0 (2021-10-18)
+
+[PR 154](https://github.com/fluencelabs/aquavm/pull/154)
+The json path crate has been removed and replaced with a custom lambda scheme that provides a subset of the json path functionality used by the Aqua compiler. The flattening sign `!` is still allowed, but now does nothing.
+
+[PR 150](https://github.com/fluencelabs/aquavm/pull/150), [PR 152](https://github.com/fluencelabs/aquavm/pull/152), [PR 153](https://github.com/fluencelabs/aquavm/pull/153), [PR 160](https://github.com/fluencelabs/aquavm/pull/160)
+Some parts of the interpreter have been refactored to make it more modular.
+
+[PR 144](https://github.com/fluencelabs/aquavm/pull/144)
+The interpreter is now built with the `unwind` panic handler, and some other debug options were turned on.
+
+## Version 0.15.0 (2021-10-04)
 
 [PR 140](https://github.com/fluencelabs/aquavm/pull/130):
 - the interpreter become async, now it's a pure function without any imports from a peer. Instead of calling import `call_service` from a peer, it now returns call results in the outcome structure, and receives their result in the `invoke` export.
 - data structure now includes a new field to track last call request id to give peer more freedom.
 - AVM server was completely refactored to support the new interpreter model and to expose a new trait storing data for a node.
 
-[PR 144](https://github.com/fluencelabs/aquavm/pull/144)
- The interpreter changed to be built with `unwind` panic handler and some other debug options were turned on.
-
 [PR 139](https://github.com/fluencelabs/aquavm/pull/139)
 Senders in `RequestSentBy` could be different now.
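For context on the lambda change above: under the new scheme a lambda is a chain of field and array accessors (the `ValueAccessor::FieldAccess` and `ValueAccessor::ArrayAccess` variants introduced by `air-lambda-parser` in this diff) that is walked over a `serde_json::Value`. Below is a minimal, self-contained Rust sketch of that walk, loosely modeled on the `select` function added in `air/src/execution_step/lambda_applier/applier.rs`; the `Accessor` enum and the `Option`-based error handling are illustrative stand-ins, not the crate's actual API.

```rust
use serde_json::{json, Value};

/// Illustrative stand-in for the parsed lambda: a sequence of
/// array and field accessors (cf. `ValueAccessor` in air-lambda-parser).
enum Accessor<'a> {
    ArrayAccess { idx: usize },
    FieldAccess { field_name: &'a str },
}

/// Walks the accessors over a JSON value, roughly what the real
/// `select` applier does (it returns `LambdaError`s instead of `None`).
fn select<'v>(mut value: &'v Value, lambda: &[Accessor<'_>]) -> Option<&'v Value> {
    for accessor in lambda {
        value = match accessor {
            // `.[N]` steps into the N-th element of an array
            Accessor::ArrayAccess { idx } => value.as_array()?.get(*idx)?,
            // `.name` steps into a field of an object
            Accessor::FieldAccess { field_name } => value.as_object()?.get(*field_name)?,
        };
    }
    Some(value)
}

fn main() {
    // roughly what `v.$.args.[0]` selects under the new scheme
    let value = json!({ "args": ["1", "2"] });
    let lambda = [
        Accessor::FieldAccess { field_name: "args" },
        Accessor::ArrayAccess { idx: 0 },
    ];
    assert_eq!(select(&value, &lambda), Some(&json!("1")));
}
```

In the actual applier the failure cases return dedicated `LambdaError` variants such as `ValueNotContainSuchArrayIdx` or `JValueNotContainSuchField` instead of `None`; those are the messages the updated `join_behaviour` tests below assert on.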
diff --git a/Cargo.lock b/Cargo.lock index 0ab7db60..b2a28641 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,11 +13,13 @@ dependencies = [ [[package]] name = "air" -version = "0.15.0" +version = "0.16.0" dependencies = [ "air-execution-info-collector", "air-interpreter-data", "air-interpreter-interface", + "air-lambda-ast", + "air-lambda-parser", "air-log-targets", "air-parser", "air-test-utils", @@ -27,7 +29,6 @@ dependencies = [ "csv", "env_logger", "fluence-app-service", - "jsonpath_lib-fl", "log", "maplit", "marine-rs-sdk", @@ -48,7 +49,7 @@ version = "0.1.0" [[package]] name = "air-interpreter" -version = "0.15.0" +version = "0.16.0" dependencies = [ "air", "air-log-targets", @@ -79,6 +80,32 @@ dependencies = [ "serde_json", ] +[[package]] +name = "air-lambda-ast" +version = "0.1.0" +dependencies = [ + "non-empty-vec", + "serde", + "serde_json", +] + +[[package]] +name = "air-lambda-parser" +version = "0.1.0" +dependencies = [ + "air-lambda-ast", + "codespan", + "codespan-reporting", + "itertools 0.10.1", + "lalrpop", + "lalrpop-util", + "multimap", + "regex", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "air-log-targets" version = "0.1.0" @@ -87,6 +114,7 @@ version = "0.1.0" name = "air-parser" version = "0.7.1" dependencies = [ + "air-lambda-parser", "codespan", "codespan-reporting", "criterion", @@ -139,12 +167,6 @@ version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61604a8f862e1d5c3229fdd78f8b02c68dcf73a4c4b05fd636d12240aaa242c1" -[[package]] -name = "array_tool" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f8cb5d814eb646a863c4f24978cff2880c4be96ad8cde2c0f0678732902e271" - [[package]] name = "arrayref" version = "0.3.6" @@ -301,9 +323,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.70" +version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0" +checksum = "79c2681d6594606957bbb8631c4b90a7fcaaa72cdb714743a437b156d6a7eedd" [[package]] name = "cfg-if" @@ -845,9 +867,9 @@ dependencies = [ [[package]] name = "half" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" +checksum = "ac5956d4e63858efaec57e0d6c1c2f6a41e1487f830314a324ccd7e2223a7ca0" [[package]] name = "hashbrown" @@ -993,19 +1015,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jsonpath_lib-fl" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33dcf980221b25366e8f0df601cf0df6ffcc97242cbbe4d139a79a7f0de5107f" -dependencies = [ - "array_tool", - "env_logger", - "log", - "serde", - "serde_json", -] - [[package]] name = "lalrpop" version = "0.19.6" @@ -1151,18 +1160,18 @@ dependencies = [ [[package]] name = "marine-macro" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "679663e087698f1048f23fed9b51ed82f6fa75781d3747ce29ea2f3ad78a6534" +checksum = "94b05da94255c230b7bf139c39c04447677b5d238579bf38ca7b3fcdc3e04993" dependencies = [ "marine-macro-impl", ] [[package]] name = "marine-macro-impl" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ba83fc29fec3b96374094a94396d3fe13c97468ffe196123b78555bdae1093e" +checksum = 
"7846a749f35186f923f71b3132a15add0735376e73978f41f552b92e9d43cbab" dependencies = [ "proc-macro2", "quote", @@ -1208,9 +1217,9 @@ dependencies = [ [[package]] name = "marine-rs-sdk" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8866fc6f24b92342f15d2816298d3de6377b685df245e38a36ddcde993c8f1d5" +checksum = "5b349c8b5a09045a2a509f7ee5106a52c70ef6782febe9be6748044c6f8c477a" dependencies = [ "marine-macro", "marine-rs-sdk-main", @@ -1220,9 +1229,9 @@ dependencies = [ [[package]] name = "marine-rs-sdk-main" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4980a0c01121844419c0146e776d24e35fdf7cb2e90a33d19ecf52756e400196" +checksum = "1e9d9b9ecd87f0dfa8ad9ee594e71f71d8b4f0b819d3b025632a1a6597063088" dependencies = [ "log", "marine-macro", @@ -1261,9 +1270,9 @@ dependencies = [ [[package]] name = "marine-timestamp-macro" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5656745923b99d73f945e26cf191efa70e906c7f55b0d4c1fc176b4b8087e85b" +checksum = "cb6e81c03cc0c2f546680bf21e10f3514f69acfa4d4d84a8f2b17b2fcebe8ce9" dependencies = [ "chrono", "quote", @@ -1339,6 +1348,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "non-empty-vec" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e411c9b40f7c0048d9d78283d94954defc6ae6d432184f4b8ef491ea87d9a882" +dependencies = [ + "serde", +] + [[package]] name = "num-integer" version = "0.1.44" @@ -1556,9 +1574,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" -version = "1.0.29" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d" +checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70" dependencies = [ "unicode-xid", ] @@ -1821,7 +1839,6 @@ version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8" dependencies = [ - "indexmap", "itoa", "ryu", "serde", @@ -1847,12 +1864,13 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_cache" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ddb1139b5353f96e429e1a5e19fbaf663bddedaa06d1dbd49f82e352601209a" +checksum = "923f0f39b6267d37d23ce71ae7235602134b250ace715dd2c90421998ddac0c6" dependencies = [ "lazy_static", "new_debug_unreachable", + "parking_lot 0.11.2", "phf_shared", "precomputed-hash", ] @@ -1883,9 +1901,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.78" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4eac2e6c19f5c3abc0c229bea31ff0b9b091c7b14990e8924b92902a303a0c0" +checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194" dependencies = [ "proc-macro2", "quote", @@ -1929,18 +1947,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.29" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "602eca064b2d83369e2b2f34b09c70b605402801927c65c11071ac911d299b88" +checksum = 
"854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.29" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bad553cc2c78e8de258400763a647e80e6d1b31ee237275d756f6836d204494c" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index 8ec56b39..e3076353 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,8 @@ members = [ "crates/air-lib/execution-info-collector", "crates/air-lib/interpreter-data", "crates/air-lib/interpreter-interface", + "crates/air-lib/lambda/ast", + "crates/air-lib/lambda/parser", "crates/air-lib/log-targets", "crates/air-lib/polyplets", "crates/air-lib/test-utils", diff --git a/air-interpreter/Cargo.toml b/air-interpreter/Cargo.toml index 3be9559e..70f8f9ce 100644 --- a/air-interpreter/Cargo.toml +++ b/air-interpreter/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "air-interpreter" -version = "0.15.0" +version = "0.16.0" description = "Crate-wrapper for air" authors = ["Fluence Labs"] edition = "2018" diff --git a/air/Cargo.toml b/air/Cargo.toml index 20e12826..c1fb7512 100644 --- a/air/Cargo.toml +++ b/air/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "air" -version = "0.15.0" +version = "0.16.0" description = "Interpreter of AIR scripts intended to coordinate request flow in the Fluence network" authors = ["Fluence Labs"] edition = "2018" @@ -20,6 +20,8 @@ air-execution-info-collector = { path = "../crates/air-lib/execution-info-collec air-interpreter-data = { path = "../crates/air-lib/interpreter-data" } air-interpreter-interface = { path = "../crates/air-lib/interpreter-interface" } air-log-targets = { path = "../crates/air-lib/log-targets" } +air-lambda-ast = { path = "../crates/air-lib/lambda/ast" } +air-lambda-parser = { path = "../crates/air-lib/lambda/parser" } air-trace-handler = { path = "../crates/air-lib/trace-handler" } polyplets = { path = "../crates/air-lib/polyplets" } marine-rs-sdk = { version = "0.6.11", features = ["logger"] } @@ -27,8 +29,6 @@ marine-rs-sdk = { version = "0.6.11", features = ["logger"] } serde = { version = "1.0.118", features = [ "derive", "rc" ] } serde_json = "1.0.61" -jsonpath_lib-fl = "=0.3.7" - boolinator = "2.4.0" log = "0.4.11" thiserror = "1.0.23" diff --git a/air/benches/call_benchmark.rs b/air/benches/call_benchmark.rs index a4a568e9..be6e0bef 100644 --- a/air/benches/call_benchmark.rs +++ b/air/benches/call_benchmark.rs @@ -1,8 +1,4 @@ -use air_test_utils::create_avm; -use air_test_utils::unit_call_service; -use air_test_utils::AVMError; -use air_test_utils::InterpreterOutcome; -use air_test_utils::AVM; +use air_test_utils::prelude::*; use criterion::criterion_group; use criterion::criterion_main; @@ -10,7 +6,7 @@ use criterion::Criterion; use std::cell::RefCell; -thread_local!(static VM: RefCell = RefCell::new(create_avm(unit_call_service(), "test_peer_id"))); +thread_local!(static VM: RefCell = RefCell::new(create_avm(unit_call_service(), "test_peer_id"))); thread_local!(static SCRIPT: String = String::from( r#" (call "test_peer_id" ("local_service_id" "local_fn_name") [] result_name) @@ -18,8 +14,8 @@ thread_local!(static SCRIPT: String = String::from( ) ); -fn current_peer_id_call() -> Result { - VM.with(|vm| SCRIPT.with(|script| vm.borrow_mut().call_with_prev_data("", script.clone(), "", ""))) +fn current_peer_id_call() -> Result { + VM.with(|vm| SCRIPT.with(|script| 
vm.borrow_mut().call(script, "", "", ""))) } fn criterion_benchmark(c: &mut Criterion) { diff --git a/air/benches/chat_benchmark.rs b/air/benches/chat_benchmark.rs index 11b508f8..5e3cc2bf 100644 --- a/air/benches/chat_benchmark.rs +++ b/air/benches/chat_benchmark.rs @@ -1,39 +1,26 @@ -use air_test_utils::create_avm; -use air_test_utils::unit_call_service; -use air_test_utils::AVMError; -use air_test_utils::CallServiceClosure; -use air_test_utils::IValue; -use air_test_utils::InterpreterOutcome; -use air_test_utils::NEVec; -use air_test_utils::AVM; +use air_test_utils::prelude::*; use criterion::criterion_group; use criterion::criterion_main; use criterion::Criterion; +use serde_json::json; use std::cell::RefCell; -thread_local!(static RELAY_1_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "Relay1"))); -thread_local!(static RELAY_2_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "Relay2"))); -thread_local!(static REMOTE_VM: RefCell = RefCell::new({ - let members_call_service: CallServiceClosure = Box::new(|_, _| -> Option { - Some(IValue::Record( - NEVec::new(vec![ - IValue::S32(0), - IValue::String(String::from(r#"[["A", "Relay1"], ["B", "Relay2"]]"#)), - ]) - .unwrap(), - )) +thread_local!(static RELAY_1_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "Relay1"))); +thread_local!(static RELAY_2_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "Relay2"))); +thread_local!(static REMOTE_VM: RefCell = RefCell::new({ + let members_call_service: CallServiceClosure = Box::new(|_| -> CallServiceResult { + CallServiceResult::ok(json!([["A", "Relay1"], ["B", "Relay2"]])) }); create_avm(members_call_service, "Remote") })); -thread_local!(static CLIENT_1_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "A"))); -thread_local!(static CLIENT_2_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "B"))); +thread_local!(static CLIENT_1_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "A"))); +thread_local!(static CLIENT_2_VM: RefCell = RefCell::new(create_avm(unit_call_service(), "B"))); -fn chat_sent_message_benchmark() -> Result { - let script = String::from( - r#" +fn chat_sent_message_benchmark() -> Result { + let script = r#" (seq (call "Relay1" ("identity" "") [] $void1) (seq @@ -52,29 +39,26 @@ fn chat_sent_message_benchmark() -> Result { ) ) ) - "#, - ); + "#; - let result = CLIENT_1_VM - .with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", "")) - .unwrap(); + let result = CLIENT_1_VM.with(|vm| vm.borrow_mut().call(script, "", "", "")).unwrap(); let result = RELAY_1_VM - .with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", result.data)) + .with(|vm| vm.borrow_mut().call(script, "", result.data, "")) .unwrap(); let result = REMOTE_VM - .with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", result.data)) + .with(|vm| vm.borrow_mut().call(script, "", result.data, "")) .unwrap(); let res_data = result.data.clone(); let res1 = RELAY_1_VM - .with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", res_data)) + .with(|vm| vm.borrow_mut().call(script, "", res_data, "")) .unwrap(); CLIENT_1_VM - .with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", res1.data)) + .with(|vm| vm.borrow_mut().call(script, "", res1.data, "")) .unwrap(); let res2 = RELAY_2_VM - .with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", result.data)) + .with(|vm| vm.borrow_mut().call(script, "", result.data, "")) .unwrap(); - CLIENT_2_VM.with(|vm| 
vm.borrow_mut().call_with_prev_data("", script.clone(), "", res2.data)) + CLIENT_2_VM.with(|vm| vm.borrow_mut().call(script, "", res2.data, "")) } fn criterion_benchmark(c: &mut Criterion) { diff --git a/air/benches/create_service_benchmark.rs b/air/benches/create_service_benchmark.rs index 1820643e..18644c5c 100644 --- a/air/benches/create_service_benchmark.rs +++ b/air/benches/create_service_benchmark.rs @@ -1,11 +1,4 @@ -use air_test_utils::create_avm; -use air_test_utils::set_variables_call_service; -use air_test_utils::AVMError; -use air_test_utils::CallServiceClosure; -use air_test_utils::IValue; -use air_test_utils::InterpreterOutcome; -use air_test_utils::NEVec; -use air_test_utils::AVM; +use air_test_utils::prelude::*; use serde_json::json; @@ -15,33 +8,26 @@ use criterion::Criterion; use std::cell::RefCell; -thread_local!(static VM: RefCell = RefCell::new({ +thread_local!(static VM: RefCell = RefCell::new({ let add_module_response = String::from("add_module response"); let add_blueprint_response = String::from("add_blueprint response"); let create_response = String::from("create response"); - let call_service: CallServiceClosure = Box::new(move |_, args| -> Option { - let builtin_service = match &args[0] { - IValue::String(str) => str, - _ => unreachable!(), - }; - - let response = match builtin_service.as_str() { + let call_service: CallServiceClosure = Box::new(move |args| -> CallServiceResult { + let response = match args.service_id.as_str() { "add_module" => add_module_response.clone(), "add_blueprint" => add_blueprint_response.clone(), "create" => create_response.clone(), _ => String::from("unknown response"), }; - Some(IValue::Record( - NEVec::new(vec![IValue::S32(0), IValue::String(format!("\"{}\"", response))]).unwrap(), - )) + CallServiceResult::ok(json!(response)) }); create_avm(call_service, "A") })); -thread_local!(static SET_VARIABLES_VM: RefCell = RefCell::new({ +thread_local!(static SET_VARIABLES_VM: RefCell = RefCell::new({ let module = "greeting"; let module_config = json!( { @@ -60,17 +46,16 @@ thread_local!(static SET_VARIABLES_VM: RefCell = RefCell::new({ let blueprint = json!({ "name": "blueprint", "dependencies": [module]}); let variables_mapping = maplit::hashmap!( - String::from("module_bytes") => module_bytes.to_string(), - String::from("module_config") => module_config.to_string(), - String::from("blueprint") => blueprint.to_string(), + String::from("module_bytes") => json!(module_bytes), + String::from("module_config") => json!(module_config), + String::from("blueprint") => json!(blueprint), ); create_avm(set_variables_call_service(variables_mapping), "set_variables") })); -fn create_service_benchmark() -> Result { - let script = String::from( - r#" +fn create_service_benchmark() -> Result { + let script = r#" (seq (seq (seq @@ -89,13 +74,12 @@ fn create_service_benchmark() -> Result { ) ) ) - )"#, - ); + )"#; let result = SET_VARIABLES_VM - .with(|vm| vm.borrow_mut().call_with_prev_data("", script.clone(), "", "")) + .with(|vm| vm.borrow_mut().call(script, "", "", "")) .unwrap(); - VM.with(|vm| vm.borrow_mut().call_with_prev_data("", script, "", result.data)) + VM.with(|vm| vm.borrow_mut().call(script, "", result.data, "")) } fn criterion_benchmark(c: &mut Criterion) { diff --git a/air/src/execution_step/air/ap.rs b/air/src/execution_step/air/ap.rs index df26fb1c..9a77eaac 100644 --- a/air/src/execution_step/air/ap.rs +++ b/air/src/execution_step/air/ap.rs @@ -24,7 +24,7 @@ use super::ExecutionResult; use super::TraceHandler; use 
crate::execution_step::air::ResolvedCallResult; use crate::execution_step::boxed_value::Variable; -use crate::execution_step::utils::apply_json_path; +use crate::execution_step::utils::apply_lambda; use crate::trace_to_exec_err; use crate::JValue; use crate::SecurityTetraplet; @@ -33,7 +33,7 @@ use utils::*; use air_parser::ast::ApArgument; use air_parser::ast::AstVariable; -use air_parser::ast::JsonPath; +use air_parser::ast::VariableWithLambda; use air_parser::ast::{Ap, LastErrorPath}; use air_trace_handler::MergerApResult; @@ -84,7 +84,7 @@ fn save_result<'ctx>( fn should_touch_trace(ap: &Ap<'_>) -> bool { match (&ap.argument, &ap.result) { (_, AstVariable::Stream(_)) => true, - (ApArgument::JsonPath(json_path), _) => match &json_path.variable { + (ApArgument::VariableWithLambda(vl), _) => match &vl.variable { AstVariable::Scalar(_) => false, AstVariable::Stream(_) => true, }, diff --git a/air/src/execution_step/air/ap/apply_to_arguments.rs b/air/src/execution_step/air/ap/apply_to_arguments.rs index 7674355a..735f2462 100644 --- a/air/src/execution_step/air/ap/apply_to_arguments.rs +++ b/air/src/execution_step/air/ap/apply_to_arguments.rs @@ -24,7 +24,7 @@ pub(super) fn apply_to_arg( ) -> ExecutionResult { let result = match argument { ApArgument::ScalarVariable(scalar_name) => apply_scalar(scalar_name, exec_ctx, trace_ctx, should_touch_trace)?, - ApArgument::JsonPath(json_arg) => apply_json_argument(json_arg, exec_ctx, trace_ctx)?, + ApArgument::VariableWithLambda(vl) => apply_json_argument(vl, exec_ctx, trace_ctx)?, ApArgument::LastError(error_path) => apply_last_error(error_path, exec_ctx, trace_ctx)?, ApArgument::Literal(value) => apply_const(value.to_string(), exec_ctx, trace_ctx), ApArgument::Number(value) => apply_const(value, exec_ctx, trace_ctx), @@ -89,12 +89,12 @@ fn apply_last_error( } fn apply_json_argument( - json_arg: &JsonPath<'_>, + vl: &VariableWithLambda<'_>, exec_ctx: &ExecutionCtx<'_>, trace_ctx: &TraceHandler, ) -> ExecutionResult { - let variable = Variable::from_ast(&json_arg.variable); - let (jvalue, mut tetraplets) = apply_json_path(variable, json_arg.path, json_arg.should_flatten, exec_ctx)?; + let variable = Variable::from_ast(&vl.variable); + let (jvalue, mut tetraplets) = apply_lambda(variable, &vl.lambda, exec_ctx)?; let tetraplet = tetraplets .pop() diff --git a/air/src/execution_step/air/call/triplet.rs b/air/src/execution_step/air/call/triplet.rs index 7afd91de..cc9f6bf0 100644 --- a/air/src/execution_step/air/call/triplet.rs +++ b/air/src/execution_step/air/call/triplet.rs @@ -20,7 +20,7 @@ use super::ExecutionResult; use crate::exec_err; use crate::JValue; -use air_parser::ast::{CallInstrValue, FunctionPart, PeerPart}; +use air_parser::ast::{AstVariable, CallInstrValue, FunctionPart, PeerPart}; use polyplets::ResolvedTriplet; /// Triplet represents a location of the executable code in the network. @@ -87,13 +87,10 @@ fn resolve_to_string<'i>(value: &CallInstrValue<'i>, ctx: &ExecutionCtx<'i>) -> let jvalue = resolved.into_jvalue(); jvalue_to_string(jvalue)? } - CallInstrValue::JsonPath(json_path) => { - // this is checked on the parsing stage - debug_assert!(json_path.should_flatten); - - let resolved = resolve_ast_variable(&json_path.variable, ctx)?; - let resolved = resolved.apply_json_path(json_path.path)?; - vec_to_string(resolved, json_path.path)? + CallInstrValue::VariableWithLambda(vl) => { + let resolved = resolve_ast_variable(&vl.variable, ctx)?; + let resolved = resolved.apply_lambda(&vl.lambda)?; + vec_to_string(resolved, &vl.variable)? 
} }; @@ -109,13 +106,13 @@ fn jvalue_to_string(jvalue: JValue) -> ExecutionResult { } } -fn vec_to_string(values: Vec<&JValue>, json_path: &str) -> ExecutionResult { +fn vec_to_string(values: Vec<&JValue>, variable: &AstVariable<'_>) -> ExecutionResult { if values.is_empty() { - return exec_err!(ExecutionError::VariableNotFound(json_path.to_string())); - } - - if values.len() != 1 { - return exec_err!(ExecutionError::MultipleValuesInJsonPath(json_path.to_string())); + let variable_name = match variable { + AstVariable::Stream(name) => name, + AstVariable::Scalar(name) => name, + }; + return exec_err!(ExecutionError::VariableNotFound(variable_name.to_string())); } jvalue_to_string(values[0].clone()) diff --git a/air/src/execution_step/air/compare_matchable/comparator.rs b/air/src/execution_step/air/compare_matchable/comparator.rs index a2573b99..d4d256ab 100644 --- a/air/src/execution_step/air/compare_matchable/comparator.rs +++ b/air/src/execution_step/air/compare_matchable/comparator.rs @@ -61,17 +61,12 @@ pub(crate) fn are_matchable_eq<'ctx>( Ok(left_value == right_value) } - (JsonPath(lhs), JsonPath(rhs)) => { - // TODO: improve comparison - if lhs.should_flatten != rhs.should_flatten { - return Ok(false); - } - + (VariableWithLambda(lhs), VariableWithLambda(rhs)) => { let left_jvaluable = resolve_ast_variable(&lhs.variable, exec_ctx)?; - let left_value = left_jvaluable.apply_json_path(lhs.path)?; + let left_value = left_jvaluable.apply_lambda(&lhs.lambda)?; let right_jvaluable = resolve_ast_variable(&rhs.variable, exec_ctx)?; - let right_value = right_jvaluable.apply_json_path(rhs.path)?; + let right_value = right_jvaluable.apply_lambda(&rhs.lambda)?; Ok(left_value == right_value) } @@ -116,23 +111,14 @@ fn compare_matchable<'ctx>( let jvalue = jvaluable.as_jvalue(); Ok(comparator(jvalue)) } - JsonPath(json_path) => { - let jvaluable = resolve_ast_variable(&json_path.variable, exec_ctx)?; - let jvalues = jvaluable.apply_json_path(json_path.path)?; + VariableWithLambda(vl) => { + let jvaluable = resolve_ast_variable(&vl.variable, exec_ctx)?; + let jvalues = jvaluable.apply_lambda(&vl.lambda)?; - let jvalue = if json_path.should_flatten { - if jvalues.len() != 1 { - return Ok(false); - } - Cow::Borrowed(jvalues[0]) - } else { - let jvalue = jvalues.into_iter().cloned().collect::>(); - let jvalue = JValue::Array(jvalue); + let jvalue = jvalues.into_iter().cloned().collect::>(); + let jvalue = JValue::Array(jvalue); - Cow::Owned(jvalue) - }; - - Ok(comparator(jvalue)) + Ok(comparator(Cow::Owned(jvalue))) } } } diff --git a/air/src/execution_step/air/fold/utils.rs b/air/src/execution_step/air/fold/utils.rs index b2408e82..469b4f28 100644 --- a/air/src/execution_step/air/fold/utils.rs +++ b/air/src/execution_step/air/fold/utils.rs @@ -18,12 +18,11 @@ use super::*; use crate::exec_err; use crate::execution_step::RSecurityTetraplet; use crate::JValue; +use crate::LambdaAST; use air_parser::ast; -use jsonpath_lib::select; use std::ops::Deref; -use std::rc::Rc; // TODO: refactor this file after switching to boxed value @@ -46,11 +45,9 @@ pub(crate) fn construct_scalar_iterable_value<'ctx>( ) -> ExecutionResult { match ast_iterable { ast::IterableScalarValue::ScalarVariable(scalar_name) => create_scalar_iterable(exec_ctx, scalar_name), - ast::IterableScalarValue::JsonPath { - scalar_name, - path, - should_flatten, - } => create_scalar_json_path_iterable(exec_ctx, scalar_name, path, *should_flatten), + ast::IterableScalarValue::VariableWithLambda { scalar_name, lambda } => { + 
create_scalar_lambda_iterable(exec_ctx, scalar_name, lambda) + } } } @@ -124,79 +121,58 @@ fn from_call_result(call_result: ResolvedCallResult) -> ExecutionResult( +fn create_scalar_lambda_iterable<'ctx>( exec_ctx: &ExecutionCtx<'ctx>, scalar_name: &str, - json_path: &str, - should_flatten: bool, + lambda: &LambdaAST<'_>, ) -> ExecutionResult { + use crate::execution_step::lambda_applier::select; + match exec_ctx.scalars.get(scalar_name) { Some(Scalar::JValueRef(variable)) => { - let jvalues = apply_json_path(&variable.result, json_path)?; - from_jvalues(jvalues, variable.tetraplet.clone(), json_path, should_flatten) + let jvalues = select(&variable.result, lambda.iter())?; + from_jvalue(jvalues, variable.tetraplet.clone(), lambda) } Some(Scalar::JValueFoldCursor(fold_state)) => { let iterable_value = fold_state.iterable.peek().unwrap(); - let jvalues = iterable_value.apply_json_path(json_path)?; + let jvalues = iterable_value.apply_lambda(lambda)?; let tetraplet = as_tetraplet(&iterable_value); - from_jvalues(jvalues, tetraplet, json_path, should_flatten) + from_jvalue(jvalues[0], tetraplet, lambda) } _ => return exec_err!(ExecutionError::VariableNotFound(scalar_name.to_string())), } } -fn apply_json_path<'jvalue, 'str>( - jvalue: &'jvalue JValue, - json_path: &'str str, -) -> ExecutionResult> { - use ExecutionError::JValueJsonPathError; - - select(jvalue, json_path).map_err(|e| Rc::new(JValueJsonPathError(jvalue.clone(), json_path.to_string(), e))) -} - -/// Applies json_path to provided jvalues and construct IterableValue from the result and given triplet. -fn from_jvalues( - jvalues: Vec<&JValue>, +/// Construct IterableValue from the result and given triplet. +fn from_jvalue( + jvalue: &JValue, tetraplet: RSecurityTetraplet, - json_path: &str, - should_flatten: bool, + lambda: &LambdaAST<'_>, ) -> ExecutionResult { - let jvalues = construct_iterable_jvalues(jvalues, should_flatten)?; + let formatted_lambda_ast = air_lambda_ast::format_ast(lambda); + tetraplet.borrow_mut().add_lambda(&formatted_lambda_ast); - if jvalues.is_empty() { + let iterable = match jvalue { + JValue::Array(array) => array, + _ => { + return exec_err!(ExecutionError::FoldIteratesOverNonArray( + jvalue.clone(), + formatted_lambda_ast + )) + } + }; + + if iterable.is_empty() { return Ok(FoldIterableScalar::Empty); } - tetraplet.borrow_mut().add_json_path(json_path); - - let foldable = IterableJsonPathResult::init(jvalues, tetraplet); + let iterable = iterable.to_vec(); + let foldable = IterableLambdaResult::init(iterable, tetraplet); let iterable = FoldIterableScalar::Scalar(Box::new(foldable)); Ok(iterable) } -fn construct_iterable_jvalues(jvalues: Vec<&JValue>, should_flatten: bool) -> ExecutionResult> { - if !should_flatten { - let jvalues = jvalues.into_iter().cloned().collect(); - return Ok(jvalues); - } - - if jvalues.len() != 1 { - let jvalues = jvalues.into_iter().cloned().collect(); - let jvalue = JValue::Array(jvalues); - return exec_err!(ExecutionError::FlatteningError(jvalue)); - } - - match jvalues[0] { - JValue::Array(values) => Ok(values.clone()), - _ => { - let jvalues = jvalues.into_iter().cloned().collect(); - let jvalue = JValue::Array(jvalues); - exec_err!(ExecutionError::FlatteningError(jvalue)) - } - } -} - fn as_tetraplet(iterable: &IterableItem<'_>) -> RSecurityTetraplet { use IterableItem::*; diff --git a/air/src/execution_step/boxed_value/iterable.rs b/air/src/execution_step/boxed_value/iterable.rs index f93444b3..c895464d 100644 --- a/air/src/execution_step/boxed_value/iterable.rs +++ 
b/air/src/execution_step/boxed_value/iterable.rs @@ -19,7 +19,7 @@ mod resolved_call; mod vec_json_path_result; mod vec_resolved_call; -pub(crate) use json_path_result::IterableJsonPathResult; +pub(crate) use json_path_result::IterableLambdaResult; pub(crate) use resolved_call::IterableResolvedCall; pub(crate) use vec_resolved_call::IterableVecResolvedCall; diff --git a/air/src/execution_step/boxed_value/iterable/json_path_result.rs b/air/src/execution_step/boxed_value/iterable/json_path_result.rs index f1982ad5..9a187beb 100644 --- a/air/src/execution_step/boxed_value/iterable/json_path_result.rs +++ b/air/src/execution_step/boxed_value/iterable/json_path_result.rs @@ -21,16 +21,16 @@ use crate::foldable_next; use crate::foldable_prev; use crate::JValue; -/// Used for iterating over a result of applied to a JValue json path. +/// Used for iterating over a result of applied to a JValue lambda. #[derive(Clone, Debug, Eq, PartialEq)] -pub(crate) struct IterableJsonPathResult { +pub(crate) struct IterableLambdaResult { pub(crate) jvalues: Vec, // consider adding index for each tetraplet pub(crate) tetraplet: RSecurityTetraplet, pub(crate) cursor: usize, } -impl IterableJsonPathResult { +impl IterableLambdaResult { pub(crate) fn init(jvalues: Vec, tetraplet: RSecurityTetraplet) -> Self { Self { jvalues, @@ -40,7 +40,7 @@ impl IterableJsonPathResult { } } -impl<'ctx> Iterable<'ctx> for IterableJsonPathResult { +impl<'ctx> Iterable<'ctx> for IterableLambdaResult { type Item = IterableItem<'ctx>; fn next(&mut self) -> bool { diff --git a/air/src/execution_step/boxed_value/iterable/vec_json_path_result.rs b/air/src/execution_step/boxed_value/iterable/vec_json_path_result.rs index ef1a0d23..319c6b16 100644 --- a/air/src/execution_step/boxed_value/iterable/vec_json_path_result.rs +++ b/air/src/execution_step/boxed_value/iterable/vec_json_path_result.rs @@ -21,7 +21,7 @@ use crate::foldable_next; use crate::foldable_prev; use crate::JValue; -/// Used for iterating over a result of applied to an stream json path. +/// Used for iterating over a result of applied to a stream lambda. #[derive(Clone, Debug, Eq, PartialEq)] pub(crate) struct IterableVecJsonPathResult { pub(crate) jvalues: Vec, diff --git a/air/src/execution_step/boxed_value/jvaluable.rs b/air/src/execution_step/boxed_value/jvaluable.rs index 24d43900..27c4e5f4 100644 --- a/air/src/execution_step/boxed_value/jvaluable.rs +++ b/air/src/execution_step/boxed_value/jvaluable.rs @@ -24,8 +24,10 @@ use super::iterable::IterableItem; use super::ExecutionError; use super::ExecutionResult; use super::ResolvedCallResult; +use crate::execution_step::lambda_applier::*; use crate::execution_step::SecurityTetraplets; use crate::JValue; +use crate::LambdaAST; pub(crate) use stream::StreamJvaluableIngredients; @@ -33,11 +35,14 @@ use std::borrow::Cow; /// Represent a value that could be transform to a JValue with or without tetraplets. pub(crate) trait JValuable { - /// Applies json path to the internal value, produces JValue. - fn apply_json_path(&self, json_path: &str) -> ExecutionResult>; + /// Applies lambda to the internal value, produces JValue. + fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult>; - /// Applies json path to the internal value, produces JValue with tetraplet. - fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)>; + /// Applies lambda to the internal value, produces JValue with tetraplet. 
+ fn apply_lambda_with_tetraplets( + &self, + lambda: &LambdaAST<'_>, + ) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)>; /// Return internal value as borrowed if it's possible, owned otherwise. fn as_jvalue(&self) -> Cow<'_, JValue>; diff --git a/air/src/execution_step/boxed_value/jvaluable/cell_vec_resolved_call_result.rs b/air/src/execution_step/boxed_value/jvaluable/cell_vec_resolved_call_result.rs index 20ccb98e..1a810412 100644 --- a/air/src/execution_step/boxed_value/jvaluable/cell_vec_resolved_call_result.rs +++ b/air/src/execution_step/boxed_value/jvaluable/cell_vec_resolved_call_result.rs @@ -14,45 +14,37 @@ * limitations under the License. */ -use super::ExecutionError::GenerationStreamJsonPathError; +use super::select_from_stream; use super::ExecutionResult; use super::JValuable; use super::ResolvedCallResult; use crate::execution_step::SecurityTetraplets; use crate::JValue; +use crate::LambdaAST; -use jsonpath_lib::select_with_iter; +use air_lambda_ast::format_ast; use std::borrow::Cow; use std::ops::Deref; impl JValuable for std::cell::Ref<'_, Vec> { - fn apply_json_path(&self, json_path: &str) -> ExecutionResult> { - let acc_iter = self.iter().map(|r| r.result.deref()); - let (selected_values, _) = select_with_iter(acc_iter, json_path).map_err(|e| { - GenerationStreamJsonPathError(self.iter().cloned().collect::>(), json_path.to_string(), e) - })?; - - Ok(selected_values) + fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult> { + let stream_iter = self.iter().map(|r| r.result.deref()); + let select_result = select_from_stream(stream_iter, lambda)?; + Ok(vec![select_result.result]) } - fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { - let acc_iter = self.iter().map(|r| r.result.deref()); + fn apply_lambda_with_tetraplets( + &self, + lambda: &LambdaAST<'_>, + ) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { + let stream_iter = self.iter().map(|r| r.result.deref()); + let select_result = select_from_stream(stream_iter, lambda)?; - let (selected_values, tetraplet_indices) = select_with_iter(acc_iter, json_path).map_err(|e| { - GenerationStreamJsonPathError(self.iter().cloned().collect::>(), json_path.to_string(), e) - })?; + let tetraplet = self[select_result.tetraplet_idx].tetraplet.clone(); + tetraplet.borrow_mut().add_lambda(&format_ast(lambda)); - let tetraplets = tetraplet_indices - .into_iter() - .map(|id| { - let tetraplet = self[id].tetraplet.clone(); - tetraplet.borrow_mut().add_json_path(json_path); - tetraplet - }) - .collect::>(); - - Ok((selected_values, tetraplets)) + Ok((vec![select_result.result], vec![tetraplet])) } fn as_jvalue(&self) -> Cow<'_, JValue> { diff --git a/air/src/execution_step/boxed_value/jvaluable/empty_stream.rs b/air/src/execution_step/boxed_value/jvaluable/empty_stream.rs index 389c2172..ddb98d63 100644 --- a/air/src/execution_step/boxed_value/jvaluable/empty_stream.rs +++ b/air/src/execution_step/boxed_value/jvaluable/empty_stream.rs @@ -17,6 +17,7 @@ use super::ExecutionError; use super::ExecutionResult; use super::JValuable; +use super::LambdaAST; use crate::exec_err; use crate::execution_step::SecurityTetraplets; use crate::JValue; @@ -24,14 +25,17 @@ use crate::JValue; use std::borrow::Cow; impl JValuable for () { - fn apply_json_path(&self, json_path: &str) -> ExecutionResult> { - // applying json path to an empty stream will produce a join behaviour - exec_err!(ExecutionError::EmptyStreamJsonPathError(json_path.to_string())) + fn 
apply_lambda(&self, _lambda: &LambdaAST<'_>) -> ExecutionResult> { + // applying lambda to an empty stream will produce a join behaviour + exec_err!(ExecutionError::EmptyStreamLambdaError) } - fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { - // applying json path to an empty stream will produce a join behaviour - exec_err!(ExecutionError::EmptyStreamJsonPathError(json_path.to_string())) + fn apply_lambda_with_tetraplets( + &self, + _lambda: &LambdaAST<'_>, + ) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { + // applying lambda to an empty stream will produce a join behaviour + exec_err!(ExecutionError::EmptyStreamLambdaError) } fn as_jvalue(&self) -> Cow<'_, JValue> { diff --git a/air/src/execution_step/boxed_value/jvaluable/iterable_item.rs b/air/src/execution_step/boxed_value/jvaluable/iterable_item.rs index 0af070c7..9ce25dc5 100644 --- a/air/src/execution_step/boxed_value/jvaluable/iterable_item.rs +++ b/air/src/execution_step/boxed_value/jvaluable/iterable_item.rs @@ -14,20 +14,19 @@ * limitations under the License. */ -use super::ExecutionError::JValueJsonPathError as JsonPathError; +use super::select; use super::ExecutionResult; use super::IterableItem; use super::JValuable; +use super::LambdaAST; use crate::execution_step::SecurityTetraplets; use crate::JValue; -use jsonpath_lib::select; - use std::borrow::Cow; use std::ops::Deref; impl<'ctx> JValuable for IterableItem<'ctx> { - fn apply_json_path(&self, json_path: &str) -> ExecutionResult> { + fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult> { use super::IterableItem::*; let jvalue = match self { @@ -36,12 +35,14 @@ impl<'ctx> JValuable for IterableItem<'ctx> { RcValue((jvalue, ..)) => jvalue.deref(), }; - let selected_jvalues = - select(jvalue, json_path).map_err(|e| JsonPathError(jvalue.clone(), String::from(json_path), e))?; - Ok(selected_jvalues) + let selected_value = select(jvalue, lambda.iter())?; + Ok(vec![selected_value]) } - fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { + fn apply_lambda_with_tetraplets( + &self, + lambda: &LambdaAST<'_>, + ) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { use super::IterableItem::*; let (jvalue, tetraplet) = match self { @@ -50,9 +51,8 @@ impl<'ctx> JValuable for IterableItem<'ctx> { RcValue((jvalue, tetraplet, _)) => (jvalue.deref(), tetraplet), }; - let selected_jvalues = - select(jvalue, json_path).map_err(|e| JsonPathError(jvalue.clone(), String::from(json_path), e))?; - Ok((selected_jvalues, vec![tetraplet.clone()])) + let selected_value = select(jvalue, lambda.iter())?; + Ok((vec![selected_value], vec![tetraplet.clone()])) } fn as_jvalue(&self) -> Cow<'_, JValue> { diff --git a/air/src/execution_step/boxed_value/jvaluable/resolved_call_result.rs b/air/src/execution_step/boxed_value/jvaluable/resolved_call_result.rs index cc4d444a..c8ba81ef 100644 --- a/air/src/execution_step/boxed_value/jvaluable/resolved_call_result.rs +++ b/air/src/execution_step/boxed_value/jvaluable/resolved_call_result.rs @@ -14,37 +14,34 @@ * limitations under the License. 
*/ +use super::select; use super::ExecutionResult; use super::JValuable; +use super::LambdaAST; use super::ResolvedCallResult; use crate::execution_step::SecurityTetraplets; use crate::JValue; -use jsonpath_lib::select; +use air_lambda_ast::format_ast; use std::borrow::Cow; use std::ops::Deref; impl JValuable for ResolvedCallResult { - fn apply_json_path(&self, json_path: &str) -> ExecutionResult> { - use super::ExecutionError::JValueJsonPathError as JsonPathError; - - let selected_jvalues = select(&self.result, json_path) - .map_err(|e| JsonPathError(self.result.deref().clone(), String::from(json_path), e))?; - Ok(selected_jvalues) + fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult> { + let selected_value = select(&self.result, lambda.iter())?; + Ok(vec![selected_value]) } - fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { - use super::ExecutionError::JValueJsonPathError as JsonPathError; - - is_json_path_allowed(&self.result)?; - let selected_jvalues = select(&self.result, json_path) - .map_err(|e| JsonPathError(self.result.deref().clone(), String::from(json_path), e))?; - + fn apply_lambda_with_tetraplets( + &self, + lambda: &LambdaAST<'_>, + ) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { + let selected_value = select(&self.result, lambda.iter())?; let tetraplet = self.tetraplet.clone(); - tetraplet.borrow_mut().add_json_path(json_path); + tetraplet.borrow_mut().add_lambda(&format_ast(lambda)); - Ok((selected_jvalues, vec![tetraplet])) + Ok((vec![selected_value], vec![tetraplet])) } fn as_jvalue(&self) -> Cow<'_, JValue> { @@ -59,14 +56,3 @@ impl JValuable for ResolvedCallResult { vec![self.tetraplet.clone()] } } - -fn is_json_path_allowed(value: &JValue) -> ExecutionResult<()> { - use super::ExecutionError; - use crate::exec_err; - - match value { - JValue::Array(_) => Ok(()), - JValue::Object(_) => Ok(()), - value => exec_err!(ExecutionError::JsonPathVariableTypeError(value.clone())), - } -} diff --git a/air/src/execution_step/boxed_value/jvaluable/stream.rs b/air/src/execution_step/boxed_value/jvaluable/stream.rs index 4d07fd1a..19608438 100644 --- a/air/src/execution_step/boxed_value/jvaluable/stream.rs +++ b/air/src/execution_step/boxed_value/jvaluable/stream.rs @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -use super::ExecutionError::StreamJsonPathError; +use super::select_from_stream; use super::ExecutionResult; use super::JValuable; use crate::exec_err; @@ -22,8 +22,9 @@ use crate::execution_step::boxed_value::Generation; use crate::execution_step::boxed_value::Stream; use crate::execution_step::SecurityTetraplets; use crate::JValue; +use crate::LambdaAST; -use jsonpath_lib::select_with_iter; +use air_lambda_ast::format_ast; use std::borrow::Cow; use std::ops::Deref; @@ -37,31 +38,26 @@ pub(crate) struct StreamJvaluableIngredients<'stream> { // TODO: this will be deleted soon, because it would be impossible to use streams without // canonicalization as an arg of a call impl JValuable for StreamJvaluableIngredients<'_> { - fn apply_json_path(&self, json_path: &str) -> ExecutionResult> { + fn apply_lambda(&self, lambda: &LambdaAST<'_>) -> ExecutionResult> { let iter = self.iter()?.map(|v| v.result.deref()); + let select_result = select_from_stream(iter, lambda)?; - let (selected_values, _) = select_with_iter(iter, json_path) - .map_err(|e| StreamJsonPathError(self.stream.deref().clone(), json_path.to_string(), e))?; - - Ok(selected_values) + Ok(vec![select_result.result]) } - fn apply_json_path_with_tetraplets(&self, json_path: &str) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { + fn apply_lambda_with_tetraplets( + &self, + lambda: &LambdaAST<'_>, + ) -> ExecutionResult<(Vec<&JValue>, SecurityTetraplets)> { let iter = self.iter()?.map(|v| v.result.deref()); + let select_result = select_from_stream(iter, lambda)?; - let (selected_values, tetraplet_indices) = select_with_iter(iter, json_path) - .map_err(|e| StreamJsonPathError(self.stream.deref().clone(), json_path.to_string(), e))?; + // unwrap is safe here because each value has a tetraplet and a lambda always returns a valid index + let resolved_call = self.iter()?.nth(select_result.tetraplet_idx).unwrap(); + let tetraplet = resolved_call.tetraplet.clone(); + tetraplet.borrow_mut().add_lambda(&format_ast(lambda)); - let mut tetraplets = Vec::with_capacity(tetraplet_indices.len()); - - for idx in tetraplet_indices.iter() { - let resolved_call = self.iter()?.nth(*idx).unwrap(); - let tetraplet = resolved_call.tetraplet.clone(); - tetraplet.borrow_mut().add_json_path(json_path); - tetraplets.push(tetraplet); - } - - Ok((selected_values, tetraplets)) + Ok((vec![select_result.result], vec![tetraplet])) } fn as_jvalue(&self) -> Cow<'_, JValue> { diff --git a/air/src/execution_step/errors.rs b/air/src/execution_step/errors.rs index 6b818489..364295ef 100644 --- a/air/src/execution_step/errors.rs +++ b/air/src/execution_step/errors.rs @@ -20,14 +20,13 @@ mod joinable; pub(crate) use catchable::Catchable; pub(crate) use joinable::Joinable; -use super::ResolvedCallResult; use super::Stream; +use crate::execution_step::lambda_applier::LambdaError; use crate::JValue; use air_interpreter_interface::CallResults; use air_trace_handler::MergerApResult; use air_trace_handler::TraceHandlerError; -use jsonpath_lib::JsonPathError; use strum::IntoEnumIterator; use strum_macros::EnumDiscriminants; use strum_macros::EnumIter; @@ -55,21 +54,13 @@ pub(crate) enum ExecutionError { #[error("multiple variables found for name '{0}' in data")] MultipleVariablesFound(String), - /// An error occurred while trying to apply json path to this JValue. - #[error("variable with path '{1}' not found in '{0}' with an error: '{2}'")] - JValueJsonPathError(JValue, String, JsonPathError), + /// An error occurred while trying to apply lambda to a value. 
+ #[error(transparent)] + LambdaApplierError(#[from] LambdaError), - /// An error occurred while trying to apply json path to this stream generation with JValue's. - #[error("variable with path '{1}' not found in '{0:?}' with error: '{2}'")] - GenerationStreamJsonPathError(Vec, String, JsonPathError), - - /// An error occurred while trying to apply json path to this stream with JValue's. - #[error("variable with path '{1}' not found in '{0:?}' with error: '{2}'")] - StreamJsonPathError(Stream, String, JsonPathError), - - /// An error occurred while trying to apply json path to an empty stream. - #[error("json path {0} is applied to an empty stream")] - EmptyStreamJsonPathError(String), + /// An error occurred while trying to apply lambda to an empty stream. + #[error("lambda is applied to an empty stream")] + EmptyStreamLambdaError, /// Provided JValue has incompatible with target type. #[error("expected JValue type '{1}', but got '{0}' JValue")] @@ -79,10 +70,6 @@ pub(crate) enum ExecutionError { #[error("expected AValue type '{1}', but got '{0}' AValue")] IncompatibleAValueType(String, String), - /// Multiple values found for such json path. - #[error("multiple variables found for this json path '{0}'")] - MultipleValuesInJsonPath(String), - /// Fold state wasn't found for such iterator name. #[error("fold state not found for this iterable '{0}'")] FoldStateNotFound(String), @@ -91,6 +78,10 @@ pub(crate) enum ExecutionError { #[error("multiple fold states found for iterable '{0}'")] MultipleFoldStates(String), + /// A fold instruction must iterate over array value. + #[error("lambda '{1}' returned non-array value '{0}' for fold instruction")] + FoldIteratesOverNonArray(JValue, String), + /// Errors encountered while shadowing non-scalar values. #[error("variable with name '{0}' can't be shadowed, shadowing isn't supported for iterables")] IterableShadowing(String), @@ -103,17 +94,6 @@ pub(crate) enum ExecutionError { #[error("mismatch is used without corresponding xor")] MismatchWithoutXorError, - /// This error type is produced by a mismatch to notify xor that compared values aren't equal. - #[error("jvalue '{0}' can't be flattened, to be flattened a jvalue should have an array type and consist of zero or one values")] - FlatteningError(JValue), - - /// Json path is applied to scalar that have inappropriate type. - #[error( - "json path can't be applied to scalar '{0}',\ - it could be applied only to streams and variables of array and object types" - )] - JsonPathVariableTypeError(JValue), - /// Errors bubbled from a trace handler. #[error(transparent)] TraceError(#[from] TraceHandlerError), @@ -134,6 +114,12 @@ pub(crate) enum ExecutionError { CallResultsNotEmpty(CallResults), } +impl From for Rc { + fn from(e: LambdaError) -> Self { + Rc::new(ExecutionError::LambdaApplierError(e)) + } +} + /// This macro is needed because it's impossible to implement /// From for Rc due to the orphan rule. 
#[macro_export] @@ -171,12 +157,12 @@ impl Joinable for ExecutionError { log_join!(" waiting for an argument with name '{}'", var_name); true } - StreamJsonPathError(stream, json_path, _) => { - log_join!(" waiting for an argument with path '{}' on stream '{:?}'", json_path, stream); + LambdaApplierError(LambdaError::StreamNotHaveEnoughValues { stream_size, idx }) => { + log_join!(" waiting for an argument with idx '{}' on stream with size '{}'", idx, stream_size); true } - EmptyStreamJsonPathError(json_path) => { - log_join!(" waiting on empty stream for path '{}'", json_path); + EmptyStreamLambdaError => { + log_join!(" waiting on empty stream for path "); true } diff --git a/air/src/execution_step/lambda_applier/applier.rs b/air/src/execution_step/lambda_applier/applier.rs new file mode 100644 index 00000000..01a59b42 --- /dev/null +++ b/air/src/execution_step/lambda_applier/applier.rs @@ -0,0 +1,84 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use super::utils::*; +use super::LambdaError; +use super::LambdaResult; +use crate::JValue; +use crate::LambdaAST; + +use air_lambda_parser::ValueAccessor; + +pub(crate) struct StreamSelectResult<'value> { + pub(crate) result: &'value JValue, + pub(crate) tetraplet_idx: usize, +} + +pub(crate) fn select_from_stream<'value>( + stream: impl ExactSizeIterator + 'value, + lambda: &LambdaAST<'_>, +) -> LambdaResult> { + use ValueAccessor::*; + + let (prefix, body) = lambda.split_first(); + let idx = match prefix { + ArrayAccess { idx } => *idx, + FieldAccess { field_name } => { + return Err(LambdaError::FieldAccessorAppliedToStream { + field_name: field_name.to_string(), + }) + } + _ => unreachable!("should not execute if parsing succeeded. QED."), + }; + + let stream_size = stream.len(); + let value = stream + .peekable() + .nth(idx as usize) + .ok_or(LambdaError::StreamNotHaveEnoughValues { stream_size, idx })?; + + let result = select(value, body.iter())?; + let select_result = StreamSelectResult::new(result, idx); + Ok(select_result) +} + +pub(crate) fn select<'value, 'algebra>( + mut value: &'value JValue, + lambda: impl Iterator>, +) -> LambdaResult<&'value JValue> { + for value_algebra in lambda { + match value_algebra { + ValueAccessor::ArrayAccess { idx } => { + value = try_jvalue_with_idx(value, *idx)?; + } + ValueAccessor::FieldAccess { field_name } => { + value = try_jvalue_with_field_name(value, *field_name)?; + } + ValueAccessor::Error => unreachable!("should not execute if parsing succeeded. 
QED."), + } + } + + Ok(value) +} + +impl<'value> StreamSelectResult<'value> { + pub(self) fn new(result: &'value JValue, tetraplet_idx: u32) -> Self { + Self { + result, + tetraplet_idx: tetraplet_idx as usize, + } + } +} diff --git a/air/src/execution_step/lambda_applier/errors.rs b/air/src/execution_step/lambda_applier/errors.rs new file mode 100644 index 00000000..0f08696f --- /dev/null +++ b/air/src/execution_step/lambda_applier/errors.rs @@ -0,0 +1,40 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use crate::JValue; + +use thiserror::Error as ThisError; + +#[derive(Debug, Clone, ThisError)] +pub(crate) enum LambdaError { + #[error("lambda is applied to a stream that have only '{stream_size}' elements, but '{idx}' requested")] + StreamNotHaveEnoughValues { stream_size: usize, idx: u32 }, + + #[error("field algebra (with field name = '{field_name}') can't be applied to a stream")] + FieldAccessorAppliedToStream { field_name: String }, + + #[error("value '{value}' is not an array-type to match array algebra with idx = '{idx}'")] + ArrayAccessorNotMatchValue { value: JValue, idx: u32 }, + + #[error("value '{value}' does not contain element for idx = '{idx}'")] + ValueNotContainSuchArrayIdx { value: JValue, idx: u32 }, + + #[error("value '{value}' is not an map-type to match field algebra with field_name = '{field_name}'")] + FieldAccessorNotMatchValue { value: JValue, field_name: String }, + + #[error("value '{value}' does not contain element with field name = '{field_name}'")] + JValueNotContainSuchField { value: JValue, field_name: String }, +} diff --git a/air/src/execution_step/lambda_applier/mod.rs b/air/src/execution_step/lambda_applier/mod.rs new file mode 100644 index 00000000..3315803c --- /dev/null +++ b/air/src/execution_step/lambda_applier/mod.rs @@ -0,0 +1,25 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +mod applier; +mod errors; +mod utils; + +pub(crate) type LambdaResult = std::result::Result; + +pub(crate) use applier::select; +pub(crate) use applier::select_from_stream; +pub(crate) use errors::LambdaError; diff --git a/air/src/execution_step/lambda_applier/utils.rs b/air/src/execution_step/lambda_applier/utils.rs new file mode 100644 index 00000000..be1b2c61 --- /dev/null +++ b/air/src/execution_step/lambda_applier/utils.rs @@ -0,0 +1,54 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use super::LambdaError; +use super::LambdaResult; +use crate::JValue; + +pub(super) fn try_jvalue_with_idx(jvalue: &JValue, idx: u32) -> LambdaResult<&JValue> { + match jvalue { + JValue::Array(values) => values + .get(idx as usize) + .ok_or_else(|| LambdaError::ValueNotContainSuchArrayIdx { + value: jvalue.clone(), + idx, + }), + _ => Err(LambdaError::ArrayAccessorNotMatchValue { + value: jvalue.clone(), + idx, + }), + } +} + +pub(super) fn try_jvalue_with_field_name<'value>( + jvalue: &'value JValue, + field_name: &str, +) -> LambdaResult<&'value JValue> { + match jvalue { + JValue::Object(values_map) => { + values_map + .get(field_name) + .ok_or_else(|| LambdaError::JValueNotContainSuchField { + value: jvalue.clone(), + field_name: field_name.to_string(), + }) + } + _ => Err(LambdaError::FieldAccessorNotMatchValue { + value: jvalue.clone(), + field_name: field_name.to_string(), + }), + } +} diff --git a/air/src/execution_step/mod.rs b/air/src/execution_step/mod.rs index 7682515c..0f1b653b 100644 --- a/air/src/execution_step/mod.rs +++ b/air/src/execution_step/mod.rs @@ -18,6 +18,7 @@ mod air; mod boxed_value; mod errors; pub(crate) mod execution_context; +mod lambda_applier; mod utils; pub(super) use self::air::ExecutableInstruction; diff --git a/air/src/execution_step/utils/resolve.rs b/air/src/execution_step/utils/resolve.rs index 9049a3e4..394dd9db 100644 --- a/air/src/execution_step/utils/resolve.rs +++ b/air/src/execution_step/utils/resolve.rs @@ -22,11 +22,13 @@ use crate::execution_step::execution_context::LastErrorWithTetraplet; use crate::execution_step::ExecutionError; use crate::execution_step::ExecutionResult; use crate::JValue; +use crate::LambdaAST; use crate::SecurityTetraplet; use air_parser::ast::AstVariable; use air_parser::ast::CallInstrArgValue; use air_parser::ast::LastErrorPath; + use serde_json::json; use std::cell::RefCell; use std::rc::Rc; @@ -47,9 +49,9 @@ pub(crate) fn resolve_to_args<'i>( let variable = Variable::from_ast(variable); prepare_variable(variable, ctx) } - CallInstrArgValue::JsonPath(json_path) => { - let variable = Variable::from_ast(&json_path.variable); - apply_json_path(variable, json_path.path, json_path.should_flatten, ctx) + CallInstrArgValue::VariableWithLambda(var_with_lambda) => { + let variable = Variable::from_ast(&var_with_lambda.variable); + apply_lambda(variable, &var_with_lambda.lambda, ctx) } } } @@ -126,30 +128,16 @@ pub(crate) fn resolve_ast_variable<'ctx, 'i>( 
resolve_variable(variable, ctx) } -pub(crate) fn apply_json_path<'i>( +pub(crate) fn apply_lambda<'i>( variable: Variable<'_>, - json_path: &str, - should_flatten: bool, + lambda: &LambdaAST<'i>, ctx: &ExecutionCtx<'i>, ) -> ExecutionResult<(JValue, SecurityTetraplets)> { let resolved = resolve_variable(variable, ctx)?; - let (jvalue, tetraplets) = resolved.apply_json_path_with_tetraplets(json_path)?; + let (jvalue, tetraplets) = resolved.apply_lambda_with_tetraplets(lambda)?; - let jvalue = if should_flatten { - match jvalue.len() { - 0 => JValue::Array(vec![]), - 1 => jvalue[0].clone(), - _ => { - let jvalue = jvalue.into_iter().cloned().collect::>(); - return crate::exec_err!(ExecutionError::FlatteningError(JValue::Array(jvalue))); - } - } - } else { - let jvalue = jvalue.into_iter().cloned().collect::>(); - JValue::Array(jvalue) - }; - - Ok((jvalue, tetraplets)) + // it's known that apply_lambda_with_tetraplets returns vec of one value + Ok((jvalue[0].clone(), tetraplets)) } /// Constructs jvaluable result from scalars by name. diff --git a/air/src/lib.rs b/air/src/lib.rs index d8a02cd9..ca0d179e 100644 --- a/air/src/lib.rs +++ b/air/src/lib.rs @@ -53,3 +53,4 @@ pub mod parser { } pub(crate) type JValue = serde_json::Value; +use air_lambda_parser::LambdaAST; diff --git a/air/tests/test_module/instructions/fold.rs b/air/tests/test_module/instructions/fold.rs index 0c5e88c6..b768daad 100644 --- a/air/tests/test_module/instructions/fold.rs +++ b/air/tests/test_module/instructions/fold.rs @@ -157,7 +157,7 @@ fn inner_fold_with_same_iterator() { let result = call_vm!(vm, "", script, "", ""); - assert_eq!(result.ret_code, 1012); + assert_eq!(result.ret_code, 1009); } #[test] @@ -239,7 +239,7 @@ fn fold_with_join() { } #[test] -fn json_path() { +fn lambda() { let mut vm = create_avm(echo_call_service(), "A"); let mut set_variable_vm = create_avm( set_variable_call_service(json!({ "array": ["1","2","3","4","5"] })), diff --git a/air/tests/test_module/instructions/match_.rs b/air/tests/test_module/instructions/match_.rs index 51567a6e..047e3f39 100644 --- a/air/tests/test_module/instructions/match_.rs +++ b/air/tests/test_module/instructions/match_.rs @@ -192,11 +192,11 @@ fn match_without_xor() { let result = call_vm!(set_variable_vm, "", &script, "", ""); let result = call_vm!(vm, "", &script, "", result.data); - assert_eq!(result.ret_code, 1014); + assert_eq!(result.ret_code, 1012); let result = call_vm!(vm, "", script, "", result.data); - assert_eq!(result.ret_code, 1014); + assert_eq!(result.ret_code, 1012); } #[test] diff --git a/air/tests/test_module/instructions/mismatch.rs b/air/tests/test_module/instructions/mismatch.rs index f59631fe..5cdbfd64 100644 --- a/air/tests/test_module/instructions/mismatch.rs +++ b/air/tests/test_module/instructions/mismatch.rs @@ -143,11 +143,11 @@ fn mismatch_without_xor() { let result = call_vm!(set_variable_vm, "asd", &script, "", ""); let result = call_vm!(vm, "asd", &script, "", result.data); - assert_eq!(result.ret_code, 1015); + assert_eq!(result.ret_code, 1013); let result = call_vm!(vm, "asd", script, "", result.data); - assert_eq!(result.ret_code, 1015); + assert_eq!(result.ret_code, 1013); } #[test] diff --git a/air/tests/test_module/integration/flattening.rs b/air/tests/test_module/integration/flattening.rs index f2945c9e..54d8479a 100644 --- a/air/tests/test_module/integration/flattening.rs +++ b/air/tests/test_module/integration/flattening.rs @@ -64,7 +64,7 @@ fn flattening_scalar_arrays() { (call "{0}" ("" "") [] scalar_array) (fold 
scalar_array.$.iterable! v (seq - (call v.$.peer_id! (v.$.service_id! v.$.function_name!) [v.$.args[0]! v.$.args[1]!]) + (call v.$.peer_id! (v.$.service_id! v.$.function_name!) [v.$.args.[0]! v.$.args.[1]!]) (next v) ) ) @@ -155,7 +155,7 @@ fn flattening_empty_values() { r#" (seq (call "{0}" ("" "") [] $stream) - (call "{1}" ("" "") [$stream.$.args!]) ; here $stream.$.args returns an empty array + (call "{1}" ("" "") [$stream.$.[1]!]) ; here $stream.$.[1] returns an empty array ) "#, set_variable_peer_id, local_peer_id diff --git a/air/tests/test_module/integration/join_behaviour.rs b/air/tests/test_module/integration/join_behaviour.rs index 256d9741..683e14f4 100644 --- a/air/tests/test_module/integration/join_behaviour.rs +++ b/air/tests/test_module/integration/join_behaviour.rs @@ -172,7 +172,7 @@ fn dont_wait_on_json_path_on_scalars() { assert_eq!(array_result.ret_code, 1004); assert_eq!( array_result.error_message, - r#"variable with path '$.[5]' not found in '[1,2,3,4,5]' with an error: 'json value not set'"# + r#"value '[1,2,3,4,5]' does not contain element for idx = '5'"# ); let script = format!( @@ -191,6 +191,6 @@ fn dont_wait_on_json_path_on_scalars() { assert_eq!(object_result.ret_code, 1004); assert_eq!( object_result.error_message, - r#"variable with path '$.non_exist_path' not found in '{"err_msg":"","is_authenticated":1,"ret_code":0}' with an error: 'json value not set'"# + r#"value '{"err_msg":"","is_authenticated":1,"ret_code":0}' does not contain element with field name = 'non_exist_path'"# ); } diff --git a/air/tests/test_module/integration/json_path.rs b/air/tests/test_module/integration/json_path.rs index 923cd4f2..5e79a3ce 100644 --- a/air/tests/test_module/integration/json_path.rs +++ b/air/tests/test_module/integration/json_path.rs @@ -37,5 +37,5 @@ fn json_path_not_allowed_for_non_objects_and_arrays() { let result = checked_call_vm!(set_variable_vm, "asd", &script, "", ""); let result = call_vm!(local_vm, "asd", script, "", result.data); - assert_eq!(result.ret_code, 1017); + assert_eq!(result.ret_code, 1004); } diff --git a/air/tests/test_module/integration/security_tetraplets.rs b/air/tests/test_module/integration/security_tetraplets.rs index f637cfc1..9d70e597 100644 --- a/air/tests/test_module/integration/security_tetraplets.rs +++ b/air/tests/test_module/integration/security_tetraplets.rs @@ -144,7 +144,7 @@ fn fold_json_path() { }; let first_arg_tetraplet = SecurityTetraplet { triplet: first_arg_triplet, - json_path: String::from("$.args"), + json_path: String::from(".args"), }; let second_arg_triplet = ResolvedTriplet { diff --git a/air/tests/test_module/integration/streams_early_exit.rs b/air/tests/test_module/integration/streams_early_exit.rs index 1daa1d4d..1f8e264f 100644 --- a/air/tests/test_module/integration/streams_early_exit.rs +++ b/air/tests/test_module/integration/streams_early_exit.rs @@ -140,7 +140,7 @@ fn par_early_exit() { ]; let setter_3_malicious_data = raw_data_from_trace(setter_3_malicious_trace); let init_result_3 = call_vm!(init, "", &script, init_result_2.data.clone(), setter_3_malicious_data); - assert_eq!(init_result_3.ret_code, 1018); + assert_eq!(init_result_3.ret_code, 1014); let actual_trace = trace_from_result(&init_result_3); let expected_trace = trace_from_result(&init_result_2); diff --git a/crates/air-lib/air-parser/Cargo.toml b/crates/air-lib/air-parser/Cargo.toml index 8fbaaf75..eea7b7bd 100644 --- a/crates/air-lib/air-parser/Cargo.toml +++ b/crates/air-lib/air-parser/Cargo.toml @@ -13,6 +13,8 @@ categories = ["wasm"] 
lalrpop = "0.19.6" [dependencies] +air-lambda-parser = { path = "../lambda/parser" } + lalrpop-util = "0.19.6" regex = "1.5.4" codespan = "0.11.1" diff --git a/crates/air-lib/air-parser/benches/parser.rs b/crates/air-lib/air-parser/benches/parser.rs index 7b1550b2..5b11bbfe 100644 --- a/crates/air-lib/air-parser/benches/parser.rs +++ b/crates/air-lib/air-parser/benches/parser.rs @@ -25,6 +25,7 @@ use criterion::Criterion; use air_parser::AIRLexer; use air_parser::AIRParser; +use air_parser::VariableValidator; const SOURCE_CODE_BAD: &'static str = r#"(seq (seq @@ -75,22 +76,6 @@ fn create_parser(c: &mut Criterion) { c.bench_function("create_parser", move |b| b.iter(move || AIRParser::new())); } -fn clone_parser(c: &mut Criterion) { - let parser = AIRParser::new(); - c.bench_function("clone_parser", move |b| { - let parser = parser.clone(); - b.iter(move || parser.clone()) - }); -} - -fn clone_parser_rc(c: &mut Criterion) { - let parser = Rc::new(AIRParser::new()); - c.bench_function("clone_parser_rc", move |b| { - let parser = parser.clone(); - b.iter(move || parser.clone()) - }); -} - fn parse(c: &mut Criterion) { let parser = Rc::new(AIRParser::new()); c.bench_function( @@ -98,11 +83,12 @@ fn parse(c: &mut Criterion) { move |b| { let parser = parser.clone(); b.iter(move || { + let mut validator = VariableValidator::new(); let lexer = AIRLexer::new(SOURCE_CODE_GOOD); parser .clone() - .parse("", &mut Vec::new(), lexer) + .parse("", &mut Vec::new(), &mut validator, lexer) .expect("success") }) }, @@ -116,8 +102,12 @@ fn parse_to_fail(c: &mut Criterion) { move |b| { let parser = parser.clone(); b.iter(move || { + let mut validator = VariableValidator::new(); let lexer = AIRLexer::new(SOURCE_CODE_BAD); - parser.clone().parse("", &mut Vec::new(), lexer) + + parser + .clone() + .parse("", &mut Vec::new(), &mut validator, lexer) }) }, ); @@ -138,11 +128,12 @@ fn parse_deep(c: &mut Criterion) { let parser = parser.clone(); let code = &source_code[*i]; b.iter(move || { + let mut validator = VariableValidator::new(); let lexer = AIRLexer::new(code); parser .clone() - .parse("", &mut Vec::new(), lexer) + .parse("", &mut Vec::new(), &mut validator, lexer) .expect("success") }); }, @@ -152,18 +143,20 @@ fn parse_deep(c: &mut Criterion) { fn parse_dashboard_script(c: &mut Criterion) { let parser = Rc::new(AIRParser::new()); - const DASHBOARD_SCRIPT: &str = include_str!("../../../air/tests/scripts/dashboard.clj"); + const DASHBOARD_SCRIPT: &str = + include_str!("../../../../air/tests/test_module/integration/scripts/dashboard.clj"); c.bench_function( format!("parse {} bytes", DASHBOARD_SCRIPT.len()).as_str(), move |b| { let parser = parser.clone(); b.iter(move || { + let mut validator = VariableValidator::new(); let lexer = AIRLexer::new(DASHBOARD_SCRIPT); parser .clone() - .parse("", &mut Vec::new(), lexer) + .parse("", &mut Vec::new(), &mut validator, lexer) .expect("success") }) }, diff --git a/crates/air-lib/air-parser/src/lib.rs b/crates/air-lib/air-parser/src/lib.rs index f255786b..e00c6a01 100644 --- a/crates/air-lib/air-parser/src/lib.rs +++ b/crates/air-lib/air-parser/src/lib.rs @@ -30,7 +30,11 @@ pub use parser::ast; pub use parser::parse; pub use parser::AIRLexer; pub use parser::AIRParser; +pub use parser::VariableValidator; #[cfg(test)] #[macro_use] extern crate fstrings; + +use air_lambda_parser::parse as parse_lambda; +use air_lambda_parser::LambdaAST; diff --git a/crates/air-lib/air-parser/src/parser/air.lalrpop b/crates/air-lib/air-parser/src/parser/air.lalrpop index fa36d8e9..15fa8673 
100644 --- a/crates/air-lib/air-parser/src/parser/air.lalrpop +++ b/crates/air-lib/air-parser/src/parser/air.lalrpop @@ -1,11 +1,11 @@ use crate::parser::ast::*; -use crate::parser::air_parser::make_flattened_error; use crate::parser::air_parser::make_stream_iterable_error; use crate::parser::ParserError; use crate::parser::VariableValidator; use crate::parser::Span; use crate::parser::lexer::Token; +use air_lambda_parser::LambdaAST; use lalrpop_util::ErrorRecovery; use std::rc::Rc; @@ -26,17 +26,11 @@ Instr: Box> = { }, "(" ap ")" => { - if let ApArgument::JsonPath(json_path) = &arg { - if let AstVariable::Stream(_) = &json_path.variable { - let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten); + if let ApArgument::VariableWithLambda(vl) = &arg { + if let AstVariable::Stream(_) = &vl.variable { + let token = Token::VariableWithLambda(vl.variable.clone(), vl.lambda.clone()); errors.push(make_stream_iterable_error(left, token, right)); }; - - // Due the json path constraints json path should be flattened in a apply arguments. - if !json_path.should_flatten { - let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten); - errors.push(make_flattened_error(left, token, right)); - } } let apply = Ap::new(arg, res); @@ -124,17 +118,11 @@ CallInstrValue: CallInstrValue<'input> = { => CallInstrValue::Literal(l), => CallInstrValue::Variable(AstVariable::Scalar(a)), => CallInstrValue::Variable(AstVariable::Stream(s)), - => { - let variable = j.0; - let path = j.1; - let should_flatten = j.2; - // Due the json path constraints json path should be flattened in a call triplet. - if !should_flatten { - let token = Token::VariableWithJsonPath(variable.clone(), path, should_flatten); - errors.push(make_flattened_error(left, token, right)); - } + => { + let variable = vl.0; + let lambda = vl.1; - CallInstrValue::JsonPath(JsonPath::new(variable, path, should_flatten)) + CallInstrValue::VariableWithLambda(VariableWithLambda::new(variable, lambda)) }, InitPeerId => CallInstrValue::InitPeerId, } @@ -145,7 +133,7 @@ CallInstrArgValue: CallInstrArgValue<'input> = { => CallInstrArgValue::Literal(s), => CallInstrArgValue::Variable(AstVariable::Scalar(v)), => CallInstrArgValue::Variable(AstVariable::Stream(v)), - => CallInstrArgValue::JsonPath(JsonPath::new(j.0, j.1, j.2)), + => CallInstrArgValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)), => CallInstrArgValue::Number(n), => CallInstrArgValue::Boolean(b), InitPeerId => CallInstrArgValue::InitPeerId, @@ -155,7 +143,7 @@ CallInstrArgValue: CallInstrArgValue<'input> = { ApArgument: ApArgument<'input> = { => ApArgument::ScalarVariable(a), - => ApArgument::JsonPath(JsonPath::new(j.0, j.1, j.2)), + => ApArgument::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)), => ApArgument::Number(n), => ApArgument::Boolean(b), EmptyArray => ApArgument::EmptyArray, @@ -165,22 +153,21 @@ ApArgument: ApArgument<'input> = { ScalarIterable: IterableScalarValue<'input> = { => IterableScalarValue::ScalarVariable(v), - => { + => { use crate::parser::air::AstVariable::*; - let variable = j.0; - let path = j.1; - let should_flatten = j.2; + let variable = vl.0; + let lambda = vl.1; let scalar_name = match variable { Stream(name) => { - let token = Token::VariableWithJsonPath(variable, path, should_flatten); + let token = Token::VariableWithLambda(variable, lambda.clone()); errors.push(make_stream_iterable_error(l, token, r)); name } Scalar(name) => name, }; - 
IterableScalarValue::JsonPath { scalar_name, path, should_flatten } + IterableScalarValue::VariableWithLambda { scalar_name, lambda } } } @@ -192,7 +179,7 @@ Matchable: MatchableValue<'input> = { => MatchableValue::Boolean(b), => MatchableValue::Number(n), EmptyArray => MatchableValue::EmptyArray, - => MatchableValue::JsonPath(JsonPath::new(j.0, j.1, j.2)), + => MatchableValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)), } extern { @@ -209,7 +196,7 @@ extern { Literal => Token::StringLiteral(<&'input str>), Alphanumeric => Token::Alphanumeric(<&'input str>), Stream => Token::Stream(<&'input str>), - JsonPath => Token::VariableWithJsonPath(>, <&'input str>, ), + VariableWithLambda => Token::VariableWithLambda(>, >), Number => Token::Number(), Boolean => Token::Boolean(), diff --git a/crates/air-lib/air-parser/src/parser/air.rs b/crates/air-lib/air-parser/src/parser/air.rs index 44527ee4..61bd17c4 100644 --- a/crates/air-lib/air-parser/src/parser/air.rs +++ b/crates/air-lib/air-parser/src/parser/air.rs @@ -1,12 +1,12 @@ // auto-generated: "lalrpop 0.19.6" -// sha3: bcd19a478f6a51d6d816bf63f4b9bd31c8eb721de974cf77626c92ea437d66 +// sha3: dce2765d82d72fa2e95c6d8a8a3bd84838971ba5ae153715dd724be731cecdba use crate::parser::ast::*; -use crate::parser::air_parser::make_flattened_error; use crate::parser::air_parser::make_stream_iterable_error; use crate::parser::ParserError; use crate::parser::VariableValidator; use crate::parser::Span; use crate::parser::lexer::Token; +use air_lambda_parser::LambdaAST; use lalrpop_util::ErrorRecovery; use std::rc::Rc; #[allow(unused_extern_crates)] @@ -21,12 +21,12 @@ mod __parse__AIR { #![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports, unused_parens)] use crate::parser::ast::*; - use crate::parser::air_parser::make_flattened_error; use crate::parser::air_parser::make_stream_iterable_error; use crate::parser::ParserError; use crate::parser::VariableValidator; use crate::parser::Span; use crate::parser::lexer::Token; + use air_lambda_parser::LambdaAST; use lalrpop_util::ErrorRecovery; use std::rc::Rc; #[allow(unused_extern_crates)] @@ -42,9 +42,9 @@ mod __parse__AIR { Variant0(Token<'input>), Variant1(&'input str), Variant2(bool), - Variant3((AstVariable<'input>, &'input str, bool)), - Variant4(LastErrorPath), - Variant5(Number), + Variant3(LastErrorPath), + Variant4(Number), + Variant5((AstVariable<'input>, LambdaAST<'input>)), Variant6(__lalrpop_util::ErrorRecovery, ParserError>), Variant7(CallInstrArgValue<'input>), Variant8(alloc::vec::Vec>), @@ -64,15 +64,15 @@ mod __parse__AIR { // State 0 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, // State 1 - 0, 0, 0, 0, 35, 36, 37, 0, 38, 39, 40, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 35, 36, 37, 0, 38, 39, 40, 0, 41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 2 - 12, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 3 - 0, 0, 0, 0, 50, 0, 0, 0, 51, 0, 0, 0, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 50, 0, 0, 0, 0, 0, 0, 51, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 4 - 0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 5 - 0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
// State 6 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, // State 7 @@ -80,15 +80,15 @@ mod __parse__AIR { // State 8 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, // State 9 - 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 10 - 19, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 19, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 11 - 0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 12 - 0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 13 - 0, 0, 0, 0, 53, 54, 55, 56, 57, 0, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 53, 54, 55, 56, 0, 57, 58, 59, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 14 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, // State 15 @@ -98,9 +98,9 @@ mod __parse__AIR { // State 17 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 18 - 0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 19 - 0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 20 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, // State 21 @@ -110,11 +110,11 @@ mod __parse__AIR { // State 23 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, // State 24 - 0, 83, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 83, 0, 0, 64, 0, 0, 0, 0, 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 25 0, 0, 0, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 26 - 0, 0, 0, 0, 44, 0, 0, 45, 46, 0, 47, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 44, 0, 0, 45, 0, 46, 0, 47, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 27 0, 0, 0, 104, 87, 88, 89, 90, 91, 92, 93, 94, 95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 28 @@ -130,57 +130,57 @@ mod __parse__AIR { // State 33 0, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 34 - 0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, -9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 35 - 0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, -12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 36 - 0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, -13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 37 - 0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 38 - 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 39 - 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -11, 0, 0, 0, 0, 0, 0, -11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 40 - 0, 0, 0, 0, 
-11, 0, 0, 0, 0, 0, 0, 0, -11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, -10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 41 - -61, 0, 0, 0, -61, 0, 0, -61, -61, 0, -61, 0, -61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -61, 0, 0, 0, -61, 0, 0, -61, 0, -61, 0, -61, -61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 42 - -62, 0, 0, 0, -62, 0, 0, -62, -62, 0, -62, 0, -62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -62, 0, 0, 0, -62, 0, 0, -62, 0, -62, 0, -62, -62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 43 - -29, -29, -29, 0, -29, 0, 0, -29, -29, 0, -29, 0, -29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -29, -29, -29, 0, -29, 0, 0, -29, 0, -29, 0, -29, -29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 44 - -32, -32, -32, 0, -32, 0, 0, -32, -32, 0, -32, 0, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -32, -32, -32, 0, -32, 0, 0, -32, 0, -32, 0, -32, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 45 - -31, -31, -31, 0, -31, 0, 0, -31, -31, 0, -31, 0, -31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -28, -28, -28, 0, -28, 0, 0, -28, 0, -28, 0, -28, -28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 46 - -28, -28, -28, 0, -28, 0, 0, -28, -28, 0, -28, 0, -28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -30, -30, -30, 0, -30, 0, 0, -30, 0, -30, 0, -30, -30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 47 - -30, -30, -30, 0, -30, 0, 0, -30, -30, 0, -30, 0, -30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -31, -31, -31, 0, -31, 0, 0, -31, 0, -31, 0, -31, -31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 48 0, 0, 0, 0, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 49 0, 0, 0, 0, -64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 50 - 0, 0, 0, 0, -65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - // State 51 0, 0, 0, 0, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 51 + 0, 0, 0, 0, -65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 52 - -50, 0, 0, 0, -50, -50, -50, -50, -50, 0, -50, -50, -50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -50, + -50, 0, 0, 0, -50, -50, -50, -50, 0, -50, -50, -50, -50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -50, // State 53 - -53, 0, 0, 0, -53, -53, -53, -53, -53, 0, -53, -53, -53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -53, + -53, 0, 0, 0, -53, -53, -53, -53, 0, -53, -53, -53, -53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -53, // State 54 - -55, 0, 0, 0, -55, -55, -55, -55, -55, 0, -55, -55, -55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -55, + -55, 0, 0, 0, -55, -55, -55, -55, 0, -55, -55, -55, -55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -55, // State 55 - -49, 0, 0, 0, -49, -49, -49, -49, -49, 0, -49, -49, -49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -49, + -49, 0, 0, 0, -49, -49, -49, -49, 0, -49, -49, -49, -49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -49, // State 56 - -56, 0, 0, 0, -56, -56, -56, -56, -56, 0, -56, -56, -56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -56, + -52, 0, 0, 0, -52, -52, -52, -52, 0, -52, -52, -52, -52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -52, // State 57 - -52, 0, 0, 0, -52, -52, -52, -52, -52, 0, -52, -52, -52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -52, + -54, 0, 0, 0, -54, -54, -54, -54, 0, -54, -54, -54, -54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -54, // State 58 - -54, 0, 0, 0, -54, -54, -54, -54, -54, 0, -54, -54, -54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -54, + -51, 0, 0, 0, -51, -51, -51, -51, 0, -51, -51, -51, -51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -51, // State 59 - -51, 0, 0, 0, -51, -51, -51, -51, -51, 0, -51, -51, -51, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -51, + -56, 0, 0, 0, -56, -56, -56, -56, 0, -56, -56, -56, -56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -56, // State 60 0, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 61 @@ -206,7 +206,7 @@ mod __parse__AIR { // State 71 -38, -38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -38, // State 72 - 0, -66, 0, 0, -66, 0, 0, -66, -66, 0, -66, 0, -66, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, -66, 0, 0, -66, 0, 0, -66, 0, -66, 0, -66, -66, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 73 0, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 74 @@ -232,7 +232,7 @@ mod __parse__AIR { // State 84 0, 0, 0, -16, -16, -16, -16, -16, -16, -16, -16, -16, -16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 85 - 0, -17, 0, 0, -17, 0, 0, 0, 0, 0, 0, 0, -17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, -17, 0, 0, -17, 0, 0, 0, 0, 0, 0, -17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 86 0, 0, 0, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 87 @@ -242,19 +242,19 @@ mod __parse__AIR { // State 89 0, 0, 0, -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 90 - 0, 0, 0, -22, -22, -22, -22, -22, -22, -22, -22, -22, -22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - // State 91 0, 0, 0, -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - // State 92 + // State 91 0, 0, 0, -19, -19, -19, -19, -19, -19, -19, -19, -19, -19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - // State 93 + // State 92 0, 0, 0, -23, -23, -23, -23, -23, -23, -23, -23, -23, -23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - // State 94 + // State 93 0, 0, 0, -21, -21, -21, -21, -21, -21, -21, -21, -21, -21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 94 + 0, 0, 0, -22, -22, -22, -22, -22, -22, -22, -22, -22, -22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 95 0, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 96 - -63, 0, 0, 0, -63, 0, 0, -63, -63, 0, -63, 0, -63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + -63, 0, 0, 0, -63, 0, 0, -63, 0, -63, 0, -63, -63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 97 -42, -42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -42, // State 98 @@ -268,7 +268,7 @@ mod __parse__AIR { // State 102 0, 0, 0, -5, -5, -5, -5, -5, -5, -5, -5, -5, -5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 103 - 0, -18, 0, 0, -18, 0, 0, 0, 0, 0, 0, 0, -18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, -18, 0, 0, -18, 0, 0, 0, 0, 0, 0, -18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // State 104 0, 0, -34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; @@ -554,11 +554,11 @@ mod __parse__AIR { r###"Boolean"###, r###"EmptyArray"###, r###"InitPeerId"###, - r###"JsonPath"###, r###"LastError"###, r###"Literal"###, r###"Number"###, r###"Stream"###, + r###"VariableWithLambda"###, r###"ap"###, r###"call"###, r###"fold"###, @@ -698,11 +698,11 @@ mod __parse__AIR { Token::Boolean(_) if true => Some(5), Token::SquareBrackets if true => Some(6), Token::InitPeerId if true => Some(7), - Token::VariableWithJsonPath(_, _, _) if true => Some(8), - Token::LastError(_) if true => Some(9), - Token::StringLiteral(_) if true => Some(10), - Token::Number(_) if true => Some(11), - Token::Stream(_) if true => Some(12), + Token::LastError(_) if true => Some(8), + Token::StringLiteral(_) if true => Some(9), + Token::Number(_) if true => Some(10), + Token::Stream(_) if true => Some(11), + Token::VariableWithLambda(_, _) if true => Some(12), Token::Ap if true => Some(13), Token::Call if true => Some(14), Token::Fold if true => Some(15), @@ -728,7 +728,7 @@ mod __parse__AIR { { match __token_index { 0 | 1 | 2 | 3 | 6 | 7 | 13 | 14 | 
15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 => __Symbol::Variant0(__token), - 4 | 10 | 12 => match __token { + 4 | 9 | 11 => match __token { Token::Alphanumeric(__tok0) | Token::StringLiteral(__tok0) | Token::Stream(__tok0) if true => __Symbol::Variant1(__tok0), _ => unreachable!(), }, @@ -737,15 +737,15 @@ mod __parse__AIR { _ => unreachable!(), }, 8 => match __token { - Token::VariableWithJsonPath(__tok0, __tok1, __tok2) if true => __Symbol::Variant3((__tok0, __tok1, __tok2)), + Token::LastError(__tok0) if true => __Symbol::Variant3(__tok0), _ => unreachable!(), }, - 9 => match __token { - Token::LastError(__tok0) if true => __Symbol::Variant4(__tok0), + 10 => match __token { + Token::Number(__tok0) if true => __Symbol::Variant4(__tok0), _ => unreachable!(), }, - 11 => match __token { - Token::Number(__tok0) if true => __Symbol::Variant5(__tok0), + 12 => match __token { + Token::VariableWithLambda(__tok0, __tok1) if true => __Symbol::Variant5((__tok0, __tok1)), _ => unreachable!(), }, _ => unreachable!(), @@ -1476,14 +1476,14 @@ mod __parse__AIR { fn __symbol_type_mismatch() -> ! { panic!("symbol type mismatch") } - fn __pop_Variant3< + fn __pop_Variant5< 'input, >( __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> - ) -> (usize, (AstVariable<'input>, &'input str, bool), usize) + ) -> (usize, (AstVariable<'input>, LambdaAST<'input>), usize) { match __symbols.pop() { - Some((__l, __Symbol::Variant3(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant5(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -1564,14 +1564,14 @@ mod __parse__AIR { _ => __symbol_type_mismatch() } } - fn __pop_Variant4< + fn __pop_Variant3< 'input, >( __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> ) -> (usize, LastErrorPath, usize) { match __symbols.pop() { - Some((__l, __Symbol::Variant4(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant3(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -1586,14 +1586,14 @@ mod __parse__AIR { _ => __symbol_type_mismatch() } } - fn __pop_Variant5< + fn __pop_Variant4< 'input, >( __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> ) -> (usize, Number, usize) { match __symbols.pop() { - Some((__l, __Symbol::Variant5(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant4(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -1897,8 +1897,8 @@ mod __parse__AIR { _: core::marker::PhantomData<(&'err (), &'input (), &'v ())>, ) -> (usize, usize) { - // ApArgument = JsonPath => ActionFn(40); - let __sym0 = __pop_Variant3(__symbols); + // ApArgument = VariableWithLambda => ActionFn(40); + let __sym0 = __pop_Variant5(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action40::<>(input, errors, validator, __sym0); @@ -1919,7 +1919,7 @@ mod __parse__AIR { ) -> (usize, usize) { // ApArgument = Number => ActionFn(41); - let __sym0 = __pop_Variant5(__symbols); + let __sym0 = __pop_Variant4(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action41::<>(input, errors, validator, __sym0); @@ -2003,7 +2003,7 @@ mod __parse__AIR { ) -> (usize, usize) { // ApArgument = LastError => ActionFn(45); - let __sym0 = __pop_Variant4(__symbols); + let __sym0 = __pop_Variant3(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action45::<>(input, errors, validator, __sym0); @@ -2154,8 +2154,8 @@ mod __parse__AIR { _: core::marker::PhantomData<(&'err (), &'input (), 
&'v ())>, ) -> (usize, usize) { - // CallInstrArgValue = JsonPath => ActionFn(33); - let __sym0 = __pop_Variant3(__symbols); + // CallInstrArgValue = VariableWithLambda => ActionFn(33); + let __sym0 = __pop_Variant5(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action33::<>(input, errors, validator, __sym0); @@ -2176,7 +2176,7 @@ mod __parse__AIR { ) -> (usize, usize) { // CallInstrArgValue = Number => ActionFn(34); - let __sym0 = __pop_Variant5(__symbols); + let __sym0 = __pop_Variant4(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action34::<>(input, errors, validator, __sym0); @@ -2260,7 +2260,7 @@ mod __parse__AIR { ) -> (usize, usize) { // CallInstrArgValue = LastError => ActionFn(38); - let __sym0 = __pop_Variant4(__symbols); + let __sym0 = __pop_Variant3(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action38::<>(input, errors, validator, __sym0); @@ -2343,8 +2343,8 @@ mod __parse__AIR { _: core::marker::PhantomData<(&'err (), &'input (), &'v ())>, ) -> (usize, usize) { - // CallInstrValue = JsonPath => ActionFn(78); - let __sym0 = __pop_Variant3(__symbols); + // CallInstrValue = VariableWithLambda => ActionFn(78); + let __sym0 = __pop_Variant5(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action78::<>(input, errors, validator, __sym0); @@ -2895,7 +2895,7 @@ mod __parse__AIR { ) -> (usize, usize) { // Matchable = Number => ActionFn(53); - let __sym0 = __pop_Variant5(__symbols); + let __sym0 = __pop_Variant4(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action53::<>(input, errors, validator, __sym0); @@ -2936,8 +2936,8 @@ mod __parse__AIR { _: core::marker::PhantomData<(&'err (), &'input (), &'v ())>, ) -> (usize, usize) { - // Matchable = JsonPath => ActionFn(55); - let __sym0 = __pop_Variant3(__symbols); + // Matchable = VariableWithLambda => ActionFn(55); + let __sym0 = __pop_Variant5(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action55::<>(input, errors, validator, __sym0); @@ -3128,8 +3128,8 @@ mod __parse__AIR { _: core::marker::PhantomData<(&'err (), &'input (), &'v ())>, ) -> (usize, usize) { - // ScalarIterable = JsonPath => ActionFn(86); - let __sym0 = __pop_Variant3(__symbols); + // ScalarIterable = VariableWithLambda => ActionFn(86); + let __sym0 = __pop_Variant5(__symbols); let __start = __sym0.0.clone(); let __end = __sym0.2.clone(); let __nt = super::__action86::<>(input, errors, validator, __sym0); @@ -3240,17 +3240,11 @@ fn __action3< ) -> Box> { { - if let ApArgument::JsonPath(json_path) = &arg { - if let AstVariable::Stream(_) = &json_path.variable { - let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten); + if let ApArgument::VariableWithLambda(vl) = &arg { + if let AstVariable::Stream(_) = &vl.variable { + let token = Token::VariableWithLambda(vl.variable.clone(), vl.lambda.clone()); errors.push(make_stream_iterable_error(left, token, right)); }; - - // Due the json path constraints json path should be flattened in a apply arguments. 
- if !json_path.should_flatten { - let token = Token::VariableWithJsonPath(json_path.variable.clone(), json_path.path, json_path.should_flatten); - errors.push(make_flattened_error(left, token, right)); - } } let apply = Ap::new(arg, res); @@ -3703,21 +3697,15 @@ fn __action27< errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, (_, left, _): (usize, usize, usize), - (_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize), + (_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize), (_, right, _): (usize, usize, usize), ) -> CallInstrValue<'input> { { - let variable = j.0; - let path = j.1; - let should_flatten = j.2; - // Due the json path constraints json path should be flattened in a call triplet. - if !should_flatten { - let token = Token::VariableWithJsonPath(variable.clone(), path, should_flatten); - errors.push(make_flattened_error(left, token, right)); - } + let variable = vl.0; + let lambda = vl.1; - CallInstrValue::JsonPath(JsonPath::new(variable, path, should_flatten)) + CallInstrValue::VariableWithLambda(VariableWithLambda::new(variable, lambda)) } } @@ -3805,10 +3793,10 @@ fn __action33< input: &'input str, errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, - (_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize), + (_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize), ) -> CallInstrArgValue<'input> { - CallInstrArgValue::JsonPath(JsonPath::new(j.0, j.1, j.2)) + CallInstrArgValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)) } #[allow(unused_variables)] @@ -3910,10 +3898,10 @@ fn __action40< input: &'input str, errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, - (_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize), + (_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize), ) -> ApArgument<'input> { - ApArgument::JsonPath(JsonPath::new(j.0, j.1, j.2)) + ApArgument::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)) } #[allow(unused_variables)] @@ -4016,26 +4004,25 @@ fn __action47< errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, (_, l, _): (usize, usize, usize), - (_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize), + (_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize), (_, r, _): (usize, usize, usize), ) -> IterableScalarValue<'input> { { use crate::parser::air::AstVariable::*; - let variable = j.0; - let path = j.1; - let should_flatten = j.2; + let variable = vl.0; + let lambda = vl.1; let scalar_name = match variable { Stream(name) => { - let token = Token::VariableWithJsonPath(variable, path, should_flatten); + let token = Token::VariableWithLambda(variable, lambda.clone()); errors.push(make_stream_iterable_error(l, token, r)); name } Scalar(name) => name, }; - IterableScalarValue::JsonPath { scalar_name, path, should_flatten } + IterableScalarValue::VariableWithLambda { scalar_name, lambda } } } @@ -4153,10 +4140,10 @@ fn __action55< input: &'input str, errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, - (_, j, _): (usize, (AstVariable<'input>, &'input str, bool), usize), + (_, vl, _): (usize, (AstVariable<'input>, LambdaAST<'input>), usize), ) -> MatchableValue<'input> { - MatchableValue::JsonPath(JsonPath::new(j.0, j.1, j.2)) + MatchableValue::VariableWithLambda(VariableWithLambda::new(vl.0, vl.1)) } #[allow(unused_variables)] @@ -4434,7 +4421,7 @@ fn __action69< input: &'input 
str, errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, - __0: (usize, (AstVariable<'input>, &'input str, bool), usize), + __0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize), __1: (usize, usize, usize), ) -> CallInstrValue<'input> { @@ -4764,7 +4751,7 @@ fn __action77< input: &'input str, errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, - __0: (usize, (AstVariable<'input>, &'input str, bool), usize), + __0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize), __1: (usize, usize, usize), ) -> IterableScalarValue<'input> { @@ -4797,7 +4784,7 @@ fn __action78< input: &'input str, errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, - __0: (usize, (AstVariable<'input>, &'input str, bool), usize), + __0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize), ) -> CallInstrValue<'input> { let __start0 = __0.2.clone(); @@ -5111,7 +5098,7 @@ fn __action86< input: &'input str, errors: &'err mut Vec, ParserError>>, validator: &'v mut VariableValidator<'input>, - __0: (usize, (AstVariable<'input>, &'input str, bool), usize), + __0: (usize, (AstVariable<'input>, LambdaAST<'input>), usize), ) -> IterableScalarValue<'input> { let __start0 = __0.2.clone(); diff --git a/crates/air-lib/air-parser/src/parser/air_parser.rs b/crates/air-lib/air-parser/src/parser/air_parser.rs index 923f3483..16c062d2 100644 --- a/crates/air-lib/air-parser/src/parser/air_parser.rs +++ b/crates/air-lib/air-parser/src/parser/air_parser.rs @@ -51,11 +51,11 @@ pub fn parse(air_script: &str) -> Result>, String> { match result { Ok(r) if errors.is_empty() => Ok(r), Ok(_) => Err(report_errors(file_id, files, errors)), - Err(err) => Err(report_errors( + Err(error) => Err(report_errors( file_id, files, vec![ErrorRecovery { - error: err, + error, dropped_tokens: vec![], }], )), @@ -124,10 +124,7 @@ fn parser_error_to_label(file_id: usize, error: ParserError) -> Label { match error { LexerError(error) => lexical_error_to_label(file_id, error), - CallArgsNotFlattened(start, end) => { - Label::primary(file_id, start..end).with_message(error.to_string()) - } - JsonPathAppliedToStream(start, end) => { + LambdaAppliedToStream(start, end) => { Label::primary(file_id, start..end).with_message(error.to_string()) } UndefinedIterable(start, end, _) => { @@ -157,7 +154,7 @@ fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label { EmptyVariableOrConst(start, end) => { Label::primary(file_id, start..end).with_message(error.to_string()) } - InvalidJsonPath(start, end) => { + InvalidLambda(start, end) => { Label::primary(file_id, start..end).with_message(error.to_string()) } UnallowedCharInNumber(start, end) => { @@ -169,6 +166,9 @@ fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label { ParseFloatError(start, end, _) => { Label::primary(file_id, start..end).with_message(error.to_string()) } + LambdaParserError(start, end, _) => { + Label::primary(file_id, start..end).with_message(error.to_string()) + } LastErrorPathError(start, end, _) => { Label::primary(file_id, start..end).with_message(error.to_string()) } @@ -195,18 +195,10 @@ macro_rules! 
make_user_error( }} ); -pub(super) fn make_flattened_error( - start_pos: usize, - token: Token<'_>, - end_pos: usize, -) -> ErrorRecovery, ParserError> { - make_user_error!(CallArgsNotFlattened, start_pos, token, end_pos) -} - pub(super) fn make_stream_iterable_error( start_pos: usize, token: Token<'_>, end_pos: usize, ) -> ErrorRecovery, ParserError> { - make_user_error!(JsonPathAppliedToStream, start_pos, token, end_pos) + make_user_error!(LambdaAppliedToStream, start_pos, token, end_pos) } diff --git a/crates/air-lib/air-parser/src/parser/ast.rs b/crates/air-lib/air-parser/src/parser/ast.rs index b2890d9c..703e7af7 100644 --- a/crates/air-lib/air-parser/src/parser/ast.rs +++ b/crates/air-lib/air-parser/src/parser/ast.rs @@ -21,6 +21,7 @@ pub use crate::parser::lexer::AstVariable; pub use crate::parser::lexer::LastErrorPath; pub use crate::parser::lexer::Number; +use air_lambda_parser::LambdaAST; use serde::Deserialize; use serde::Serialize; @@ -66,7 +67,7 @@ pub struct Call<'i> { #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub enum ApArgument<'i> { ScalarVariable(&'i str), - JsonPath(JsonPath<'i>), + VariableWithLambda(VariableWithLambda<'i>), Number(Number), Boolean(bool), Literal(&'i str), @@ -85,7 +86,7 @@ pub enum CallInstrValue<'i> { InitPeerId, Literal(&'i str), Variable(AstVariable<'i>), - JsonPath(JsonPath<'i>), + VariableWithLambda(VariableWithLambda<'i>), } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] @@ -97,16 +98,15 @@ pub enum CallInstrArgValue<'i> { Boolean(bool), EmptyArray, // only empty arrays are allowed now Variable(AstVariable<'i>), - JsonPath(JsonPath<'i>), + VariableWithLambda(VariableWithLambda<'i>), } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub enum IterableScalarValue<'i> { ScalarVariable(&'i str), - JsonPath { + VariableWithLambda { scalar_name: &'i str, - path: &'i str, - should_flatten: bool, + lambda: LambdaAST<'i>, }, } @@ -118,7 +118,7 @@ pub enum MatchableValue<'i> { Boolean(bool), EmptyArray, Variable(AstVariable<'i>), - JsonPath(JsonPath<'i>), + VariableWithLambda(VariableWithLambda<'i>), } #[derive(Serialize, Debug, PartialEq, Clone)] @@ -171,8 +171,8 @@ pub struct Next<'i>(pub &'i str); pub struct Null; #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub struct JsonPath<'i> { +pub struct VariableWithLambda<'i> { pub variable: AstVariable<'i>, - pub path: &'i str, - pub should_flatten: bool, + #[serde(borrow)] + pub lambda: LambdaAST<'i>, } diff --git a/crates/air-lib/air-parser/src/parser/ast/impls.rs b/crates/air-lib/air-parser/src/parser/ast/impls.rs index ea6b73a0..9883e0ac 100644 --- a/crates/air-lib/air-parser/src/parser/ast/impls.rs +++ b/crates/air-lib/air-parser/src/parser/ast/impls.rs @@ -15,6 +15,7 @@ */ use super::*; +use air_lambda_parser::ValueAccessor; impl<'i> Ap<'i> { pub fn new(argument: ApArgument<'i>, result: AstVariable<'i>) -> Self { @@ -22,12 +23,25 @@ impl<'i> Ap<'i> { } } -impl<'i> JsonPath<'i> { - pub fn new(variable: AstVariable<'i>, path: &'i str, should_flatten: bool) -> Self { - Self { - variable, - path, - should_flatten, +impl<'i> VariableWithLambda<'i> { + pub fn new(variable: AstVariable<'i>, lambda: LambdaAST<'i>) -> Self { + Self { variable, lambda } + } + + // This function is unsafe and lambda must be non-empty, although it's used only for tests + pub fn from_raw_algebras(variable: AstVariable<'i>, lambda: Vec>) -> Self { + let lambda = unsafe { LambdaAST::new_unchecked(lambda) }; + Self { variable, lambda } + } +} + +impl<'i> IterableScalarValue<'i> { + 
// This function is unsafe and lambda must be non-empty, although it's used only for tests + pub fn new_vl(scalar_name: &'i str, lambda: Vec>) -> Self { + let lambda = unsafe { LambdaAST::new_unchecked(lambda) }; + Self::VariableWithLambda { + scalar_name, + lambda, } } } diff --git a/crates/air-lib/air-parser/src/parser/ast/traits.rs b/crates/air-lib/air-parser/src/parser/ast/traits.rs index 87d1fa1d..3488ead3 100644 --- a/crates/air-lib/air-parser/src/parser/ast/traits.rs +++ b/crates/air-lib/air-parser/src/parser/ast/traits.rs @@ -24,13 +24,13 @@ impl fmt::Display for CallInstrArgValue<'_> { match self { InitPeerId => write!(f, "%init_peer_id%"), - LastError(json_path) => write!(f, "%last_error%{}", json_path), + LastError(error_path) => write!(f, "%last_error%{}", error_path), Literal(str) => write!(f, r#""{}""#, str), Number(number) => write!(f, "{}", number), Boolean(bool) => write!(f, "{}", bool), EmptyArray => write!(f, "[]"), Variable(str) => write!(f, "{}", str), - JsonPath(json_path) => write!(f, "{}", json_path), + VariableWithLambda(vl) => write!(f, "{}", vl), } } } @@ -43,7 +43,7 @@ impl fmt::Display for CallInstrValue<'_> { InitPeerId => write!(f, "%init_peer_id%"), Literal(str) => write!(f, r#""{}""#, str), Variable(str) => write!(f, "{}", str), - JsonPath(json_path) => write!(f, "{}", json_path), + VariableWithLambda(vl) => write!(f, "{}", vl), } } } @@ -54,17 +54,10 @@ impl fmt::Display for IterableScalarValue<'_> { match self { ScalarVariable(str) => write!(f, "{}", str), - JsonPath { + VariableWithLambda { scalar_name, - path, - should_flatten, - } => write!( - f, - "{}.{}{}", - scalar_name, - path, - maybe_flatten_char(*should_flatten) - ), + lambda, + } => write!(f, "{}.$.{:?}", scalar_name, lambda), } } } @@ -80,7 +73,7 @@ impl fmt::Display for MatchableValue<'_> { Boolean(bool) => write!(f, "{}", bool), EmptyArray => write!(f, "[]"), Variable(str) => write!(f, "{}", str), - JsonPath(json_path) => write!(f, "{}", json_path), + VariableWithLambda(vl) => write!(f, "{}", vl), } } } @@ -164,7 +157,7 @@ impl fmt::Display for ApArgument<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ApArgument::ScalarVariable(name) => write!(f, "{}", name), - ApArgument::JsonPath(json_path) => write!(f, "{}", json_path), + ApArgument::VariableWithLambda(vl) => write!(f, "{}", vl), ApArgument::LastError(error_path) => write!(f, "{}", error_path), ApArgument::Number(value) => write!(f, "{}", value), ApArgument::Boolean(value) => write!(f, "{}", value), @@ -228,22 +221,8 @@ impl fmt::Display for Next<'_> { } } -impl fmt::Display for JsonPath<'_> { +impl fmt::Display for VariableWithLambda<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "{}.{}{}", - self.variable, - self.path, - maybe_flatten_char(self.should_flatten) - ) - } -} - -fn maybe_flatten_char(should_flatten: bool) -> &'static str { - if should_flatten { - "!" - } else { - "" + write!(f, "{}.$.{:?}", self.variable, self.lambda,) } } diff --git a/crates/air-lib/air-parser/src/parser/errors.rs b/crates/air-lib/air-parser/src/parser/errors.rs index d311625c..0b8d36d6 100644 --- a/crates/air-lib/air-parser/src/parser/errors.rs +++ b/crates/air-lib/air-parser/src/parser/errors.rs @@ -22,13 +22,8 @@ pub enum ParserError { #[error(transparent)] LexerError(#[from] LexerError), - #[error( - "while using json path in this position, result should be flattened, add ! 
at the end" - )] - CallArgsNotFlattened(usize, usize), - - #[error("json path can't be applied to streams in this position")] - JsonPathAppliedToStream(usize, usize), + #[error("lambda can't be applied to streams in this position")] + LambdaAppliedToStream(usize, usize), #[error("variable '{2}' wasn't defined")] UndefinedVariable(usize, usize, String), diff --git a/crates/air-lib/air-parser/src/parser/lexer/air_lexer.rs b/crates/air-lib/air-parser/src/parser/lexer/air_lexer.rs index d26ccb08..12bfc7ee 100644 --- a/crates/air-lib/air-parser/src/parser/lexer/air_lexer.rs +++ b/crates/air-lib/air-parser/src/parser/lexer/air_lexer.rs @@ -215,7 +215,7 @@ fn parse_last_error(input: &str, start_pos: usize) -> LexerResult> { let last_error_size = LAST_ERROR.len(); let last_error_path = match &input[last_error_size..] { "" => LastErrorPath::None, - // The second option with ! is needed for compatibility with flattening in "standard" json path used in AIR. + // The second option with ! is needed for compatibility with flattening in "standard" lambda used in AIR. // However version without ! returns just a error, because the opposite is unsound. ".$.instruction" | ".$.instruction!" => LastErrorPath::Instruction, ".$.msg" | ".$.msg!" => LastErrorPath::Message, diff --git a/crates/air-lib/air-parser/src/parser/lexer/call_variable_parser.rs b/crates/air-lib/air-parser/src/parser/lexer/call_variable_parser.rs index 0bf05b5d..4e782c0b 100644 --- a/crates/air-lib/air-parser/src/parser/lexer/call_variable_parser.rs +++ b/crates/air-lib/air-parser/src/parser/lexer/call_variable_parser.rs @@ -18,6 +18,7 @@ use super::AstVariable; use super::LexerError; use super::LexerResult; use super::Token; +use crate::LambdaAST; use std::convert::TryInto; use std::iter::Peekable; @@ -210,7 +211,7 @@ impl<'input> CallVariableParser<'input> { fn try_parse_as_json_path(&mut self) -> LexerResult<()> { if !self.json_path_allowed_char() && !self.try_parse_as_flattening() { let error_pos = self.pos_in_string_to_parse(); - return Err(LexerError::InvalidJsonPath(error_pos, error_pos)); + return Err(LexerError::InvalidLambda(error_pos, error_pos)); } Ok(()) @@ -281,6 +282,22 @@ impl<'input> CallVariableParser<'input> { } } + fn try_to_variable_and_lambda( + &self, + pos: usize, + ) -> LexerResult<(&'input str, LambdaAST<'input>)> { + // +2 to ignore ".$" prefix + let lambda = crate::parse_lambda(&self.string_to_parse[pos + 2..]).map_err(|e| { + LexerError::LambdaParserError( + self.start_pos + pos, + self.start_pos + self.string_to_parse.len(), + e.to_string(), + ) + })?; + + Ok((&self.string_to_parse[0..pos], lambda)) + } + fn to_token(&self) -> LexerResult> { use super::token::UnparsedNumber; @@ -303,29 +320,12 @@ impl<'input> CallVariableParser<'input> { } } (false, true) => { - let json_path_start_pos = self.state.first_dot_met_pos.unwrap(); - let should_flatten = self.state.flattening_met; - let (variable, json_path) = - to_variable_and_path(self.string_to_parse, json_path_start_pos, should_flatten); + let lambda_start_pos = self.state.first_dot_met_pos.unwrap(); + let (variable, lambda) = self.try_to_variable_and_lambda(lambda_start_pos)?; let variable = self.to_variable(variable); - Ok(Token::VariableWithJsonPath( - variable, - json_path, - should_flatten, - )) + Ok(Token::VariableWithLambda(variable, lambda)) } } } } - -fn to_variable_and_path(str: &str, pos: usize, should_flatten: bool) -> (&str, &str) { - let json_path = if should_flatten { - // -1 to not include the flattening symbol ! 
to the resulted json path - &str[pos + 1..str.len() - 1] - } else { - &str[pos + 1..] - }; - - (&str[0..pos], json_path) -} diff --git a/crates/air-lib/air-parser/src/parser/lexer/errors.rs b/crates/air-lib/air-parser/src/parser/lexer/errors.rs index 1d54ab5e..e1c1b079 100644 --- a/crates/air-lib/air-parser/src/parser/lexer/errors.rs +++ b/crates/air-lib/air-parser/src/parser/lexer/errors.rs @@ -36,8 +36,8 @@ pub enum LexerError { #[error("this variable or constant shouldn't have empty name")] EmptyVariableOrConst(usize, usize), - #[error("invalid character in json path")] - InvalidJsonPath(usize, usize), + #[error("invalid character in lambda")] + InvalidLambda(usize, usize), #[error("a digit could contain only digits or one dot")] UnallowedCharInNumber(usize, usize), @@ -48,13 +48,17 @@ pub enum LexerError { #[error("{2}")] ParseFloatError(usize, usize, #[source] ParseFloatError), + // TODO: use LambdaParserError directly here (it'll require introducing a lifetime) + #[error("{2}")] + LambdaParserError(usize, usize, String), + #[error("{2} is an incorrect path for %last_error%, only .$.instruction, .$.msg, and .$.peer_id are allowed")] LastErrorPathError(usize, usize, String), #[error("this float is too big, a float could contain less than 12 digits")] TooBigFloat(usize, usize), - #[error("leading dot without any symbols before - please write 0 if it's float or variable name if it's json path")] + #[error("leading dot without any symbols before - please write 0 if it's float or variable name if it's a lambda")] LeadingDot(usize, usize), } diff --git a/crates/air-lib/air-parser/src/parser/lexer/tests.rs b/crates/air-lib/air-parser/src/parser/lexer/tests.rs index 495ca145..eb539be4 100644 --- a/crates/air-lib/air-parser/src/parser/lexer/tests.rs +++ b/crates/air-lib/air-parser/src/parser/lexer/tests.rs @@ -22,6 +22,8 @@ use super::LexerError; use super::Number; use super::Token; +use air_lambda_parser::{LambdaAST, ValueAccessor}; + fn run_lexer(input: &str) -> Vec, usize, LexerError>> { let lexer = AIRLexer::new(input); lexer.collect() @@ -262,34 +264,38 @@ fn too_big_float_number() { } #[test] -fn json_path() { - // this json path contains all allowed in json path characters - const JSON_PATH: &str = r#"value.$[$@[]():?.*,"]"#; +fn lambda() { + // this lambda contains all allowed in lambda characters + const LAMBDA: &str = r#"value.$.field[1]"#; let variable = AstVariable::Scalar("value"); lexer_test( - JSON_PATH, + LAMBDA, Single(Ok(( 0, - Token::VariableWithJsonPath(variable, r#"$[$@[]():?.*,"]"#, false), - JSON_PATH.len(), + Token::VariableWithLambda(variable, unsafe { + LambdaAST::new_unchecked(vec![ + ValueAccessor::FieldAccess { + field_name: "field", + }, + ValueAccessor::ArrayAccess { idx: 1 }, + ]) + }), + LAMBDA.len(), ))), ); } #[test] -fn json_path_numbers() { - const JSON_PATH: &str = r#"12345.$[$@[]():?.*,"]"#; +fn lambda_path_numbers() { + const LAMBDA: &str = r#"12345.$[$@[]():?.*,"]"#; + + lexer_test(LAMBDA, Single(Err(LexerError::UnallowedCharInNumber(6, 6)))); + + const LAMBDA1: &str = r#"+12345.$[$@[]():?.*,"]"#; lexer_test( - JSON_PATH, - Single(Err(LexerError::UnallowedCharInNumber(6, 6))), - ); - - const JSON_PATH1: &str = r#"+12345.$[$@[]():?.*,"]"#; - - lexer_test( - JSON_PATH1, + LAMBDA1, Single(Err(LexerError::UnallowedCharInNumber(7, 7))), ); } @@ -320,7 +326,7 @@ fn unclosed_quote() { #[test] fn bad_value() { - // value contains ! that only allowed at the end of a json path + // value contains ! 
that only allowed at the end of a lambda expression const INVALID_VALUE: &str = r#"val!ue.$[$@[]():?.*,"\]"#; lexer_test( @@ -328,31 +334,25 @@ fn bad_value() { Single(Err(LexerError::IsNotAlphanumeric(3, 3))), ); - // value contains ! that only allowed at the end of a json path + // value contains ! that only allowed at the end of a lambda expression const INVALID_VALUE2: &str = r#"value.$![$@[]():?.*,"\]"#; - lexer_test( - INVALID_VALUE2, - Single(Err(LexerError::InvalidJsonPath(7, 7))), - ); + lexer_test(INVALID_VALUE2, Single(Err(LexerError::InvalidLambda(7, 7)))); } #[test] -fn invalid_json_path() { - const INVALID_JSON_PATH: &str = r#"value.$%"#; +fn invalid_lambda() { + const INVALID_LAMBDA: &str = r#"value.$%"#; - lexer_test( - INVALID_JSON_PATH, - Single(Err(LexerError::InvalidJsonPath(7, 7))), - ); + lexer_test(INVALID_LAMBDA, Single(Err(LexerError::InvalidLambda(7, 7)))); } #[test] -fn invalid_json_path_numbers() { - // this json path contains all allowed in json path charactes - const JSON_PATH: &str = r#"+12345$[$@[]():?.*,"!]"#; +fn invalid_lambda_numbers() { + // this lambda contains all allowed in lambda characters + const LAMBDA: &str = r#"+12345$[$@[]():?.*,"!]"#; - lexer_test(JSON_PATH, Single(Err(LexerError::IsNotAlphanumeric(6, 6)))); + lexer_test(LAMBDA, Single(Err(LexerError::IsNotAlphanumeric(6, 6)))); } #[test] diff --git a/crates/air-lib/air-parser/src/parser/lexer/token.rs b/crates/air-lib/air-parser/src/parser/lexer/token.rs index 26d16caa..07da17f4 100644 --- a/crates/air-lib/air-parser/src/parser/lexer/token.rs +++ b/crates/air-lib/air-parser/src/parser/lexer/token.rs @@ -18,6 +18,7 @@ mod traits; use super::LexerError; use super::LexerResult; +use crate::LambdaAST; use serde::Deserialize; use serde::Serialize; @@ -33,7 +34,7 @@ pub enum Token<'input> { StringLiteral(&'input str), Alphanumeric(&'input str), Stream(&'input str), - VariableWithJsonPath(AstVariable<'input>, &'input str, bool), + VariableWithLambda(AstVariable<'input>, LambdaAST<'input>), Number(Number), Boolean(bool), diff --git a/crates/air-lib/air-parser/src/parser/mod.rs b/crates/air-lib/air-parser/src/parser/mod.rs index 62cb317a..06e3506c 100644 --- a/crates/air-lib/air-parser/src/parser/mod.rs +++ b/crates/air-lib/air-parser/src/parser/mod.rs @@ -32,9 +32,9 @@ pub mod tests; pub use self::air_parser::parse; pub use air::AIRParser; pub use lexer::AIRLexer; +pub use validator::VariableValidator; use errors::ParserError; -use validator::VariableValidator; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct Span { diff --git a/crates/air-lib/air-parser/src/parser/tests.rs b/crates/air-lib/air-parser/src/parser/tests.rs index b5c59d4c..1b0685a8 100644 --- a/crates/air-lib/air-parser/src/parser/tests.rs +++ b/crates/air-lib/air-parser/src/parser/tests.rs @@ -25,6 +25,8 @@ use ast::CallInstrArgValue; use ast::CallInstrValue; use ast::Instruction; +use air_lambda_parser::ValueAccessor; + use fstrings::f; use lalrpop_util::ParseError; use std::rc::Rc; @@ -140,11 +142,12 @@ fn parse_json_path() { "#; let instruction = parse(source_code); let expected = Instruction::Call(Call { - peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new( - Scalar("id"), - "$.a", - true, - ))), + peer_part: PeerPk(CallInstrValue::VariableWithLambda( + ast::VariableWithLambda::from_raw_algebras( + Scalar("id"), + vec![ValueAccessor::FieldAccess { field_name: "a" }], + ), + )), function_part: FuncName(CallInstrValue::Literal("f")), args: Rc::new(vec![ CallInstrArgValue::Literal("hello"), @@ -289,9 +292,9 @@ fn 
parse_undefined_stream_without_json_path() { } #[test] -fn parse_undefined_stream_with_json_path() { +fn parse_undefined_stream_with_lambda() { let source_code = r#" - (call "" "" [$stream.$json_path]) + (call "" "" [$stream.$.json_path]) "#; let lexer = crate::AIRLexer::new(source_code); @@ -324,27 +327,37 @@ fn parse_json_path_complex() { let source_code = r#" (seq (call m.$.[1]! "f" [] void) - (call m.$.abc["c"].cde[a][0].cde["bcd"]! "f" [] void) + (call m.$.abc[0].cde[1][0].cde[1]! "f" [] void) ) "#; let instruction = parse(source_code); let expected = seq( Instruction::Call(Call { - peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new( - Scalar("m"), - "$.[1]", - true, - ))), + peer_part: PeerPk(CallInstrValue::VariableWithLambda( + ast::VariableWithLambda::from_raw_algebras( + Scalar("m"), + vec![ValueAccessor::ArrayAccess { idx: 1 }], + ), + )), function_part: FuncName(CallInstrValue::Literal("f")), args: Rc::new(vec![]), output: Variable(Scalar("void")), }), Instruction::Call(Call { - peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new( - Scalar("m"), - r#"$.abc["c"].cde[a][0].cde["bcd"]"#, - true, - ))), + peer_part: PeerPk(CallInstrValue::VariableWithLambda( + ast::VariableWithLambda::from_raw_algebras( + Scalar("m"), + vec![ + ValueAccessor::FieldAccess { field_name: "abc" }, + ValueAccessor::ArrayAccess { idx: 0 }, + ValueAccessor::FieldAccess { field_name: "cde" }, + ValueAccessor::ArrayAccess { idx: 1 }, + ValueAccessor::ArrayAccess { idx: 0 }, + ValueAccessor::FieldAccess { field_name: "cde" }, + ValueAccessor::ArrayAccess { idx: 1 }, + ], + ), + )), function_part: FuncName(CallInstrValue::Literal("f")), args: Rc::new(vec![]), output: Variable(Scalar("void")), @@ -360,26 +373,37 @@ fn json_path_square_braces() { use ast::PeerPart::*; let source_code = r#" - (call u.$["peer_id"]! ("return" "") [u.$["peer_id"].cde[0]["abc"].abc u.$["name"]] $void) + (call u.$.peer_id! 
("return" "") [u.$[1].cde[0][0].abc u.$.name] $void) "#; let instruction = parse(source_code); let expected = Instruction::Call(Call { - peer_part: PeerPk(CallInstrValue::JsonPath(ast::JsonPath::new( - Scalar("u"), - r#"$["peer_id"]"#, - true, - ))), + peer_part: PeerPk(CallInstrValue::VariableWithLambda( + ast::VariableWithLambda::from_raw_algebras( + Scalar("u"), + vec![ValueAccessor::FieldAccess { + field_name: "peer_id", + }], + ), + )), function_part: ServiceIdWithFuncName( CallInstrValue::Literal("return"), CallInstrValue::Literal(""), ), args: Rc::new(vec![ - CallInstrArgValue::JsonPath(ast::JsonPath::new( + CallInstrArgValue::VariableWithLambda(ast::VariableWithLambda::from_raw_algebras( Scalar("u"), - r#"$["peer_id"].cde[0]["abc"].abc"#, - false, + vec![ + ValueAccessor::ArrayAccess { idx: 1 }, + ValueAccessor::FieldAccess { field_name: "cde" }, + ValueAccessor::ArrayAccess { idx: 0 }, + ValueAccessor::ArrayAccess { idx: 0 }, + ValueAccessor::FieldAccess { field_name: "abc" }, + ], + )), + CallInstrArgValue::VariableWithLambda(ast::VariableWithLambda::from_raw_algebras( + Scalar("u"), + vec![ValueAccessor::FieldAccess { field_name: "name" }], )), - CallInstrArgValue::JsonPath(ast::JsonPath::new(Scalar("u"), r#"$["name"]"#, false)), ]), output: Variable(Stream("$void")), }); @@ -886,20 +910,19 @@ fn ap_with_last_error() { #[test] fn fold_json_path() { use ast::FoldScalar; - use ast::IterableScalarValue::*; + use ast::IterableScalarValue; let source_code = r#" ; comment - (fold members.$.["users"] m (null)) ;;; comment + (fold members.$.[123321] m (null)) ;;; comment ;;; comment "#; let instruction = parse(source_code); let expected = Instruction::FoldScalar(FoldScalar { - iterable: JsonPath { - scalar_name: "members", - path: "$.[\"users\"]", - should_flatten: false, - }, + iterable: IterableScalarValue::new_vl( + "members", + vec![ValueAccessor::ArrayAccess { idx: 123321 }], + ), iterator: "m", instruction: Rc::new(null()), }); @@ -925,20 +948,24 @@ fn fold_on_stream() { #[test] fn comments() { use ast::FoldScalar; - use ast::IterableScalarValue::*; + use ast::IterableScalarValue; let source_code = r#" ; comment - (fold members.$.["users"] m (null)) ;;; comment ;;?()() + (fold members.$.field[1] m (null)) ;;; comment ;;?()() ;;; comme;?!.$. nt[][][][()()()null;$::! 
"#; let instruction = parse(source_code); let expected = Instruction::FoldScalar(FoldScalar { - iterable: JsonPath { - scalar_name: "members", - path: "$.[\"users\"]", - should_flatten: false, - }, + iterable: IterableScalarValue::new_vl( + "members", + vec![ + ValueAccessor::FieldAccess { + field_name: "field", + }, + ValueAccessor::ArrayAccess { idx: 1 }, + ], + ), iterator: "m", instruction: Rc::new(null()), }); diff --git a/crates/air-lib/air-parser/src/parser/validator.rs b/crates/air-lib/air-parser/src/parser/validator.rs index 5863532e..64a4d4e0 100644 --- a/crates/air-lib/air-parser/src/parser/validator.rs +++ b/crates/air-lib/air-parser/src/parser/validator.rs @@ -50,7 +50,7 @@ pub struct VariableValidator<'i> { } impl<'i> VariableValidator<'i> { - pub(super) fn new() -> Self { + pub fn new() -> Self { <_>::default() } @@ -96,7 +96,7 @@ impl<'i> VariableValidator<'i> { pub(super) fn met_ap(&mut self, ap: &Ap<'i>, span: Span) { match &ap.argument { ApArgument::ScalarVariable(name) => self.met_variable(&AstVariable::Scalar(name), span), - ApArgument::JsonPath(json_path) => self.met_variable(&json_path.variable, span), + ApArgument::VariableWithLambda(vl) => self.met_variable(&vl.variable, span), ApArgument::Number(_) | ApArgument::Boolean(_) | ApArgument::Literal(_) @@ -151,7 +151,7 @@ impl<'i> VariableValidator<'i> { fn met_instr_value(&mut self, instr_value: &CallInstrValue<'i>, span: Span) { match instr_value { - CallInstrValue::JsonPath(json_path) => self.met_variable(&json_path.variable, span), + CallInstrValue::VariableWithLambda(vl) => self.met_variable(&vl.variable, span), CallInstrValue::Variable(variable) => self.met_variable(variable, span), _ => {} } @@ -159,7 +159,7 @@ impl<'i> VariableValidator<'i> { fn met_instr_arg_value(&mut self, instr_arg_value: &CallInstrArgValue<'i>, span: Span) { match instr_arg_value { - CallInstrArgValue::JsonPath(json_path) => self.met_variable(&json_path.variable, span), + CallInstrArgValue::VariableWithLambda(vl) => self.met_variable(&vl.variable, span), CallInstrArgValue::Variable(variable) => { // skipping streams here allows treating non-defined streams as empty arrays if let AstVariable::Scalar(_) = variable { @@ -224,7 +224,7 @@ impl<'i> VariableValidator<'i> { | MatchableValue::Literal(_) | MatchableValue::EmptyArray => {} MatchableValue::Variable(variable) => self.met_variable(variable, span), - MatchableValue::JsonPath(json_path) => self.met_variable(&json_path.variable, span), + MatchableValue::VariableWithLambda(vl) => self.met_variable(&vl.variable, span), } } @@ -242,7 +242,7 @@ impl<'i> VariableValidator<'i> { fn met_iterable_value(&mut self, iterable_value: &IterableScalarValue<'i>, span: Span) { match iterable_value { - IterableScalarValue::JsonPath { scalar_name, .. } => { + IterableScalarValue::VariableWithLambda { scalar_name, .. 
} => { self.met_variable(&AstVariable::Scalar(scalar_name), span) } IterableScalarValue::ScalarVariable(name) => { diff --git a/crates/air-lib/lambda/ast/Cargo.toml b/crates/air-lib/lambda/ast/Cargo.toml new file mode 100644 index 00000000..800fc469 --- /dev/null +++ b/crates/air-lib/lambda/ast/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "air-lambda-ast" +description = "Parser of AIR value algebra values" +version = "0.1.0" +authors = ["Fluence Labs"] +edition = "2018" +license = "Apache-2.0" +publish = false +keywords = ["fluence", "air", "lambda", "ast"] +categories = ["wasm"] + +[lib] +name = "air_lambda_ast" +path = "src/lib.rs" + +[dependencies] +non-empty-vec = { version = "0.2.0", features = ["serde"] } + +serde = { version = "1.0.118", features = ["rc", "derive"] } +serde_json = "1.0.61" diff --git a/crates/air-lib/lambda/ast/README.md b/crates/air-lib/lambda/ast/README.md new file mode 100644 index 00000000..c7a0d739 --- /dev/null +++ b/crates/air-lib/lambda/ast/README.md @@ -0,0 +1,3 @@ +## AIR lambda AST + +AIR supports lambda expressions that can be applied to both scalars and streams. This crate defines an AST for such expressions; it has an array-like structure because such a structure is easier to handle. diff --git a/crates/air-lib/lambda/ast/src/ast.rs b/crates/air-lib/lambda/ast/src/ast.rs new file mode 100644 index 00000000..cce658c8 --- /dev/null +++ b/crates/air-lib/lambda/ast/src/ast.rs @@ -0,0 +1,36 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +mod traits; + +use non_empty_vec::NonEmpty; +use serde::Deserialize; +use serde::Serialize; + +pub type LambdaAST<'input> = NonEmpty<ValueAccessor<'input>>; + +#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)] +pub enum ValueAccessor<'input> { + // (.)?[$idx] + ArrayAccess { idx: u32 }, + + // .field + FieldAccess { field_name: &'input str }, + + // needed to allow the parser to catch all errors in a lambda expression without stopping + // on the very first one; this variant is guaranteed not to be present in a successfully parsed lambda. + Error, +} diff --git a/crates/air-lib/lambda/ast/src/ast/traits.rs b/crates/air-lib/lambda/ast/src/ast/traits.rs new file mode 100644 index 00000000..12517a9b --- /dev/null +++ b/crates/air-lib/lambda/ast/src/ast/traits.rs @@ -0,0 +1,31 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +use super::*; + +use std::fmt; + +impl fmt::Display for ValueAccessor<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use ValueAccessor::*; + + match self { + ArrayAccess { idx } => write!(f, ".[{}]", idx), + FieldAccess { field_name } => write!(f, ".{}", field_name), + Error => write!(f, "a parser error occurred while parsing lambda expression"), + } + } +} diff --git a/crates/air-lib/lambda/ast/src/lib.rs b/crates/air-lib/lambda/ast/src/lib.rs new file mode 100644 index 00000000..43c0c766 --- /dev/null +++ b/crates/air-lib/lambda/ast/src/lib.rs @@ -0,0 +1,38 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#![deny( + dead_code, + nonstandard_style, + unused_imports, + unused_mut, + unused_variables, + unused_unsafe, + unreachable_patterns +)] + +mod ast; + +pub use ast::*; + +pub fn format_ast(lambda_ast: &LambdaAST<'_>) -> String { + let mut formatted_ast = String::new(); + for algebra in lambda_ast.iter() { + formatted_ast.push_str(&algebra.to_string()); + } + + formatted_ast +} diff --git a/crates/air-lib/lambda/parser/Cargo.toml b/crates/air-lib/lambda/parser/Cargo.toml new file mode 100644 index 00000000..94b8342c --- /dev/null +++ b/crates/air-lib/lambda/parser/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "air-lambda-parser" +description = "Parser of an AIR lambda" +version = "0.1.0" +authors = ["Fluence Labs"] +edition = "2018" +license = "Apache-2.0" +publish = false +keywords = ["fluence", "air", "parser", "lalrpop"] +categories = ["wasm"] + +[lib] +name = "air_lambda_parser" +path = "src/lib.rs" + +[build-dependencies] +lalrpop = "0.19.6" + +[dependencies] +air-lambda-ast = { path = "../ast" } + +lalrpop-util = "0.19.6" +regex = "1.5.4" +codespan = "0.11.1" +codespan-reporting = "0.11.1" +multimap = "0.8.3" + +# TODO: hide serde behind a feature +serde = { version = "1.0.118", features = ["rc", "derive"] } +serde_json = "1.0.61" + +itertools = "0.10.0" +thiserror = "1.0.23" diff --git a/crates/air-lib/lambda/parser/build.rs b/crates/air-lib/lambda/parser/build.rs new file mode 100644 index 00000000..e8855cd0 --- /dev/null +++ b/crates/air-lib/lambda/parser/build.rs @@ -0,0 +1,24 @@ +/* + * Copyright 2020 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +extern crate lalrpop; + +fn main() { + lalrpop::Configuration::new() + .generate_in_source_tree() + .process() + .unwrap(); +} diff --git a/crates/air-lib/lambda/parser/src/lib.rs b/crates/air-lib/lambda/parser/src/lib.rs new file mode 100644 index 00000000..08a54369 --- /dev/null +++ b/crates/air-lib/lambda/parser/src/lib.rs @@ -0,0 +1,36 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#![deny( + dead_code, + nonstandard_style, + unused_imports, + unused_mut, + unused_variables, + unused_unsafe, + unreachable_patterns +)] + +mod parser; + +pub use parser::parse; +pub use parser::AlgebraLexer; +pub use parser::LambdaParser; +pub use parser::LambdaParserError; +pub use parser::LexerError; + +pub use air_lambda_ast::LambdaAST; +pub use air_lambda_ast::ValueAccessor; diff --git a/crates/air-lib/lambda/parser/src/parser/errors.rs b/crates/air-lib/lambda/parser/src/parser/errors.rs new file mode 100644 index 00000000..0e0e1c01 --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/errors.rs @@ -0,0 +1,51 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use crate::parser::lexer::LexerError; +use crate::parser::lexer::Token; + +use lalrpop_util::ErrorRecovery; +use lalrpop_util::ParseError; +use thiserror::Error as ThisError; + +#[derive(ThisError, Debug, Clone, PartialEq, Eq)] +pub enum LambdaParserError<'input> { + #[error(transparent)] + LexerError(#[from] LexerError), + + #[error("provided lambda expression doesn't contain any algebras")] + EmptyLambda, + + #[error("{0:?}")] + ParseError(ParseError, LexerError>), + + #[error("{0:?}")] + RecoveryErrors(Vec, LexerError>>), +} + +impl<'input> From, LexerError>> for LambdaParserError<'input> { + fn from(e: ParseError, LexerError>) -> Self { + Self::ParseError(e) + } +} + +impl<'input> From, LexerError>>> + for LambdaParserError<'input> +{ + fn from(errors: Vec, LexerError>>) -> Self { + Self::RecoveryErrors(errors) + } +} diff --git a/crates/air-lib/lambda/parser/src/parser/lambda_parser.rs b/crates/air-lib/lambda/parser/src/parser/lambda_parser.rs new file mode 100644 index 00000000..11f712f6 --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/lambda_parser.rs @@ -0,0 +1,52 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use super::lexer::AlgebraLexer; +use super::va_lambda; +use super::LambdaParserError; +use super::LambdaParserResult; +use crate::LambdaAST; +use crate::ValueAccessor; + +use va_lambda::LambdaParser; + +// Caching parser to cache internal regexes, which are expensive to instantiate +// See also https://github.com/lalrpop/lalrpop/issues/269 +thread_local!(static PARSER: LambdaParser = LambdaParser::new()); + +/// Parse AIR `source_code` to `Box` +pub fn parse(lambda: &str) -> LambdaParserResult<'_, LambdaAST> { + PARSER.with(|parser| { + let mut errors = Vec::new(); + let lexer = AlgebraLexer::new(lambda); + let result = parser.parse(lambda, &mut errors, lexer); + + match result { + Ok(algebras) if errors.is_empty() => try_to_lambda(algebras), + Ok(_) => Err(errors.into()), + Err(e) => Err(e.into()), + } + }) +} + +fn try_to_lambda(algebras: Vec) -> LambdaParserResult<'_, LambdaAST> { + if algebras.is_empty() { + return Err(LambdaParserError::EmptyLambda); + } + + let ast = unsafe { LambdaAST::new_unchecked(algebras) }; + Ok(ast) +} diff --git a/crates/air-lib/lambda/parser/src/parser/lexer/algebra_lexer.rs b/crates/air-lib/lambda/parser/src/parser/lexer/algebra_lexer.rs new file mode 100644 index 00000000..d7e033ac --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/lexer/algebra_lexer.rs @@ -0,0 +1,108 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use super::errors::LexerError; +use super::token::Token; + +use crate::parser::lexer::is_air_alphanumeric; +use std::iter::Peekable; +use std::str::CharIndices; + +const ARRAY_IDX_BASE: u32 = 10; + +pub type Spanned = Result<(Loc, Token, Loc), Error>; + +pub struct AlgebraLexer<'input> { + input: &'input str, + chars: Peekable>, +} + +impl<'input> Iterator for AlgebraLexer<'input> { + type Item = Spanned, usize, LexerError>; + + fn next(&mut self) -> Option { + self.next_token() + } +} + +impl<'input> AlgebraLexer<'input> { + pub fn new(input: &'input str) -> Self { + Self { + input, + chars: input.char_indices().peekable(), + } + } + + pub fn next_token(&mut self) -> Option, usize, LexerError>> { + self.chars.next().map(|(start_pos, ch)| match ch { + '[' => Ok((start_pos, Token::OpenSquareBracket, start_pos + 1)), + ']' => Ok((start_pos, Token::CloseSquareBracket, start_pos + 1)), + + '.' => Ok((start_pos, Token::Selector, start_pos + 1)), + + d if d.is_digit(ARRAY_IDX_BASE) => self.tokenize_arrays_idx(start_pos), + s if is_air_alphanumeric(s) => self.tokenize_field_name(start_pos), + + '!' 
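+ // '!' is the flattening sign; e.g. for the input ".abc.[1]!" this lexer yields:
+ // Selector, FieldName("abc"), Selector, OpenSquareBracket, ArrayIdx(1),
+ // CloseSquareBracket, FlatteningSign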
=> Ok((start_pos, Token::FlatteningSign, start_pos + 1)), + + _ => Err(LexerError::UnexpectedSymbol(start_pos, start_pos + 1)), + }) + } + + fn tokenize_arrays_idx( + &mut self, + start_pos: usize, + ) -> Spanned, usize, LexerError> { + let array_idx = self.tokenize_until(start_pos, |ch| ch.is_digit(ARRAY_IDX_BASE)); + match array_idx + .parse::() + .map_err(|e| LexerError::ParseIntError(start_pos, start_pos + array_idx.len(), e)) + { + Ok(idx) => Ok((start_pos, Token::ArrayIdx(idx), start_pos + array_idx.len())), + Err(e) => Err(e), + } + } + + fn tokenize_field_name( + &mut self, + start_pos: usize, + ) -> Spanned, usize, LexerError> { + let field_name = self.tokenize_until(start_pos, is_air_alphanumeric); + + Ok(( + start_pos, + Token::FieldName(field_name), + start_pos + field_name.len(), + )) + } + + fn tokenize_until( + &mut self, + start_pos: usize, + condition: impl Fn(char) -> bool, + ) -> &'input str { + let mut end_pos = start_pos; + while let Some((pos, ch)) = self.chars.peek() { + if !condition(*ch) { + break; + } + end_pos = *pos; + self.chars.next(); + } + + &self.input[start_pos..end_pos + 1] + } +} diff --git a/crates/air-lib/lambda/parser/src/parser/lexer/errors.rs b/crates/air-lib/lambda/parser/src/parser/lexer/errors.rs new file mode 100644 index 00000000..7ca1bdde --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/lexer/errors.rs @@ -0,0 +1,28 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use thiserror::Error as ThisError; + +use std::num::ParseIntError; + +#[derive(ThisError, Debug, Clone, PartialEq, Eq)] +pub enum LexerError { + #[error("unexpected symbol for value algebra")] + UnexpectedSymbol(usize, usize), + + #[error("{2}")] + ParseIntError(usize, usize, #[source] ParseIntError), +} diff --git a/crates/air-lib/lambda/parser/src/parser/lexer/mod.rs b/crates/air-lib/lambda/parser/src/parser/lexer/mod.rs new file mode 100644 index 00000000..2eb95cf2 --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/lexer/mod.rs @@ -0,0 +1,29 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +mod algebra_lexer; +mod errors; +mod token; +mod utils; + +#[cfg(test)] +mod tests; + +pub use algebra_lexer::AlgebraLexer; +pub use errors::LexerError; +pub use token::Token; + +pub(self) use utils::is_air_alphanumeric; diff --git a/crates/air-lib/lambda/parser/src/parser/lexer/tests.rs b/crates/air-lib/lambda/parser/src/parser/lexer/tests.rs new file mode 100644 index 00000000..df64f21d --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/lexer/tests.rs @@ -0,0 +1,52 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use super::algebra_lexer::Spanned; +use super::AlgebraLexer; +use super::LexerError; +use super::Token; + +fn run_lexer(input: &str) -> Vec, usize, LexerError>> { + let lexer = AlgebraLexer::new(input); + lexer.collect() +} + +#[test] +fn array_access() { + let array_access: &str = ".[0]"; + + let actual = run_lexer(array_access); + let expected = vec![ + Spanned::Ok((0, Token::Selector, 1)), + Spanned::Ok((1, Token::OpenSquareBracket, 2)), + Spanned::Ok((2, Token::ArrayIdx(0), 3)), + Spanned::Ok((3, Token::CloseSquareBracket, 4)), + ]; + assert_eq!(actual, expected); +} + +#[test] +fn field_access() { + let field_name = "some_field_name"; + let field_access = format!(".{}", field_name); + + let actual = run_lexer(&field_access); + let expected = vec![ + Spanned::Ok((0, Token::Selector, 1)), + Spanned::Ok((1, Token::FieldName(field_name), 1 + field_name.len())), + ]; + assert_eq!(actual, expected); +} diff --git a/crates/air-lib/lambda/parser/src/parser/lexer/token.rs b/crates/air-lib/lambda/parser/src/parser/lexer/token.rs new file mode 100644 index 00000000..750c860e --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/lexer/token.rs @@ -0,0 +1,33 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use serde::Deserialize; +use serde::Serialize; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum Token<'input> { + // . + Selector, + + OpenSquareBracket, + CloseSquareBracket, + + ArrayIdx(u32), + FieldName(&'input str), + + // ! 
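+ // e.g. the trailing '!' in ".field!" or ".[1]!"; the grammar accepts it after an accessor,
+ // but it does not affect the resulting ValueAccessor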
+ FlatteningSign, +} diff --git a/crates/air-lib/lambda/parser/src/parser/lexer/utils.rs b/crates/air-lib/lambda/parser/src/parser/lexer/utils.rs new file mode 100644 index 00000000..a05abe1c --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/lexer/utils.rs @@ -0,0 +1,20 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// TODO: decouple it to a separate crate +pub(super) fn is_air_alphanumeric(ch: char) -> bool { + ch.is_alphanumeric() || ch == '_' || ch == '-' +} diff --git a/crates/air-lib/lambda/parser/src/parser/mod.rs b/crates/air-lib/lambda/parser/src/parser/mod.rs new file mode 100644 index 00000000..2bc24999 --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/mod.rs @@ -0,0 +1,42 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +pub mod lambda_parser; +mod lexer; + +// air is auto-generated, so exclude it from `cargo fmt -- --check` and `cargo clippy` +#[rustfmt::skip] +#[allow(clippy::all)] +mod va_lambda; + +mod errors; + +#[cfg(test)] +pub mod tests; + +pub type LambdaParserResult<'input, T> = std::result::Result>; + +pub use errors::LambdaParserError; +pub use lambda_parser::parse; +pub use lexer::AlgebraLexer; +pub use lexer::LexerError; +pub use va_lambda::LambdaParser; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Span { + pub left: usize, + pub right: usize, +} diff --git a/crates/air-lib/lambda/parser/src/parser/tests.rs b/crates/air-lib/lambda/parser/src/parser/tests.rs new file mode 100644 index 00000000..a556821b --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/tests.rs @@ -0,0 +1,134 @@ +/* + * Copyright 2021 Fluence Labs Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +use crate::parser::LambdaParser; +use crate::ValueAccessor; + +thread_local!(static TEST_PARSER: LambdaParser = LambdaParser::new()); + +fn parse(source_code: &str) -> Vec> { + TEST_PARSER.with(|parser| { + let mut errors = Vec::new(); + let lexer = crate::parser::AlgebraLexer::new(source_code); + parser + .parse(source_code, &mut errors, lexer) + .expect("parsing should be successful") + }) +} + +#[test] +fn field_access() { + let field_name = "some_field_name"; + let lambda = format!(".{}", field_name); + + let actual = parse(&lambda); + let expected = vec![ValueAccessor::FieldAccess { field_name }]; + assert_eq!(actual, expected); +} + +#[test] +fn field_access_with_flattening() { + let field_name = "some_field_name"; + let lambda = format!(".{}!", field_name); + + let actual = parse(&lambda); + let expected = vec![ValueAccessor::FieldAccess { field_name }]; + assert_eq!(actual, expected); +} + +#[test] +fn array_access() { + let idx = 0; + let lambda = format!(".[{}]", idx); + + let actual = parse(&lambda); + let expected = vec![ValueAccessor::ArrayAccess { idx }]; + assert_eq!(actual, expected); +} + +#[test] +fn array_access_with_flattening() { + let idx = 0; + let lambda = format!(".[{}]!", idx); + + let actual = parse(&lambda); + let expected = vec![ValueAccessor::ArrayAccess { idx }]; + assert_eq!(actual, expected); +} + +#[test] +fn field_array_access() { + let field_name = "some_field_name"; + let idx = 1; + let lambda = format!(".{}.[{}]", field_name, idx); + + let actual = parse(&lambda); + let expected = vec![ + ValueAccessor::FieldAccess { field_name }, + ValueAccessor::ArrayAccess { idx }, + ]; + assert_eq!(actual, expected); +} + +#[test] +fn field_array_access_without_dot() { + let field_name = "some_field_name"; + let idx = 1; + let lambda = format!(".{}[{}]", field_name, idx); + + let actual = parse(&lambda); + let expected = vec![ + ValueAccessor::FieldAccess { field_name }, + ValueAccessor::ArrayAccess { idx }, + ]; + assert_eq!(actual, expected); +} + +#[test] +fn array_field_access() { + let field_name = "some_field_name"; + let idx = 1; + let lambda = format!(".[{}].{}", idx, field_name); + + let actual = parse(&lambda); + let expected = vec![ + ValueAccessor::ArrayAccess { idx }, + ValueAccessor::FieldAccess { field_name }, + ]; + assert_eq!(actual, expected); +} + +#[test] +fn many_array_field_access() { + let field_name_1 = "some_field_name_1"; + let field_name_2 = "some_field_name_2"; + let idx_1 = 1; + let idx_2 = u32::MAX; + let lambda = format!(".[{}].{}.[{}].{}", idx_1, field_name_1, idx_2, field_name_2); + + let actual = parse(&lambda); + let expected = vec![ + ValueAccessor::ArrayAccess { idx: idx_1 }, + ValueAccessor::FieldAccess { + field_name: field_name_1, + }, + ValueAccessor::ArrayAccess { idx: idx_2 }, + ValueAccessor::FieldAccess { + field_name: field_name_2, + }, + ]; + assert_eq!(actual, expected); +} diff --git a/crates/air-lib/lambda/parser/src/parser/va_lambda.lalrpop b/crates/air-lib/lambda/parser/src/parser/va_lambda.lalrpop new file mode 100644 index 00000000..69c1b4c5 --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/va_lambda.lalrpop @@ -0,0 +1,39 @@ +use crate::ValueAlgebra; +use crate::parser::lexer::LexerError; +use crate::parser::lexer::Token; + +use lalrpop_util::ErrorRecovery; + +// the only thing why input matters here is just introducing lifetime for Token +grammar<'err, 'input>(input: &'input str, errors: &'err mut Vec, LexerError>>); + +pub Lambda: Vec> = => <>; + +ValueAlgebra: ValueAlgebra<'input> = { + "[" 
"]" => { + ValueAlgebra::ArrayAccess { idx } + }, + + "." => { + ValueAlgebra::FieldAccess { field_name } + }, + + ! => { errors.push(<>); ValueAlgebra::Error }, +} + +extern { + type Location = usize; + type Error = LexerError; + + enum Token<'input> { + "." => Token::Selector, + + "[" => Token::OpenSquareBracket, + "]" => Token::CloseSquareBracket, + + array_idx => Token::ArrayIdx(), + field_name => Token::FieldName(<&'input str>), + + "!" => Token::FlatteningSign, + } +} diff --git a/crates/air-lib/lambda/parser/src/parser/va_lambda.rs b/crates/air-lib/lambda/parser/src/parser/va_lambda.rs new file mode 100644 index 00000000..c6cb5b7f --- /dev/null +++ b/crates/air-lib/lambda/parser/src/parser/va_lambda.rs @@ -0,0 +1,1495 @@ +// auto-generated: "lalrpop 0.19.6" +// sha3: db7b23bede3d6914e4bbadd2b1df6f44d50b164a9c2ed557274f415fc6b1d5 +use crate::ValueAccessor; +use crate::parser::lexer::LexerError; +use crate::parser::lexer::Token; +use lalrpop_util::ErrorRecovery; +#[allow(unused_extern_crates)] +extern crate lalrpop_util as __lalrpop_util; +#[allow(unused_imports)] +use self::__lalrpop_util::state_machine as __state_machine; +extern crate core; +extern crate alloc; + +#[cfg_attr(rustfmt, rustfmt_skip)] +mod __parse__Lambda { + #![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports, unused_parens)] + + use crate::ValueAccessor; + use crate::parser::lexer::LexerError; + use crate::parser::lexer::Token; + use lalrpop_util::ErrorRecovery; + #[allow(unused_extern_crates)] + extern crate lalrpop_util as __lalrpop_util; + #[allow(unused_imports)] + use self::__lalrpop_util::state_machine as __state_machine; + extern crate core; + extern crate alloc; + use super::__ToTriple; + #[allow(dead_code)] + pub(crate) enum __Symbol<'input> + { + Variant0(Token<'input>), + Variant1(u32), + Variant2(&'input str), + Variant3(__lalrpop_util::ErrorRecovery, LexerError>), + Variant4(core::option::Option>), + Variant5(Vec>), + Variant6(ValueAccessor<'input>), + Variant7(alloc::vec::Vec>), + } + const __ACTION: &[i8] = &[ + // State 0 + 0, 5, 6, 0, 0, 0, 7, + // State 1 + 0, 5, 6, 0, 0, 0, 7, + // State 2 + 0, 0, 0, 0, 0, 0, 0, + // State 3 + 0, -16, -16, 0, 0, 0, -16, + // State 4 + 0, 0, 9, 0, 0, 10, 0, + // State 5 + 0, 0, 0, 0, 11, 0, 0, + // State 6 + 0, -13, -13, 0, 0, 0, -13, + // State 7 + 0, -17, -17, 0, 0, 0, -17, + // State 8 + 0, 0, 0, 0, 12, 0, 0, + // State 9 + 13, -12, -12, 0, 0, 0, -12, + // State 10 + 0, 0, 0, 14, 0, 0, 0, + // State 11 + 0, 0, 0, 15, 0, 0, 0, + // State 12 + 0, -11, -11, 0, 0, 0, -11, + // State 13 + 16, -10, -10, 0, 0, 0, -10, + // State 14 + 17, -9, -9, 0, 0, 0, -9, + // State 15 + 0, -8, -8, 0, 0, 0, -8, + // State 16 + 0, -7, -7, 0, 0, 0, -7, + ]; + fn __action(state: i8, integer: usize) -> i8 { + __ACTION[(state as usize) * 7 + integer] + } + const __EOF_ACTION: &[i8] = &[ + // State 0 + -5, + // State 1 + -6, + // State 2 + -18, + // State 3 + -16, + // State 4 + 0, + // State 5 + 0, + // State 6 + -13, + // State 7 + -17, + // State 8 + 0, + // State 9 + -12, + // State 10 + 0, + // State 11 + 0, + // State 12 + -11, + // State 13 + -10, + // State 14 + -9, + // State 15 + -8, + // State 16 + -7, + ]; + fn __goto(state: i8, nt: usize) -> i8 { + match nt { + 2 => 2, + 3 => match state { + 1 => 7, + _ => 3, + }, + 5 => 1, + _ => 0, + } + } + fn __expected_tokens(__state: i8) -> alloc::vec::Vec { + const __TERMINAL: &[&str] = &[ + r###""!""###, + r###"".""###, + r###""[""###, + r###""]""###, + r###"array_idx"###, + r###"field_name"###, 
+ ]; + __TERMINAL.iter().enumerate().filter_map(|(index, terminal)| { + let next_state = __action(__state, index); + if next_state == 0 { + None + } else { + Some(alloc::string::ToString::to_string(terminal)) + } + }).collect() + } + pub(crate) struct __StateMachine<'err, 'input> + where 'input: 'err + { + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __phantom: core::marker::PhantomData<(&'err (), &'input ())>, + } + impl<'err, 'input> __state_machine::ParserDefinition for __StateMachine<'err, 'input> + where 'input: 'err + { + type Location = usize; + type Error = LexerError; + type Token = Token<'input>; + type TokenIndex = usize; + type Symbol = __Symbol<'input>; + type Success = Vec>; + type StateIndex = i8; + type Action = i8; + type ReduceIndex = i8; + type NonterminalIndex = usize; + + #[inline] + fn start_location(&self) -> Self::Location { + Default::default() + } + + #[inline] + fn start_state(&self) -> Self::StateIndex { + 0 + } + + #[inline] + fn token_to_index(&self, token: &Self::Token) -> Option { + __token_to_integer(token, core::marker::PhantomData::<(&(), &())>) + } + + #[inline] + fn action(&self, state: i8, integer: usize) -> i8 { + __action(state, integer) + } + + #[inline] + fn error_action(&self, state: i8) -> i8 { + __action(state, 7 - 1) + } + + #[inline] + fn eof_action(&self, state: i8) -> i8 { + __EOF_ACTION[state as usize] + } + + #[inline] + fn goto(&self, state: i8, nt: usize) -> i8 { + __goto(state, nt) + } + + fn token_to_symbol(&self, token_index: usize, token: Self::Token) -> Self::Symbol { + __token_to_symbol(token_index, token, core::marker::PhantomData::<(&(), &())>) + } + + fn expected_tokens(&self, state: i8) -> alloc::vec::Vec { + __expected_tokens(state) + } + + #[inline] + fn uses_error_recovery(&self) -> bool { + true + } + + #[inline] + fn error_recovery_symbol( + &self, + recovery: __state_machine::ErrorRecovery, + ) -> Self::Symbol { + __Symbol::Variant3(recovery) + } + + fn reduce( + &mut self, + action: i8, + start_location: Option<&Self::Location>, + states: &mut alloc::vec::Vec, + symbols: &mut alloc::vec::Vec<__state_machine::SymbolTriple>, + ) -> Option<__state_machine::ParseResult> { + __reduce( + self.input, + self.errors, + action, + start_location, + states, + symbols, + core::marker::PhantomData::<(&(), &())>, + ) + } + + fn simulate_reduce(&self, action: i8) -> __state_machine::SimulatedReduce { + __simulate_reduce(action, core::marker::PhantomData::<(&(), &())>) + } + } + fn __token_to_integer< + 'err, + 'input, + >( + __token: &Token<'input>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> Option + { + match *__token { + Token::FlatteningSign if true => Some(0), + Token::Selector if true => Some(1), + Token::OpenSquareBracket if true => Some(2), + Token::CloseSquareBracket if true => Some(3), + Token::ArrayIdx(_) if true => Some(4), + Token::FieldName(_) if true => Some(5), + _ => None, + } + } + fn __token_to_symbol< + 'err, + 'input, + >( + __token_index: usize, + __token: Token<'input>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> __Symbol<'input> + { + match __token_index { + 0 | 1 | 2 | 3 => __Symbol::Variant0(__token), + 4 => match __token { + Token::ArrayIdx(__tok0) if true => __Symbol::Variant1(__tok0), + _ => unreachable!(), + }, + 5 => match __token { + Token::FieldName(__tok0) if true => __Symbol::Variant2(__tok0), + _ => unreachable!(), + }, + _ => unreachable!(), + } + } + fn __simulate_reduce< + 'err, + 'input, + >( + __reduce_index: i8, + _: 
core::marker::PhantomData<(&'err (), &'input ())>, + ) -> __state_machine::SimulatedReduce<__StateMachine<'err, 'input>> + where + 'input: 'err, + { + match __reduce_index { + 0 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 0, + } + } + 1 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 0, + } + } + 2 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 1, + } + } + 3 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 1, + } + } + 4 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 2, + } + } + 5 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 2, + } + } + 6 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 3, + } + } + 7 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 3, + } + } + 8 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 3, + } + } + 9 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 3, + } + } + 10 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 3, + } + } + 11 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 3, + } + } + 12 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 3, + } + } + 13 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 4, + } + } + 14 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 4, + } + } + 15 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 5, + } + } + 16 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 5, + } + } + 17 => __state_machine::SimulatedReduce::Accept, + _ => panic!("invalid reduction index {}", __reduce_index) + } + } + pub struct LambdaParser { + _priv: (), + } + + impl LambdaParser { + pub fn new() -> LambdaParser { + LambdaParser { + _priv: (), + } + } + + #[allow(dead_code)] + pub fn parse< + 'err, + 'input, + __TOKEN: __ToTriple<'err, 'input, >, + __TOKENS: IntoIterator, + >( + &self, + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __tokens0: __TOKENS, + ) -> Result>, __lalrpop_util::ParseError, LexerError>> + { + let __tokens = __tokens0.into_iter(); + let mut __tokens = __tokens.map(|t| __ToTriple::to_triple(t)); + __state_machine::Parser::drive( + __StateMachine { + input, + errors, + __phantom: core::marker::PhantomData::<(&(), &())>, + }, + __tokens, + ) + } + } + fn __accepts< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __error_state: i8, + __states: & [i8], + __opt_integer: Option, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> bool + { + let mut __states = __states.to_vec(); + __states.push(__error_state); + loop { + let mut __states_len = __states.len(); + let __top = __states[__states_len - 1]; + let __action = match __opt_integer { + None => __EOF_ACTION[__top as usize], + Some(__integer) => __action(__top, __integer), + }; + if __action == 0 { return false; } + if __action > 0 { return true; } + let (__to_pop, __nt) = match __simulate_reduce(-(__action + 1), core::marker::PhantomData::<(&(), &())>) { + 
__state_machine::SimulatedReduce::Reduce { + states_to_pop, nonterminal_produced + } => (states_to_pop, nonterminal_produced), + __state_machine::SimulatedReduce::Accept => return true, + }; + __states_len -= __to_pop; + __states.truncate(__states_len); + let __top = __states[__states_len - 1]; + let __next_state = __goto(__top, __nt); + __states.push(__next_state); + } + } + pub(crate) fn __reduce< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __action: i8, + __lookahead_start: Option<&usize>, + __states: &mut alloc::vec::Vec, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> Option>,__lalrpop_util::ParseError, LexerError>>> + { + let (__pop_states, __nonterminal) = match __action { + 0 => { + __reduce0(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 1 => { + __reduce1(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 2 => { + __reduce2(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 3 => { + __reduce3(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 4 => { + __reduce4(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 5 => { + __reduce5(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 6 => { + __reduce6(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 7 => { + __reduce7(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 8 => { + __reduce8(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 9 => { + __reduce9(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 10 => { + __reduce10(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 11 => { + __reduce11(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 12 => { + __reduce12(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 13 => { + __reduce13(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 14 => { + __reduce14(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 15 => { + __reduce15(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 16 => { + __reduce16(input, errors, __lookahead_start, __symbols, core::marker::PhantomData::<(&(), &())>) + } + 17 => { + // __Lambda = Lambda => ActionFn(0); + let __sym0 = __pop_Variant5(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym0.2.clone(); + let __nt = super::__action0::<>(input, errors, __sym0); + return Some(Ok(__nt)); + } + _ => panic!("invalid action code {}", __action) + }; + let __states_len = __states.len(); + __states.truncate(__states_len - __pop_states); + let __state = *__states.last().unwrap(); + let __next_state = __goto(__state, __nonterminal); + __states.push(__next_state); + None + } + #[inline(never)] + fn __symbol_type_mismatch() -> ! 
{ + panic!("symbol type mismatch") + } + fn __pop_Variant0< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, Token<'input>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant0(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant6< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, ValueAccessor<'input>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant6(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant5< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, Vec>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant5(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant3< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, __lalrpop_util::ErrorRecovery, LexerError>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant3(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant7< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, alloc::vec::Vec>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant7(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant4< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, core::option::Option>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant4(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant1< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, u32, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant1(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant2< + 'input, + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)> + ) -> (usize, &'input str, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant2(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + pub(crate) fn __reduce0< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // "!"? = "!" => ActionFn(5); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym0.2.clone(); + let __nt = super::__action5::<>(input, errors, __sym0); + __symbols.push((__start, __Symbol::Variant4(__nt), __end)); + (1, 0) + } + pub(crate) fn __reduce1< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // "!"? 
= => ActionFn(6); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); + let __end = __start.clone(); + let __nt = super::__action6::<>(input, errors, &__start, &__end); + __symbols.push((__start, __Symbol::Variant4(__nt), __end)); + (0, 0) + } + pub(crate) fn __reduce2< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // "."? = "." => ActionFn(7); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym0.2.clone(); + let __nt = super::__action7::<>(input, errors, __sym0); + __symbols.push((__start, __Symbol::Variant4(__nt), __end)); + (1, 1) + } + pub(crate) fn __reduce3< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // "."? = => ActionFn(8); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); + let __end = __start.clone(); + let __nt = super::__action8::<>(input, errors, &__start, &__end); + __symbols.push((__start, __Symbol::Variant4(__nt), __end)); + (0, 1) + } + pub(crate) fn __reduce4< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // Lambda = => ActionFn(21); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); + let __end = __start.clone(); + let __nt = super::__action21::<>(input, errors, &__start, &__end); + __symbols.push((__start, __Symbol::Variant5(__nt), __end)); + (0, 2) + } + pub(crate) fn __reduce5< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // Lambda = ValueAccessor+ => ActionFn(22); + let __sym0 = __pop_Variant7(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym0.2.clone(); + let __nt = super::__action22::<>(input, errors, __sym0); + __symbols.push((__start, __Symbol::Variant5(__nt), __end)); + (1, 2) + } + pub(crate) fn __reduce6< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // ValueAccessor = ".", "[", array_idx, "]", "!" 
=> ActionFn(17); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym4.2.clone(); + let __nt = super::__action17::<>(input, errors, __sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (5, 3) + } + pub(crate) fn __reduce7< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // ValueAccessor = "[", array_idx, "]", "!" => ActionFn(18); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym3.2.clone(); + let __nt = super::__action18::<>(input, errors, __sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (4, 3) + } + pub(crate) fn __reduce8< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // ValueAccessor = ".", "[", array_idx, "]" => ActionFn(19); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym3.2.clone(); + let __nt = super::__action19::<>(input, errors, __sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (4, 3) + } + pub(crate) fn __reduce9< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // ValueAccessor = "[", array_idx, "]" => ActionFn(20); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0.clone(); + let __end = __sym2.2.clone(); + let __nt = super::__action20::<>(input, errors, __sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (3, 3) + } + pub(crate) fn __reduce10< + 'err, + 'input, + >( + input: &'input str, + errors: &'err mut Vec, LexerError>>, + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>, + _: core::marker::PhantomData<(&'err (), &'input ())>, + ) -> (usize, usize) + { + // ValueAccessor = ".", field_name, "!" 
+        assert!(__symbols.len() >= 3);
+        let __sym2 = __pop_Variant0(__symbols);
+        let __sym1 = __pop_Variant2(__symbols);
+        let __sym0 = __pop_Variant0(__symbols);
+        let __start = __sym0.0.clone();
+        let __end = __sym2.2.clone();
+        let __nt = super::__action15::<>(input, errors, __sym0, __sym1, __sym2);
+        __symbols.push((__start, __Symbol::Variant6(__nt), __end));
+        (3, 3)
+    }
+    pub(crate) fn __reduce11<
+        'err,
+        'input,
+    >(
+        input: &'input str,
+        errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+        __lookahead_start: Option<&usize>,
+        __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>,
+        _: core::marker::PhantomData<(&'err (), &'input ())>,
+    ) -> (usize, usize)
+    {
+        // ValueAccessor = ".", field_name => ActionFn(16);
+        assert!(__symbols.len() >= 2);
+        let __sym1 = __pop_Variant2(__symbols);
+        let __sym0 = __pop_Variant0(__symbols);
+        let __start = __sym0.0.clone();
+        let __end = __sym1.2.clone();
+        let __nt = super::__action16::<>(input, errors, __sym0, __sym1);
+        __symbols.push((__start, __Symbol::Variant6(__nt), __end));
+        (2, 3)
+    }
+    pub(crate) fn __reduce12<
+        'err,
+        'input,
+    >(
+        input: &'input str,
+        errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+        __lookahead_start: Option<&usize>,
+        __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>,
+        _: core::marker::PhantomData<(&'err (), &'input ())>,
+    ) -> (usize, usize)
+    {
+        // ValueAccessor = error => ActionFn(4);
+        let __sym0 = __pop_Variant3(__symbols);
+        let __start = __sym0.0.clone();
+        let __end = __sym0.2.clone();
+        let __nt = super::__action4::<>(input, errors, __sym0);
+        __symbols.push((__start, __Symbol::Variant6(__nt), __end));
+        (1, 3)
+    }
+    pub(crate) fn __reduce13<
+        'err,
+        'input,
+    >(
+        input: &'input str,
+        errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+        __lookahead_start: Option<&usize>,
+        __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>,
+        _: core::marker::PhantomData<(&'err (), &'input ())>,
+    ) -> (usize, usize)
+    {
+        // ValueAccessor* = => ActionFn(9);
+        let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default();
+        let __end = __start.clone();
+        let __nt = super::__action9::<>(input, errors, &__start, &__end);
+        __symbols.push((__start, __Symbol::Variant7(__nt), __end));
+        (0, 4)
+    }
+    pub(crate) fn __reduce14<
+        'err,
+        'input,
+    >(
+        input: &'input str,
+        errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+        __lookahead_start: Option<&usize>,
+        __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>,
+        _: core::marker::PhantomData<(&'err (), &'input ())>,
+    ) -> (usize, usize)
+    {
+        // ValueAccessor* = ValueAccessor+ => ActionFn(10);
+        let __sym0 = __pop_Variant7(__symbols);
+        let __start = __sym0.0.clone();
+        let __end = __sym0.2.clone();
+        let __nt = super::__action10::<>(input, errors, __sym0);
+        __symbols.push((__start, __Symbol::Variant7(__nt), __end));
+        (1, 4)
+    }
+    pub(crate) fn __reduce15<
+        'err,
+        'input,
+    >(
+        input: &'input str,
+        errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+        __lookahead_start: Option<&usize>,
+        __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>,
+        _: core::marker::PhantomData<(&'err (), &'input ())>,
+    ) -> (usize, usize)
+    {
+        // ValueAccessor+ = ValueAccessor => ActionFn(11);
+        let __sym0 = __pop_Variant6(__symbols);
+        let __start = __sym0.0.clone();
+        let __end = __sym0.2.clone();
+        let __nt = super::__action11::<>(input, errors, __sym0);
+        __symbols.push((__start, __Symbol::Variant7(__nt), __end));
+        (1, 5)
+    }
+    pub(crate) fn __reduce16<
+        'err,
+        'input,
+    >(
+        input: &'input str,
+        errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+        __lookahead_start: Option<&usize>,
+        __symbols: &mut alloc::vec::Vec<(usize,__Symbol<'input>,usize)>,
+        _: core::marker::PhantomData<(&'err (), &'input ())>,
+    ) -> (usize, usize)
+    {
+        // ValueAccessor+ = ValueAccessor+, ValueAccessor => ActionFn(12);
+        assert!(__symbols.len() >= 2);
+        let __sym1 = __pop_Variant6(__symbols);
+        let __sym0 = __pop_Variant7(__symbols);
+        let __start = __sym0.0.clone();
+        let __end = __sym1.2.clone();
+        let __nt = super::__action12::<>(input, errors, __sym0, __sym1);
+        __symbols.push((__start, __Symbol::Variant7(__nt), __end));
+        (2, 5)
+    }
+}
+pub use self::__parse__Lambda::LambdaParser;
+
+#[allow(unused_variables)]
+fn __action0<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, __0, _): (usize, Vec<ValueAccessor<'input>>, usize),
+) -> Vec<ValueAccessor<'input>>
+{
+    __0
+}
+
+#[allow(unused_variables)]
+fn __action1<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, __0, _): (usize, alloc::vec::Vec<ValueAccessor<'input>>, usize),
+) -> Vec<ValueAccessor<'input>>
+{
+    __0
+}
+
+#[allow(unused_variables)]
+fn __action2<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, maybe_dot_selector, _): (usize, core::option::Option<Token<'input>>, usize),
+    (_, _, _): (usize, Token<'input>, usize),
+    (_, idx, _): (usize, u32, usize),
+    (_, _, _): (usize, Token<'input>, usize),
+    (_, maybe_flatten_sign, _): (usize, core::option::Option<Token<'input>>, usize),
+) -> ValueAccessor<'input>
+{
+    {
+        ValueAccessor::ArrayAccess { idx }
+    }
+}
+
+#[allow(unused_variables)]
+fn __action3<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, _, _): (usize, Token<'input>, usize),
+    (_, field_name, _): (usize, &'input str, usize),
+    (_, maybe_flatten_sign, _): (usize, core::option::Option<Token<'input>>, usize),
+) -> ValueAccessor<'input>
+{
+    {
+        ValueAccessor::FieldAccess { field_name }
+    }
+}
+
+#[allow(unused_variables)]
+fn __action4<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, __0, _): (usize, __lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>, usize),
+) -> ValueAccessor<'input>
+{
+    { errors.push(__0); ValueAccessor::Error }
+}
+
+#[allow(unused_variables)]
+fn __action5<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, __0, _): (usize, Token<'input>, usize),
+) -> core::option::Option<Token<'input>>
+{
+    Some(__0)
+}
+
+#[allow(unused_variables)]
+fn __action6<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __lookbehind: &usize,
+    __lookahead: &usize,
+) -> core::option::Option<Token<'input>>
+{
+    None
+}
+
+#[allow(unused_variables)]
+fn __action7<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, __0, _): (usize, Token<'input>, usize),
+) -> core::option::Option<Token<'input>>
+{
+    Some(__0)
+}
+
+#[allow(unused_variables)]
+fn __action8<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __lookbehind: &usize,
+    __lookahead: &usize,
+) -> core::option::Option<Token<'input>>
+{
+    None
+}
+
+#[allow(unused_variables)]
+fn __action9<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __lookbehind: &usize,
+    __lookahead: &usize,
+) -> alloc::vec::Vec<ValueAccessor<'input>>
+{
+    alloc::vec![]
+}
+
+#[allow(unused_variables)]
+fn __action10<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, v, _): (usize, alloc::vec::Vec<ValueAccessor<'input>>, usize),
+) -> alloc::vec::Vec<ValueAccessor<'input>>
+{
+    v
+}
+
+#[allow(unused_variables)]
+fn __action11<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, __0, _): (usize, ValueAccessor<'input>, usize),
+) -> alloc::vec::Vec<ValueAccessor<'input>>
+{
+    alloc::vec![__0]
+}
+
+#[allow(unused_variables)]
+fn __action12<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    (_, v, _): (usize, alloc::vec::Vec<ValueAccessor<'input>>, usize),
+    (_, e, _): (usize, ValueAccessor<'input>, usize),
+) -> alloc::vec::Vec<ValueAccessor<'input>>
+{
+    { let mut v = v; v.push(e); v }
+}
+
+#[allow(unused_variables)]
+fn __action13<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, core::option::Option<Token<'input>>, usize),
+    __1: (usize, Token<'input>, usize),
+    __2: (usize, u32, usize),
+    __3: (usize, Token<'input>, usize),
+    __4: (usize, Token<'input>, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __4.0.clone();
+    let __end0 = __4.2.clone();
+    let __temp0 = __action5(
+        input,
+        errors,
+        __4,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action2(
+        input,
+        errors,
+        __0,
+        __1,
+        __2,
+        __3,
+        __temp0,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action14<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, core::option::Option<Token<'input>>, usize),
+    __1: (usize, Token<'input>, usize),
+    __2: (usize, u32, usize),
+    __3: (usize, Token<'input>, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __3.2.clone();
+    let __end0 = __3.2.clone();
+    let __temp0 = __action6(
+        input,
+        errors,
+        &__start0,
+        &__end0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action2(
+        input,
+        errors,
+        __0,
+        __1,
+        __2,
+        __3,
+        __temp0,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action15<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, Token<'input>, usize),
+    __1: (usize, &'input str, usize),
+    __2: (usize, Token<'input>, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __2.0.clone();
+    let __end0 = __2.2.clone();
+    let __temp0 = __action5(
+        input,
+        errors,
+        __2,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action3(
+        input,
+        errors,
+        __0,
+        __1,
+        __temp0,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action16<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, Token<'input>, usize),
+    __1: (usize, &'input str, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __1.2.clone();
+    let __end0 = __1.2.clone();
+    let __temp0 = __action6(
+        input,
+        errors,
+        &__start0,
+        &__end0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action3(
+        input,
+        errors,
+        __0,
+        __1,
+        __temp0,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action17<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, Token<'input>, usize),
+    __1: (usize, Token<'input>, usize),
+    __2: (usize, u32, usize),
+    __3: (usize, Token<'input>, usize),
+    __4: (usize, Token<'input>, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __0.0.clone();
+    let __end0 = __0.2.clone();
+    let __temp0 = __action7(
+        input,
+        errors,
+        __0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action13(
+        input,
+        errors,
+        __temp0,
+        __1,
+        __2,
+        __3,
+        __4,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action18<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, Token<'input>, usize),
+    __1: (usize, u32, usize),
+    __2: (usize, Token<'input>, usize),
+    __3: (usize, Token<'input>, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __0.0.clone();
+    let __end0 = __0.0.clone();
+    let __temp0 = __action8(
+        input,
+        errors,
+        &__start0,
+        &__end0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action13(
+        input,
+        errors,
+        __temp0,
+        __0,
+        __1,
+        __2,
+        __3,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action19<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, Token<'input>, usize),
+    __1: (usize, Token<'input>, usize),
+    __2: (usize, u32, usize),
+    __3: (usize, Token<'input>, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __0.0.clone();
+    let __end0 = __0.2.clone();
+    let __temp0 = __action7(
+        input,
+        errors,
+        __0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action14(
+        input,
+        errors,
+        __temp0,
+        __1,
+        __2,
+        __3,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action20<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, Token<'input>, usize),
+    __1: (usize, u32, usize),
+    __2: (usize, Token<'input>, usize),
+) -> ValueAccessor<'input>
+{
+    let __start0 = __0.0.clone();
+    let __end0 = __0.0.clone();
+    let __temp0 = __action8(
+        input,
+        errors,
+        &__start0,
+        &__end0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action14(
+        input,
+        errors,
+        __temp0,
+        __0,
+        __1,
+        __2,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action21<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __lookbehind: &usize,
+    __lookahead: &usize,
+) -> Vec<ValueAccessor<'input>>
+{
+    let __start0 = __lookbehind.clone();
+    let __end0 = __lookahead.clone();
+    let __temp0 = __action9(
+        input,
+        errors,
+        &__start0,
+        &__end0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action1(
+        input,
+        errors,
+        __temp0,
+    )
+}
+
+#[allow(unused_variables)]
+fn __action22<
+    'err,
+    'input,
+>(
+    input: &'input str,
+    errors: &'err mut Vec<__lalrpop_util::ErrorRecovery<usize, Token<'input>, LexerError>>,
+    __0: (usize, alloc::vec::Vec<ValueAccessor<'input>>, usize),
+) -> Vec<ValueAccessor<'input>>
+{
+    let __start0 = __0.0.clone();
+    let __end0 = __0.2.clone();
+    let __temp0 = __action10(
+        input,
+        errors,
+        __0,
+    );
+    let __temp0 = (__start0, __temp0, __end0);
+    __action1(
+        input,
+        errors,
+        __temp0,
+    )
+}
+
+pub trait __ToTriple<'err, 'input, > {
+    fn to_triple(value: Self) -> Result<(usize,Token<'input>,usize), __lalrpop_util::ParseError<usize, Token<'input>, LexerError>>;
+}
+
+impl<'err, 'input, > __ToTriple<'err, 'input, > for (usize, Token<'input>, usize) {
+    fn to_triple(value: Self) -> Result<(usize,Token<'input>,usize), __lalrpop_util::ParseError<usize, Token<'input>, LexerError>> {
+        Ok(value)
+    }
+}
+impl<'err, 'input, > __ToTriple<'err, 'input, > for Result<(usize, Token<'input>, usize), LexerError> {
+    fn to_triple(value: Self) -> Result<(usize,Token<'input>,usize), __lalrpop_util::ParseError<usize, Token<'input>, LexerError>> {
+        match value {
+            Ok(v) => Ok(v),
+            Err(error) => Err(__lalrpop_util::ParseError::User { error }),
+        }
+    }
+}
diff --git a/crates/air-lib/polyplets/src/tetraplet.rs b/crates/air-lib/polyplets/src/tetraplet.rs
index 087f7de8..d235000b 100644
--- a/crates/air-lib/polyplets/src/tetraplet.rs
+++ b/crates/air-lib/polyplets/src/tetraplet.rs
@@ -27,6 +27,7 @@ pub struct SecurityTetraplet {
     pub triplet: ResolvedTriplet,
 
     /// Value was produced by applying this `json_path` to the output from `call_service`.
+    // TODO: since it's not a json path anymore, it's needed to rename it to lambda
     pub json_path: String,
 }
 
@@ -55,7 +56,7 @@ impl SecurityTetraplet {
         }
     }
 
-    pub fn add_json_path(&mut self, json_path: &str) {
+    pub fn add_lambda(&mut self, json_path: &str) {
         self.json_path.push_str(json_path)
     }
 }
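
The generated `LambdaParser` above reduces a lambda such as `.args.[0]` to a list of `ValueAccessor`s (`FieldAccess`, `ArrayAccess`, plus an `Error` recovery variant), which replaces the removed json path when a `call_service` result is queried; this is also what the `SecurityTetraplet::json_path` TODO and the `add_json_path` → `add_lambda` rename refer to. The sketch below is only an illustration of that application step under the assumption that accessors are applied one by one to a JSON value; it uses a local stand-in enum and `serde_json`, not the actual `air-lambda-ast`/`air-lambda-parser` types or API.

```rust
use serde_json::{json, Value};

// Hypothetical stand-in for the ValueAccessor variants built by __action2/__action3 above.
#[derive(Debug)]
enum Accessor<'i> {
    ArrayAccess { idx: u32 },
    FieldAccess { field_name: &'i str },
}

// Apply a chain of accessors (e.g. the result of parsing ".args.[0]") to a call_service output.
fn apply<'v>(mut value: &'v Value, lambda: &[Accessor<'_>]) -> Option<&'v Value> {
    for accessor in lambda {
        value = match accessor {
            Accessor::ArrayAccess { idx } => value.get(*idx as usize)?,
            Accessor::FieldAccess { field_name } => value.get(*field_name)?,
        };
    }
    Some(value)
}

fn main() {
    let call_result = json!({ "args": ["first", "second"] });
    // Roughly what ".args.[0]" parses to; the flattening sign "!" is accepted but ignored.
    let lambda = [
        Accessor::FieldAccess { field_name: "args" },
        Accessor::ArrayAccess { idx: 0 },
    ];
    assert_eq!(apply(&call_result, &lambda), Some(&json!("first")));
}
```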