Mirror of https://github.com/fluencelabs/aquavm (synced 2024-12-04 15:20:16 +00:00)
Improve flattening (#69)

parent d43b5454fa
commit 13f93a0f88
Cargo.lock (generated): 71 changed lines
@@ -11,7 +11,7 @@ dependencies = [
 [[package]]
 name = "air-parser"
-version = "0.4.0"
+version = "0.5.0"
 dependencies = [
  "codespan",
  "codespan-reporting",
@@ -79,7 +79,7 @@ dependencies = [
 [[package]]
 name = "aquamarine"
-version = "0.6.0"
+version = "0.7.0"
 dependencies = [
  "fluence",
  "interpreter-lib",
@@ -240,9 +240,9 @@ checksum = "cfa8873f51c92e232f9bac4065cddef41b714152812bfc5f7672ba16d6ef8cd9"
 [[package]]
 name = "bstr"
-version = "0.2.14"
+version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "473fc6b38233f9af7baa94fb5852dca389e3d95b8e21c8e3719301462c5d9faf"
+checksum = "a40b47ad93e1a5404e6c18dec46b628214fee441c70f4ab5d6942142cc268a3d"
 dependencies = [
  "lazy_static",
  "memchr",
@@ -252,9 +252,9 @@ dependencies = [
 [[package]]
 name = "bumpalo"
-version = "3.5.0"
+version = "3.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f07aa6688c702439a1be0307b6a94dffe1168569e45b9500c1372bc580740d59"
+checksum = "099e596ef14349721d9016f6b80dd3419ea1bf289ab9b44df8e4dfd3a005d5d9"

 [[package]]
 name = "byte-tools"
@@ -528,9 +528,9 @@ dependencies = [
 [[package]]
 name = "ctor"
-version = "0.1.18"
+version = "0.1.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10bcb9d7dcbf7002aaffbb53eac22906b64cdcc127971dcc387d8eb7c95d5560"
+checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19"
 dependencies = [
  "quote",
  "syn",
@@ -976,7 +976,7 @@ checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8"
 dependencies = [
  "cfg-if 1.0.0",
  "libc",
- "wasi 0.10.1+wasi-snapshot-preview1",
+ "wasi 0.10.2+wasi-snapshot-preview1",
 ]

 [[package]]
@@ -1073,7 +1073,7 @@ dependencies = [
 [[package]]
 name = "interpreter-lib"
-version = "0.6.0"
+version = "0.7.0"
 dependencies = [
  "air-parser",
  "aqua-interpreter-interface 0.3.1",
@@ -1159,9 +1159,9 @@ dependencies = [
 [[package]]
 name = "jsonpath_lib-fl"
-version = "0.2.6"
+version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "243653439f0992adf0bbf6ed5b798966fdbacd417b9dcb025b50200ec20c17ff"
+checksum = "e81233a3c2e1f4579f1fdb856eeec115dcb23817374268212ebad691bd53e664"
 dependencies = [
  "array_tool",
  "env_logger",
@@ -1216,22 +1216,22 @@ checksum = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a"
 [[package]]
 name = "lexical-core"
-version = "0.7.4"
+version = "0.7.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db65c6da02e61f55dae90a0ae427b2a5f6b3e8db09f58d10efab23af92592616"
+checksum = "21f866863575d0e1d654fbeeabdc927292fdf862873dc3c96c6f753357e13374"
 dependencies = [
  "arrayvec",
  "bitflags",
- "cfg-if 0.1.10",
+ "cfg-if 1.0.0",
  "ryu",
  "static_assertions",
 ]

 [[package]]
 name = "libc"
-version = "0.2.83"
+version = "0.2.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7eb0c4e9c72ee9d69b767adebc5f4788462a3b45624acd919475c92597bcaf4f"
+checksum = "b7282d924be3275cec7f6756ff4121987bc6481325397dde6ba3e7802b1a8b1c"

 [[package]]
 name = "lock_api"
@@ -1416,7 +1416,7 @@ checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb"
 dependencies = [
  "instant",
  "lock_api 0.4.2",
- "parking_lot_core 0.8.2",
+ "parking_lot_core 0.8.3",
 ]

 [[package]]
@@ -1428,21 +1428,21 @@ dependencies = [
  "cfg-if 0.1.10",
  "cloudabi",
  "libc",
- "redox_syscall",
+ "redox_syscall 0.1.57",
  "smallvec",
  "winapi",
 ]

 [[package]]
 name = "parking_lot_core"
-version = "0.8.2"
+version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272"
+checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018"
 dependencies = [
  "cfg-if 1.0.0",
  "instant",
  "libc",
- "redox_syscall",
+ "redox_syscall 0.2.5",
  "smallvec",
  "winapi",
 ]
@@ -1554,9 +1554,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
 [[package]]
 name = "quote"
-version = "1.0.8"
+version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
+checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
 dependencies = [
  "proc-macro2",
 ]
@@ -1603,6 +1603,15 @@ version = "0.1.57"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"

+[[package]]
+name = "redox_syscall"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9"
+dependencies = [
+ "bitflags",
+]
+
 [[package]]
 name = "redox_users"
 version = "0.3.5"
@@ -1610,7 +1619,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d"
 dependencies = [
  "getrandom 0.1.16",
- "redox_syscall",
+ "redox_syscall 0.1.57",
  "rust-argon2",
 ]

@@ -1670,9 +1679,9 @@ checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e"
 [[package]]
 name = "safe-transmute"
-version = "0.11.0"
+version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50b8b2cd387f744f69469aaed197954ba4c0ecdb31e02edf99b023e0df11178a"
+checksum = "1d95e7284b4bd97e24af76023904cd0157c9cc9da0310beb4139a1e88a748d47"

 [[package]]
 name = "same-file"
@@ -1888,9 +1897,9 @@ dependencies = [
 [[package]]
 name = "thread_local"
-version = "1.1.2"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8208a331e1cb318dd5bd76951d2b8fc48ca38a69f5f4e4af1b6a9f8c6236915"
+checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd"
 dependencies = [
  "once_cell",
 ]
@@ -2038,9 +2047,9 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
 [[package]]
 name = "wasi"
-version = "0.10.1+wasi-snapshot-preview1"
+version = "0.10.2+wasi-snapshot-preview1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "93c6c3420963c5c64bca373b25e77acb562081b9bb4dd5bb864187742186cea9"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"

 [[package]]
 name = "wasm-bindgen"
@@ -1,6 +1,6 @@
 [package]
 name = "air-parser"
-version = "0.4.0"
+version = "0.5.0"
 authors = ["Fluence Labs"]
 edition = "2018"
 license = "Apache-2.0"
@@ -1,5 +1,6 @@
 use crate::parser::ast::*;
-use crate::parser::into_variable_and_path;
+use crate::parser::air_parser::into_variable_and_path;
+use crate::parser::air_parser::make_flattened_error;
 use crate::parser::lexer::LexerError;
 use crate::parser::lexer::Token;
 use crate::parser::lexer::Number;
@@ -71,9 +72,14 @@ ServiceId = CallInstrValue;
 CallInstrValue: CallInstrValue<'input> = {
     <s:Literal> => CallInstrValue::Literal(s),
     <s:Alphanumeric> => CallInstrValue::Variable(s),
-    <v:JsonPath> => {
-        let (variable, path) = into_variable_and_path(v.0, v.1);
-        CallInstrValue::JsonPath { variable, path }
+    <l: @L> <v:JsonPath> <r: @R> => {
+        let (variable, path) = into_variable_and_path(v.0, v.1, v.2);
+        let should_flatten = v.2;
+        if !should_flatten {
+            let token = Token::JsonPath(v.0, v.1, v.2);
+            errors.push(make_flattened_error(l, token, r));
+        }
+        CallInstrValue::JsonPath { variable, path, should_flatten }
     },
     InitPeerId => CallInstrValue::InitPeerId,
 }
@@ -84,8 +90,9 @@ CallInstrArgValue: CallInstrArgValue<'input> = {
     <s:Literal> => CallInstrArgValue::Literal(s),
     <s:Alphanumeric> => CallInstrArgValue::Variable(s),
     <v:JsonPath> => {
-        let (variable, path) = into_variable_and_path(v.0, v.1);
-        CallInstrArgValue::JsonPath { variable, path }
+        let (variable, path) = into_variable_and_path(v.0, v.1, v.2);
+        let should_flatten = v.2;
+        CallInstrArgValue::JsonPath { variable, path, should_flatten }
     },
     <n:Number> => CallInstrArgValue::Number(n),
     <b:Boolean> => CallInstrArgValue::Boolean(b),
@@ -96,8 +103,9 @@ CallInstrArgValue: CallInstrArgValue<'input> = {
 Iterable: IterableValue<'input> = {
     <s:Alphanumeric> => IterableValue::Variable(s),
     <v:JsonPath> => {
-        let (variable, path) = into_variable_and_path(v.0, v.1);
-        IterableValue::JsonPath { variable, path }
+        let (variable, path) = into_variable_and_path(v.0, v.1, v.2);
+        let should_flatten = v.2;
+        IterableValue::JsonPath { variable, path, should_flatten }
     },
 }

@@ -105,8 +113,9 @@ Matchable: MatchableValue<'input> = {
     <s:Alphanumeric> => MatchableValue::Variable(s),
     <s:Literal> => MatchableValue::Literal(s),
     <v:JsonPath> => {
-        let (variable, path) = into_variable_and_path(v.0, v.1);
-        MatchableValue::JsonPath { variable, path }
+        let (variable, path) = into_variable_and_path(v.0, v.1, v.2);
+        let should_flatten = v.2;
+        MatchableValue::JsonPath { variable, path, should_flatten }
     },
 }

@@ -122,7 +131,7 @@ extern {

     Alphanumeric => Token::Alphanumeric(<&'input str>),
     Literal => Token::StringLiteral(<&'input str>),
-    JsonPath => Token::JsonPath(<&'input str>, <usize>),
+    JsonPath => Token::JsonPath(<&'input str>, <usize>, <bool>),
     Accumulator => Token::Accumulator(<&'input str>),
     Number => Token::Number(<Number>),
     Boolean => Token::Boolean(<bool>),
(File diff suppressed because it is too large.)
@@ -147,5 +147,34 @@ fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label<usize> {
         LeadingDot(start, end) => {
             Label::primary(file_id, start..end).with_message(error.to_string())
         }
+        CallArgsNotFlattened(start, end) => {
+            Label::primary(file_id, start..end).with_message(error.to_string())
+        }
+    }
+}
+
+pub(super) fn into_variable_and_path(str: &str, pos: usize, should_flatten: bool) -> (&str, &str) {
+    let json_path = if should_flatten {
+        &str[pos + 1..str.len() - 1]
+    } else {
+        &str[pos + 1..]
+    };
+
+    (&str[0..pos], json_path)
+}
+
+pub(super) fn make_flattened_error(
+    start_pos: usize,
+    token: Token<'_>,
+    end_pos: usize,
+) -> ErrorRecovery<usize, Token<'_>, LexerError> {
+    let error = LexerError::CallArgsNotFlattened(start_pos, end_pos);
+    let error = ParseError::User { error };
+
+    let dropped_tokens = vec![(start_pos, token, end_pos)];
+
+    ErrorRecovery {
+        error,
+        dropped_tokens,
     }
 }
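The `into_variable_and_path` helper added above strips the trailing flattening sign from the json path when it splits a lexed token into its variable and path parts. A minimal standalone sketch of that behavior (not part of this diff; the names here are illustrative):

// Mirrors into_variable_and_path above: `pos` is the position of the first dot
// in the lexed token, and `should_flatten` is set when the token ends with '!'.
fn split(token: &str, pos: usize, should_flatten: bool) -> (&str, &str) {
    let json_path = if should_flatten {
        &token[pos + 1..token.len() - 1] // drop the trailing '!'
    } else {
        &token[pos + 1..]
    };
    (&token[0..pos], json_path)
}

fn main() {
    assert_eq!(split("id.$.a!", 2, true), ("id", "$.a"));
    assert_eq!(split("id.$.a", 2, false), ("id", "$.a"));
}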
@@ -63,7 +63,11 @@ pub enum CallInstrValue<'i> {
     InitPeerId,
     Literal(&'i str),
     Variable(&'i str),
-    JsonPath { variable: &'i str, path: &'i str },
+    JsonPath {
+        variable: &'i str,
+        path: &'i str,
+        should_flatten: bool,
+    },
 }

 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
@@ -74,13 +78,21 @@ pub enum CallInstrArgValue<'i> {
     Number(Number),
     Boolean(bool),
     Variable(&'i str),
-    JsonPath { variable: &'i str, path: &'i str },
+    JsonPath {
+        variable: &'i str,
+        path: &'i str,
+        should_flatten: bool,
+    },
 }

 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum IterableValue<'i> {
     Variable(&'i str),
-    JsonPath { variable: &'i str, path: &'i str },
+    JsonPath {
+        variable: &'i str,
+        path: &'i str,
+        should_flatten: bool,
+    },
 }

 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
@@ -89,7 +101,11 @@ pub enum MatchableValue<'i> {
     Number(Number),
     Boolean(bool),
     Variable(&'i str),
-    JsonPath { variable: &'i str, path: &'i str },
+    JsonPath {
+        variable: &'i str,
+        path: &'i str,
+        should_flatten: bool,
+    },
 }

 #[derive(Serialize, Debug, PartialEq, Clone)]
@@ -29,11 +29,26 @@ impl fmt::Display for CallInstrArgValue<'_> {
             Number(number) => write!(f, "{}", number),
             Boolean(bool) => write!(f, "{}", bool),
             Variable(str) => write!(f, "{}", str),
-            JsonPath { variable, path } => write!(f, "{}.{}", variable, path),
+            JsonPath {
+                variable,
+                path,
+                should_flatten,
+            } => print_json_path(variable, path, should_flatten, f),
         }
     }
 }

+fn print_json_path(
+    variable: &str,
+    path: &str,
+    should_flatten: &bool,
+    f: &mut fmt::Formatter,
+) -> fmt::Result {
+    let maybe_flatten_char = if *should_flatten { "!" } else { "" };
+
+    write!(f, "{}.{}{}", variable, path, maybe_flatten_char)
+}
+
 impl fmt::Display for CallInstrValue<'_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         use CallInstrValue::*;
@@ -42,7 +57,11 @@ impl fmt::Display for CallInstrValue<'_> {
             InitPeerId => write!(f, "%init_peer_id%"),
             Literal(str) => write!(f, r#""{}""#, str),
             Variable(str) => write!(f, "{}", str),
-            JsonPath { variable, path } => write!(f, "{}.{}", variable, path),
+            JsonPath {
+                variable,
+                path,
+                should_flatten,
+            } => print_json_path(variable, path, should_flatten, f),
         }
     }
 }
@@ -53,7 +72,11 @@ impl fmt::Display for IterableValue<'_> {

         match self {
             Variable(str) => write!(f, "{}", str),
-            JsonPath { variable, path } => write!(f, "{}.{}", variable, path),
+            JsonPath {
+                variable,
+                path,
+                should_flatten,
+            } => print_json_path(variable, path, should_flatten, f),
         }
     }
 }
@@ -67,7 +90,11 @@ impl fmt::Display for MatchableValue<'_> {
             Number(number) => write!(f, "{}", number),
             Boolean(bool) => write!(f, "{}", bool),
             Variable(str) => write!(f, "{}", str),
-            JsonPath { variable, path } => write!(f, "{}.{}", variable, path),
+            JsonPath {
+                variable,
+                path,
+                should_flatten,
+            } => print_json_path(variable, path, should_flatten, f),
         }
     }
 }
@@ -34,6 +34,7 @@ struct ParserState {
     pub(self) first_dot_met_pos: Option<usize>,
     pub(self) non_numeric_met: bool,
     pub(self) digit_met: bool,
+    pub(self) flattening_met: bool,
     pub(self) is_first_char: bool,
     pub(self) current_char: char,
     pub(self) current_pos: usize,
@@ -58,6 +59,7 @@ impl<'input> CallVariableParser<'input> {
             first_dot_met_pos: None,
             non_numeric_met: false,
             digit_met: false,
+            flattening_met: false,
             is_first_char: true,
             current_char,
             current_pos,
@@ -186,8 +188,8 @@ impl<'input> CallVariableParser<'input> {
         Ok(())
     }

-    fn try_parse_as_json_path(&self) -> LexerResult<()> {
-        if !self.json_path_allowed_char() {
+    fn try_parse_as_json_path(&mut self) -> LexerResult<()> {
+        if !self.json_path_allowed_char() && !self.try_parse_as_flattening() {
             let error_pos = self.pos_in_string_to_parse();
             return Err(LexerError::InvalidJsonPath(error_pos, error_pos));
         }
@@ -195,6 +197,15 @@ impl<'input> CallVariableParser<'input> {
         Ok(())
     }

+    fn try_parse_as_flattening(&mut self) -> bool {
+        if self.is_last_char() && self.current_char() == '!' {
+            self.state.flattening_met = true;
+            return true;
+        }
+
+        false
+    }
+
     fn try_parse_first_met_dot(&mut self) -> LexerResult<bool> {
         if !self.dot_met() && self.current_char() == '.' {
             if self.current_pos() == 0 {
@@ -238,6 +249,10 @@ impl<'input> CallVariableParser<'input> {
         self.state.current_char
     }

+    fn is_last_char(&self) -> bool {
+        self.current_pos() == self.string_to_parse.len() - 1
+    }
+
     fn to_token(&self) -> LexerResult<Token<'input>> {
         use super::token::UnparsedNumber;

@@ -256,6 +271,7 @@ impl<'input> CallVariableParser<'input> {
             (false, true) => Ok(Token::JsonPath(
                 self.string_to_parse,
                 self.state.first_dot_met_pos.unwrap(),
+                self.state.flattening_met,
             )),
         }
     }
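The lexer changes above only treat '!' as a flattening marker when it is the very last character of a variable-with-json-path token; anywhere else it is now rejected as an invalid json-path character. A standalone sketch of that check (illustrative only, not the crate's API, assuming ASCII input):

fn flattening_met(token: &str, current_pos: usize) -> bool {
    // a '!' counts as flattening only in the last position of the token
    let is_last_char = current_pos == token.len() - 1;
    is_last_char && token.as_bytes()[current_pos] == b'!'
}

fn main() {
    let token = "m.$.[1]!";
    assert!(flattening_met(token, token.len() - 1)); // trailing '!' sets the flag
    assert!(!flattening_met("value.$!.x", 7)); // '!' mid-path is not a flattening marker
}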
@@ -53,6 +53,9 @@ pub enum LexerError {

     #[error("leading dot without any symbols before - please write 0 if it's float or variable name if it's json path")]
     LeadingDot(usize, usize),
+
+    #[error("while using json path in call triplet, result should be flattened, add ! at the end")]
+    CallArgsNotFlattened(usize, usize),
 }

 impl From<std::convert::Infallible> for LexerError {
@@ -269,24 +269,28 @@ fn too_big_float_number() {
 #[test]
 fn json_path() {
     // this json path contains all allowed in json path charactes
-    const JSON_PATH: &str = r#"value.$[$@[]():?.*,"!]"#;
+    const JSON_PATH: &str = r#"value.$[$@[]():?.*,"]"#;

     lexer_test(
         JSON_PATH,
-        Single(Ok((0, Token::JsonPath(JSON_PATH, 5), JSON_PATH.len()))),
+        Single(Ok((
+            0,
+            Token::JsonPath(JSON_PATH, 5, false),
+            JSON_PATH.len(),
+        ))),
     );
 }

 #[test]
 fn json_path_numbers() {
-    const JSON_PATH: &str = r#"12345.$[$@[]():?.*,"!]"#;
+    const JSON_PATH: &str = r#"12345.$[$@[]():?.*,"]"#;

     lexer_test(
         JSON_PATH,
         Single(Err(LexerError::UnallowedCharInNumber(6, 6))),
     );

-    const JSON_PATH1: &str = r#"+12345.$[$@[]():?.*,"!]"#;
+    const JSON_PATH1: &str = r#"+12345.$[$@[]():?.*,"]"#;

     lexer_test(
         JSON_PATH1,
@@ -320,13 +324,21 @@ fn unclosed_quote() {

 #[test]
 fn bad_value() {
-    // value contains ! that only allowed in json path
-    const INVALID_VALUE: &str = r#"val!ue.$[$@[]():?.*,"\!]"#;
+    // value contains ! that only allowed at the end of a json path
+    const INVALID_VALUE: &str = r#"val!ue.$[$@[]():?.*,"\]"#;

     lexer_test(
         INVALID_VALUE,
         Single(Err(LexerError::IsNotAlphanumeric(3, 3))),
     );
+
+    // value contains ! that only allowed at the end of a json path
+    const INVALID_VALUE2: &str = r#"value.$![$@[]():?.*,"\]"#;
+
+    lexer_test(
+        INVALID_VALUE2,
+        Single(Err(LexerError::InvalidJsonPath(7, 7))),
+    );
 }

 #[test]
@@ -26,7 +26,7 @@ pub enum Token<'input> {

     StringLiteral(&'input str),
     Alphanumeric(&'input str),
-    JsonPath(&'input str, usize),
+    JsonPath(&'input str, usize, bool),
     Accumulator(&'input str),
     Number(Number),
     Boolean(bool),
@@ -37,7 +37,6 @@ pub(super) fn is_json_path_allowed_char(ch: char) -> bool {
         ',' => true,
         '"' => true,
         '\'' => true,
-        '!' => true,
         ch => is_aqua_alphanumeric(ch),
     }
 }
@@ -30,7 +30,3 @@ pub mod tests;
 pub use self::air_parser::parse;
 pub use air::AIRParser;
 pub use lexer::AIRLexer;
-
-fn into_variable_and_path(str: &str, pos: usize) -> (&str, &str) {
-    (&str[0..pos], &str[pos + 1..])
-}
@@ -123,13 +123,14 @@ fn parse_json_path() {
     use ast::PeerPart::*;

     let source_code = r#"
-        (call id.$.a "f" ["hello" name] void[])
+        (call id.$.a! "f" ["hello" name] void[])
        "#;
     let instruction = parse(source_code);
     let expected = Instruction::Call(Call {
         peer_part: PeerPk(CallInstrValue::JsonPath {
             variable: "id",
             path: "$.a",
+            should_flatten: true,
         }),
         function_part: FuncName(CallInstrValue::Literal("f")),
         args: Rc::new(vec![
@@ -141,6 +142,24 @@ fn parse_json_path() {
     assert_eq!(instruction, expected);
 }

+#[test]
+fn parse_json_path_without_flattening() {
+    let source_code = r#"
+        (call id.$.a "f" ["hello" name] void[])
+        "#;
+
+    let lexer = crate::AIRLexer::new(source_code);
+
+    let parser = crate::AIRParser::new();
+    let mut errors = Vec::new();
+    parser
+        .parse(source_code, &mut errors, lexer)
+        .expect("parser shoudn't fail");
+
+    assert_eq!(errors.len(), 1);
+    assert!(matches!(errors[0], lalrpop_util::ErrorRecovery { .. }));
+}
+
 #[test]
 fn parse_json_path_complex() {
     use ast::Call;
@@ -151,8 +170,8 @@ fn parse_json_path_complex() {

     let source_code = r#"
         (seq
-            (call m.$.[1] "f" [] void)
-            (call m.$.abc["c"].cde[a][0].cde["bcd"] "f" [] void)
+            (call m.$.[1]! "f" [] void)
+            (call m.$.abc["c"].cde[a][0].cde["bcd"]! "f" [] void)
         )
        "#;
     let instruction = parse(source_code);
@@ -161,6 +180,7 @@ fn parse_json_path_complex() {
         peer_part: PeerPk(CallInstrValue::JsonPath {
             variable: "m",
             path: "$.[1]",
+            should_flatten: true,
         }),
         function_part: FuncName(CallInstrValue::Literal("f")),
         args: Rc::new(vec![]),
@@ -170,6 +190,7 @@ fn parse_json_path_complex() {
         peer_part: PeerPk(CallInstrValue::JsonPath {
             variable: "m",
             path: r#"$.abc["c"].cde[a][0].cde["bcd"]"#,
+            should_flatten: true,
         }),
         function_part: FuncName(CallInstrValue::Literal("f")),
         args: Rc::new(vec![]),
@@ -189,13 +210,14 @@ fn json_path_square_braces() {
     use ast::PeerPart::*;

     let source_code = r#"
-        (call u.$["peer_id"] ("return" "") [u.$["peer_id"].cde[0]["abc"].abc u.$["name"]] void[])
+        (call u.$["peer_id"]! ("return" "") [u.$["peer_id"].cde[0]["abc"].abc u.$["name"]] void[])
        "#;
     let instruction = parse(source_code);
     let expected = Instruction::Call(Call {
         peer_part: PeerPk(CallInstrValue::JsonPath {
             variable: "u",
             path: r#"$["peer_id"]"#,
+            should_flatten: true,
         }),
         function_part: ServiceIdWithFuncName(
             CallInstrValue::Literal("return"),
@@ -205,10 +227,12 @@ fn json_path_square_braces() {
             CallInstrArgValue::JsonPath {
                 variable: "u",
                 path: r#"$["peer_id"].cde[0]["abc"].abc"#,
+                should_flatten: false,
             },
             CallInstrArgValue::JsonPath {
                 variable: "u",
                 path: r#"$["name"]"#,
+                should_flatten: false,
             },
         ]),
         output: Accumulator("void"),
@@ -600,6 +624,7 @@ fn fold_json_path() {
         iterable: JsonPath {
             variable: "members",
             path: "$.[\"users\"]",
+            should_flatten: false,
         },
         iterator: "m",
         instruction: Rc::new(null()),
@@ -622,6 +647,7 @@ fn comments() {
         iterable: JsonPath {
             variable: "members",
             path: "$.[\"users\"]",
+            should_flatten: false,
         },
         iterator: "m",
         instruction: Rc::new(null()),
@@ -1,6 +1,6 @@
 [package]
 name = "interpreter-lib"
-version = "0.6.0"
+version = "0.7.0"
 authors = ["Fluence Labs"]
 edition = "2018"

@@ -17,7 +17,7 @@ aqua-interpreter-interface = { path = "../crates/interpreter-interface" }
 serde = { version = "=1.0.118", features = [ "derive", "rc" ] }
 serde_json = "=1.0.61"

-jsonpath_lib-fl = "=0.2.6"
+jsonpath_lib-fl = "=0.2.5"

 boolinator = "2.4.0"
 log = "0.4.11"
|
|||||||
let jvalue = resolved.into_jvalue();
|
let jvalue = resolved.into_jvalue();
|
||||||
jvalue_to_string(jvalue)?
|
jvalue_to_string(jvalue)?
|
||||||
}
|
}
|
||||||
CallInstrValue::JsonPath { variable, path } => {
|
CallInstrValue::JsonPath {
|
||||||
|
variable,
|
||||||
|
path,
|
||||||
|
should_flatten,
|
||||||
|
} => {
|
||||||
|
// this is checked on the parsing stage
|
||||||
|
debug_assert!(*should_flatten);
|
||||||
|
|
||||||
let resolved = resolve_to_jvaluable(variable, ctx)?;
|
let resolved = resolve_to_jvaluable(variable, ctx)?;
|
||||||
let resolved = resolved.apply_json_path(path)?;
|
let resolved = resolved.apply_json_path(path)?;
|
||||||
vec_to_string(resolved, path)?
|
vec_to_string(resolved, path)?
|
||||||
|
@ -50,7 +50,23 @@ pub(crate) fn are_matchable_eq<'ctx>(
|
|||||||
|
|
||||||
Ok(left_value == right_value)
|
Ok(left_value == right_value)
|
||||||
}
|
}
|
||||||
(JsonPath { variable: lv, path: lp }, JsonPath { variable: rv, path: rp }) => {
|
(
|
||||||
|
JsonPath {
|
||||||
|
variable: lv,
|
||||||
|
path: lp,
|
||||||
|
should_flatten: lsf,
|
||||||
|
},
|
||||||
|
JsonPath {
|
||||||
|
variable: rv,
|
||||||
|
path: rp,
|
||||||
|
should_flatten: rsf,
|
||||||
|
},
|
||||||
|
) => {
|
||||||
|
// TODO: improve comparison
|
||||||
|
if lsf != rsf {
|
||||||
|
return Ok(false);
|
||||||
|
}
|
||||||
|
|
||||||
let left_jvaluable = resolve_to_jvaluable(lv, exec_ctx)?;
|
let left_jvaluable = resolve_to_jvaluable(lv, exec_ctx)?;
|
||||||
let left_value = left_jvaluable.apply_json_path(lp)?;
|
let left_value = left_jvaluable.apply_json_path(lp)?;
|
||||||
|
|
||||||
@ -91,14 +107,27 @@ fn compare_matchable<'ctx>(
|
|||||||
let jvalue = jvaluable.as_jvalue();
|
let jvalue = jvaluable.as_jvalue();
|
||||||
Ok(comparator(jvalue))
|
Ok(comparator(jvalue))
|
||||||
}
|
}
|
||||||
JsonPath { variable, path } => {
|
JsonPath {
|
||||||
|
variable,
|
||||||
|
path,
|
||||||
|
should_flatten,
|
||||||
|
} => {
|
||||||
let jvaluable = resolve_to_jvaluable(variable, exec_ctx)?;
|
let jvaluable = resolve_to_jvaluable(variable, exec_ctx)?;
|
||||||
let jvalues = jvaluable.apply_json_path(path)?;
|
let jvalues = jvaluable.apply_json_path(path)?;
|
||||||
if jvalues.len() != 1 {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(comparator(Cow::Borrowed(jvalues[0])))
|
let jvalue = if *should_flatten {
|
||||||
|
if jvalues.len() != 1 {
|
||||||
|
return Ok(false);
|
||||||
|
}
|
||||||
|
Cow::Borrowed(jvalues[0])
|
||||||
|
} else {
|
||||||
|
let jvalue = jvalues.into_iter().cloned().collect::<Vec<_>>();
|
||||||
|
let jvalue = JValue::Array(jvalue);
|
||||||
|
|
||||||
|
Cow::Owned(jvalue)
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(comparator(jvalue))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -37,7 +37,11 @@ pub(super) fn construct_iterable_value<'ctx>(
|
|||||||
) -> ExecutionResult<Option<IterableValue>> {
|
) -> ExecutionResult<Option<IterableValue>> {
|
||||||
match ast_iterable {
|
match ast_iterable {
|
||||||
ast::IterableValue::Variable(name) => handle_instruction_variable(exec_ctx, name),
|
ast::IterableValue::Variable(name) => handle_instruction_variable(exec_ctx, name),
|
||||||
ast::IterableValue::JsonPath { variable, path } => handle_instruction_json_path(exec_ctx, variable, path),
|
ast::IterableValue::JsonPath {
|
||||||
|
variable,
|
||||||
|
path,
|
||||||
|
should_flatten,
|
||||||
|
} => handle_instruction_json_path(exec_ctx, variable, path, *should_flatten),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -97,13 +101,14 @@ fn handle_instruction_json_path<'ctx>(
|
|||||||
exec_ctx: &ExecutionCtx<'ctx>,
|
exec_ctx: &ExecutionCtx<'ctx>,
|
||||||
variable_name: &str,
|
variable_name: &str,
|
||||||
json_path: &str,
|
json_path: &str,
|
||||||
|
should_flatten: bool,
|
||||||
) -> ExecutionResult<Option<IterableValue>> {
|
) -> ExecutionResult<Option<IterableValue>> {
|
||||||
use ExecutionError::JValueAccJsonPathError;
|
use ExecutionError::JValueAccJsonPathError;
|
||||||
|
|
||||||
let iterable: Option<IterableValue> = match exec_ctx.data_cache.get(variable_name) {
|
match exec_ctx.data_cache.get(variable_name) {
|
||||||
Some(AValue::JValueRef(variable)) => {
|
Some(AValue::JValueRef(variable)) => {
|
||||||
let jvalues = apply_json_path(&variable.result, json_path)?;
|
let jvalues = apply_json_path(&variable.result, json_path)?;
|
||||||
from_jvalues(jvalues, variable.triplet.clone(), json_path)
|
from_jvalues(jvalues, variable.triplet.clone(), json_path, should_flatten)
|
||||||
}
|
}
|
||||||
Some(AValue::JValueAccumulatorRef(acc)) => {
|
Some(AValue::JValueAccumulatorRef(acc)) => {
|
||||||
let acc = acc.borrow();
|
let acc = acc.borrow();
|
||||||
@ -115,7 +120,7 @@ fn handle_instruction_json_path<'ctx>(
|
|||||||
let (jvalues, tetraplet_indices) = select_with_iter(acc_iter, &json_path)
|
let (jvalues, tetraplet_indices) = select_with_iter(acc_iter, &json_path)
|
||||||
.map_err(|e| JValueAccJsonPathError(acc.clone(), json_path.to_string(), e))?;
|
.map_err(|e| JValueAccJsonPathError(acc.clone(), json_path.to_string(), e))?;
|
||||||
|
|
||||||
let jvalues = jvalues.into_iter().cloned().collect();
|
let jvalues = construct_iterable_jvalues(jvalues, should_flatten)?;
|
||||||
let tetraplets = tetraplet_indices
|
let tetraplets = tetraplet_indices
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|id| SecurityTetraplet {
|
.map(|id| SecurityTetraplet {
|
||||||
@ -125,19 +130,17 @@ fn handle_instruction_json_path<'ctx>(
|
|||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let foldable = IterableVecJsonPathResult::init(jvalues, tetraplets);
|
let foldable = IterableVecJsonPathResult::init(jvalues, tetraplets);
|
||||||
Some(Box::new(foldable))
|
Ok(Some(Box::new(foldable)))
|
||||||
}
|
}
|
||||||
Some(AValue::JValueFoldCursor(fold_state)) => {
|
Some(AValue::JValueFoldCursor(fold_state)) => {
|
||||||
let iterable_value = fold_state.iterable.peek().unwrap();
|
let iterable_value = fold_state.iterable.peek().unwrap();
|
||||||
let jvalues = iterable_value.apply_json_path(json_path)?;
|
let jvalues = iterable_value.apply_json_path(json_path)?;
|
||||||
let triplet = as_triplet(&iterable_value);
|
let triplet = as_triplet(&iterable_value);
|
||||||
|
|
||||||
from_jvalues(jvalues, triplet, json_path)
|
from_jvalues(jvalues, triplet, json_path, should_flatten)
|
||||||
}
|
}
|
||||||
_ => return exec_err!(ExecutionError::VariableNotFound(variable_name.to_string())),
|
_ => return exec_err!(ExecutionError::VariableNotFound(variable_name.to_string())),
|
||||||
};
|
}
|
||||||
|
|
||||||
Ok(iterable)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn apply_json_path<'jvalue, 'str>(
|
fn apply_json_path<'jvalue, 'str>(
|
||||||
@ -150,12 +153,17 @@ fn apply_json_path<'jvalue, 'str>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Applies json_path to provided jvalues and construct IterableValue from the result and given triplet.
|
/// Applies json_path to provided jvalues and construct IterableValue from the result and given triplet.
|
||||||
fn from_jvalues(jvalues: Vec<&JValue>, triplet: Rc<ResolvedTriplet>, json_path: &str) -> Option<IterableValue> {
|
fn from_jvalues(
|
||||||
|
jvalues: Vec<&JValue>,
|
||||||
|
triplet: Rc<ResolvedTriplet>,
|
||||||
|
json_path: &str,
|
||||||
|
should_flatten: bool,
|
||||||
|
) -> ExecutionResult<Option<IterableValue>> {
|
||||||
if jvalues.is_empty() {
|
if jvalues.is_empty() {
|
||||||
return None;
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
let jvalues = jvalues.into_iter().cloned().collect();
|
let jvalues = construct_iterable_jvalues(jvalues, should_flatten)?;
|
||||||
|
|
||||||
let tetraplet = SecurityTetraplet {
|
let tetraplet = SecurityTetraplet {
|
||||||
triplet,
|
triplet,
|
||||||
@ -163,7 +171,29 @@ fn from_jvalues(jvalues: Vec<&JValue>, triplet: Rc<ResolvedTriplet>, json_path:
|
|||||||
};
|
};
|
||||||
|
|
||||||
let foldable = IterableJsonPathResult::init(jvalues, tetraplet);
|
let foldable = IterableJsonPathResult::init(jvalues, tetraplet);
|
||||||
Some(Box::new(foldable))
|
Ok(Some(Box::new(foldable)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn construct_iterable_jvalues(jvalues: Vec<&JValue>, should_flatten: bool) -> ExecutionResult<Vec<JValue>> {
|
||||||
|
if !should_flatten {
|
||||||
|
let jvalues = jvalues.into_iter().cloned().collect();
|
||||||
|
return Ok(jvalues);
|
||||||
|
}
|
||||||
|
|
||||||
|
if jvalues.len() != 1 {
|
||||||
|
let jvalues = jvalues.into_iter().cloned().collect();
|
||||||
|
let jvalue = JValue::Array(jvalues);
|
||||||
|
return exec_err!(ExecutionError::FlatteningError(jvalue));
|
||||||
|
}
|
||||||
|
|
||||||
|
match jvalues[0] {
|
||||||
|
JValue::Array(values) => Ok(values.into_iter().cloned().collect::<Vec<_>>()),
|
||||||
|
_ => {
|
||||||
|
let jvalues = jvalues.into_iter().cloned().collect();
|
||||||
|
let jvalue = JValue::Array(jvalues);
|
||||||
|
exec_err!(ExecutionError::FlatteningError(jvalue))
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn as_triplet(iterable: &IterableItem<'_>) -> Rc<ResolvedTriplet> {
|
fn as_triplet(iterable: &IterableItem<'_>) -> Rc<ResolvedTriplet> {
|
||||||
|
@ -44,7 +44,6 @@ macro_rules! execute {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let instruction = format!("{}", $self);
|
let instruction = format!("{}", $self);
|
||||||
println!("set error on {}", instruction);
|
|
||||||
let last_error = LastErrorDescriptor::new(e.clone(), instruction, None);
|
let last_error = LastErrorDescriptor::new(e.clone(), instruction, None);
|
||||||
$exec_ctx.last_error = Some(last_error);
|
$exec_ctx.last_error = Some(last_error);
|
||||||
Err(e)
|
Err(e)
|
||||||
|
@ -91,6 +91,10 @@ pub(crate) enum ExecutionError {
|
|||||||
/// This error type is produced by a mismatch to notify xor that compared values aren't equal.
|
/// This error type is produced by a mismatch to notify xor that compared values aren't equal.
|
||||||
#[error("mismatch is used without corresponding xor")]
|
#[error("mismatch is used without corresponding xor")]
|
||||||
MismatchWithoutXorError,
|
MismatchWithoutXorError,
|
||||||
|
|
||||||
|
/// This error type is produced by a mismatch to notify xor that compared values aren't equal.
|
||||||
|
#[error("jvalue '{0}' can't be flattened, to be flattened a jvalue should have an array type and consist only one value")]
|
||||||
|
FlatteningError(JValue),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ExecutionError {
|
impl ExecutionError {
|
||||||
@ -114,6 +118,7 @@ impl ExecutionError {
|
|||||||
ShadowingError(_) => 14,
|
ShadowingError(_) => 14,
|
||||||
MatchWithoutXorError => 15,
|
MatchWithoutXorError => 15,
|
||||||
MismatchWithoutXorError => 16,
|
MismatchWithoutXorError => 16,
|
||||||
|
FlatteningError(_) => 17,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -36,7 +36,11 @@ pub(crate) fn resolve_to_args<'i>(
|
|||||||
CallInstrArgValue::Boolean(value) => prepare_consts(*value, ctx),
|
CallInstrArgValue::Boolean(value) => prepare_consts(*value, ctx),
|
||||||
CallInstrArgValue::Number(value) => prepare_consts(value, ctx),
|
CallInstrArgValue::Number(value) => prepare_consts(value, ctx),
|
||||||
CallInstrArgValue::Variable(name) => prepare_variable(name, ctx),
|
CallInstrArgValue::Variable(name) => prepare_variable(name, ctx),
|
||||||
CallInstrArgValue::JsonPath { variable, path } => prepare_json_path(variable, path, ctx),
|
CallInstrArgValue::JsonPath {
|
||||||
|
variable,
|
||||||
|
path,
|
||||||
|
should_flatten,
|
||||||
|
} => prepare_json_path(variable, path, *should_flatten, ctx),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -84,12 +88,22 @@ fn prepare_variable<'i>(name: &str, ctx: &ExecutionCtx<'i>) -> ExecutionResult<(
|
|||||||
fn prepare_json_path<'i>(
|
fn prepare_json_path<'i>(
|
||||||
name: &str,
|
name: &str,
|
||||||
json_path: &str,
|
json_path: &str,
|
||||||
|
should_flatten: bool,
|
||||||
ctx: &ExecutionCtx<'i>,
|
ctx: &ExecutionCtx<'i>,
|
||||||
) -> ExecutionResult<(JValue, Vec<SecurityTetraplet>)> {
|
) -> ExecutionResult<(JValue, Vec<SecurityTetraplet>)> {
|
||||||
let resolved = resolve_to_jvaluable(name, ctx)?;
|
let resolved = resolve_to_jvaluable(name, ctx)?;
|
||||||
let (jvalue, tetraplets) = resolved.apply_json_path_with_tetraplets(json_path)?;
|
let (jvalue, tetraplets) = resolved.apply_json_path_with_tetraplets(json_path)?;
|
||||||
let jvalue = jvalue.into_iter().cloned().collect::<Vec<_>>();
|
|
||||||
let jvalue = JValue::Array(jvalue);
|
let jvalue = if should_flatten {
|
||||||
|
if jvalue.len() != 1 {
|
||||||
|
let jvalue = jvalue.into_iter().cloned().collect::<Vec<_>>();
|
||||||
|
return crate::exec_err!(ExecutionError::FlatteningError(JValue::Array(jvalue)));
|
||||||
|
}
|
||||||
|
jvalue[0].clone()
|
||||||
|
} else {
|
||||||
|
let jvalue = jvalue.into_iter().cloned().collect::<Vec<_>>();
|
||||||
|
JValue::Array(jvalue)
|
||||||
|
};
|
||||||
|
|
||||||
Ok((jvalue, tetraplets))
|
Ok((jvalue, tetraplets))
|
||||||
}
|
}
|
||||||
|
interpreter-lib/tests/flattening.rs (new file, 209 lines)
@@ -0,0 +1,209 @@
+/*
+ * Copyright 2020 Fluence Labs Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use aqua_test_utils::call_vm;
+use aqua_test_utils::create_aqua_vm;
+use aqua_test_utils::set_variable_call_service;
+use aqua_test_utils::CallServiceClosure;
+use aqua_test_utils::IValue;
+use aqua_test_utils::NEVec;
+
+use serde_json::json;
+
+use std::cell::RefCell;
+use std::rc::Rc;
+
+type ClosureSettableVar<T> = Rc<RefCell<T>>;
+
+#[derive(Default, Clone, Debug, PartialEq, Eq)]
+struct ClosureCallArgs {
+    pub(self) service_id_var: Rc<RefCell<String>>,
+    pub(self) function_name_var: ClosureSettableVar<String>,
+    pub(self) args_var: ClosureSettableVar<Vec<i32>>,
+    pub(self) tetraplets: ClosureSettableVar<Vec<Vec<String>>>,
+}
+
+fn create_check_service_closure(closure_call_args: ClosureCallArgs) -> CallServiceClosure {
+    Box::new(move |_, args| -> Option<IValue> {
+        use std::ops::Deref;
+
+        let service_id = match &args[0] {
+            IValue::String(str) => str,
+            _ => unreachable!(),
+        };
+        *closure_call_args.service_id_var.deref().borrow_mut() = service_id.clone();
+
+        let function_name = match &args[1] {
+            IValue::String(str) => str,
+            _ => unreachable!(),
+        };
+        *closure_call_args.function_name_var.deref().borrow_mut() = function_name.clone();
+
+        let call_args = match &args[2] {
+            IValue::String(str) => str,
+            _ => unreachable!(),
+        };
+
+        let call_args: Vec<i32> = serde_json::from_str(call_args).expect("json deserialization shouldn't fail");
+        *closure_call_args.args_var.deref().borrow_mut() = call_args;
+
+        Some(IValue::Record(
+            NEVec::new(vec![IValue::S32(0), IValue::String(r#""""#.to_string())]).unwrap(),
+        ))
+    })
+}
+
+#[test]
+fn flattening_scalar_arrays() {
+    let scalar_array = json!({"iterable": [
+        {"peer_id" : "local_peer_id", "service_id": "local_service_id", "function_name": "local_function_name", "args": [0, 1]},
+        {"peer_id" : "local_peer_id", "service_id": "local_service_id", "function_name": "local_function_name", "args": [2, 3]},
+    ]});
+
+    let scalar_array = serde_json::to_string(&scalar_array).expect("the default serializer shouldn't fail");
+    let set_variable_peer_id = "set_variable";
+    let mut set_variable_vm = create_aqua_vm(set_variable_call_service(scalar_array), set_variable_peer_id);
+
+    let closure_call_args = ClosureCallArgs::default();
+    let local_peer_id = "local_peer_id";
+    let mut local_vm = create_aqua_vm(create_check_service_closure(closure_call_args.clone()), local_peer_id);
+
+    let script = format!(
+        r#"
+        (seq
+            (call "{0}" ("" "") [] scalar_array)
+            (fold scalar_array.$.iterable! v
+                (seq
+                    (call v.$.peer_id! (v.$.service_id! v.$.function_name!) [v.$.args[0]! v.$.args[1]!])
+                    (next v)
+                )
+            )
+        )
+        "#,
+        set_variable_peer_id
+    );
+
+    let res = call_vm!(set_variable_vm, "asd", script.clone(), "", "");
+    let res = call_vm!(local_vm, "asd", script.clone(), "", res.data);
+
+    assert_eq!(res.ret_code, 0);
+    assert_eq!(
+        closure_call_args.service_id_var,
+        Rc::new(RefCell::new("local_service_id".to_string()))
+    );
+    assert_eq!(
+        closure_call_args.function_name_var,
+        Rc::new(RefCell::new("local_function_name".to_string()))
+    );
+    assert_eq!(closure_call_args.args_var, Rc::new(RefCell::new(vec![2, 3])));
+}
+
+#[test]
+fn flattening_streams() {
+    let stream_value = json!(
+        {"peer_id" : "local_peer_id", "service_id": "local_service_id", "function_name": "local_function_name", "args": [0, 1]}
+    );
+
+    let stream_value = serde_json::to_string(&stream_value).expect("the default serializer shouldn't fail");
+    let set_variable_peer_id = "set_variable";
+    let mut set_variable_vm = create_aqua_vm(set_variable_call_service(stream_value), set_variable_peer_id);
+
+    let closure_call_args = ClosureCallArgs::default();
+    let local_peer_id = "local_peer_id";
+    let mut local_vm = create_aqua_vm(create_check_service_closure(closure_call_args.clone()), local_peer_id);
+
+    let script = format!(
+        r#"
+        (seq
+            (seq
+                (seq
+                    (call "{0}" ("" "") [] stream[])
+                    (call "{0}" ("" "") [] stream[])
+                )
+                (call "{0}" ("" "") [] stream[])
+            )
+            (fold stream.$.[0,1,2] v
+                (seq
+                    (call v.$.peer_id! (v.$.service_id! v.$.function_name!) [v.$.args[0]! v.$.args[1]!])
+                    (next v)
+                )
+            )
+        )
+        "#,
+        set_variable_peer_id
+    );
+
+    let res = call_vm!(set_variable_vm, "asd", script.clone(), "", "");
+    let res = call_vm!(local_vm, "asd", script.clone(), "", res.data);
+
+    assert_eq!(res.ret_code, 0);
+    assert_eq!(
+        closure_call_args.service_id_var,
+        Rc::new(RefCell::new("local_service_id".to_string()))
+    );
+    assert_eq!(
+        closure_call_args.function_name_var,
+        Rc::new(RefCell::new("local_function_name".to_string()))
+    );
+    assert_eq!(closure_call_args.args_var, Rc::new(RefCell::new(vec![0, 1])));
+}
+
+#[test]
+fn test_handling_non_flattening_values() {
+    let stream_value = json!(
+        {"peer_id" : "local_peer_id", "service_id": "local_service_id", "function_name": "local_function_name", "args": [0, 1]}
+    );
+
+    let stream_value = serde_json::to_string(&stream_value).expect("the default serializer shouldn't fail");
+    let set_variable_peer_id = "set_variable";
+    let mut set_variable_vm = create_aqua_vm(set_variable_call_service(stream_value), set_variable_peer_id);
+
+    let closure_call_args = ClosureCallArgs::default();
+    let local_peer_id = "local_peer_id";
+    let mut local_vm = create_aqua_vm(create_check_service_closure(closure_call_args.clone()), local_peer_id);
+
+    let script = format!(
+        r#"
+        (seq
+            (seq
+                (seq
+                    (call "{0}" ("" "") [] stream[])
+                    (call "{0}" ("" "") [] stream[])
+                )
+                (call "{0}" ("" "") [] stream[])
+            )
+            (fold stream.$.[0,1,2]! v
+                (seq
+                    (call v.$.peer_id! (v.$.service_id! v.$.function_name!) [v.$.args[0]! v.$.args[1]!])
+                    (next v)
+                )
+            )
+        )
+        "#,
+        set_variable_peer_id
+    );
+
+    let res = call_vm!(set_variable_vm, "asd", script.clone(), "", "");
+    let res = call_vm!(local_vm, "asd", script.clone(), "", res.data);
+
+    assert_eq!(res.ret_code, 1017);
+    assert_eq!(
+        res.error_message,
+        String::from(
+            r#"jvalue '[{"peer_id":"local_peer_id","service_id":"local_service_id","function_name":"local_function_name","args":[0,1]},{"peer_id":"local_peer_id","service_id":"local_service_id","function_name":"local_function_name","args":[0,1]},{"peer_id":"local_peer_id","service_id":"local_service_id","function_name":"local_function_name","args":[0,1]}]' can't be flattened, to be flattened a jvalue should have an array type and consist only one value"#
        )
    );
}
@@ -57,8 +57,8 @@ fn join_chat() {
                 (fold members m
                     (par
                         (seq
-                            (call m.$.[1] ("identity" "") [] void[])
-                            (call m.$.[0] ("fgemb3" "add") [] void3[])
+                            (call m.$.[1]! ("identity" "") [] void[])
+                            (call m.$.[0]! ("fgemb3" "add") [] void3[])
                         )
                         (next m)
                     )
|
|||||||
[package]
|
[package]
|
||||||
name = "aquamarine"
|
name = "aquamarine"
|
||||||
version = "0.6.0"
|
version = "0.7.0"
|
||||||
authors = ["Fluence Labs"]
|
authors = ["Fluence Labs"]
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
|
|
||||||