Introduce custom lexer (#59)

vms 2021-01-22 18:54:05 +03:00 committed by GitHub
parent 459615e828
commit f523f27705
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 3483 additions and 2715 deletions

Cargo.lock generated
View File

@@ -11,7 +11,7 @@ dependencies = [
[[package]]
name = "air-parser"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"codespan",
"codespan-reporting",
@@ -21,6 +21,7 @@ dependencies = [
"lalrpop-util",
"regex",
"serde",
"thiserror",
]
[[package]]

View File

@@ -1,6 +1,6 @@
[package]
name = "air-parser"
version = "0.1.0"
version = "0.2.0"
authors = ["Fluence Labs"]
edition = "2018"
license = "Apache-2.0"
@@ -14,9 +14,11 @@ regex = "1.4.1"
codespan = "0.9.5"
codespan-reporting = "0.9.5"
# TODO: hide serde behind feature
# TODO: hide serde behind a feature
serde = { version = "=1.0.118", features = ["rc"] }
thiserror = "1.0.23"
[dev-dependencies]
fstrings = "0.2.3"
criterion = "0.3.3"

View File

@@ -23,7 +23,8 @@ use criterion::criterion_group;
use criterion::criterion_main;
use criterion::Criterion;
use air_parser::InstrParser;
use air_parser::AIRLexer;
use air_parser::AIRParser;
const SOURCE_CODE_BAD: &'static str = r#"(seq
(seq
@@ -71,11 +72,11 @@ mod gen {
}
fn create_parser(c: &mut Criterion) {
c.bench_function("create_parser", move |b| b.iter(move || InstrParser::new()));
c.bench_function("create_parser", move |b| b.iter(move || AIRParser::new()));
}
fn clone_parser(c: &mut Criterion) {
let parser = InstrParser::new();
let parser = AIRParser::new();
c.bench_function("clone_parser", move |b| {
let parser = parser.clone();
b.iter(move || parser.clone())
@@ -83,7 +84,7 @@ fn clone_parser(c: &mut Criterion) {
}
fn clone_parser_rc(c: &mut Criterion) {
let parser = Rc::new(InstrParser::new());
let parser = Rc::new(AIRParser::new());
c.bench_function("clone_parser_rc", move |b| {
let parser = parser.clone();
b.iter(move || parser.clone())
@@ -91,15 +92,17 @@ fn clone_parser_rc(c: &mut Criterion) {
}
fn parse(c: &mut Criterion) {
let parser = Rc::new(InstrParser::new());
let parser = Rc::new(AIRParser::new());
c.bench_function(
format!("parse {} bytes", SOURCE_CODE_GOOD.len()).as_str(),
move |b| {
let parser = parser.clone();
b.iter(move || {
let lexer = AIRLexer::new(SOURCE_CODE_GOOD);
parser
.clone()
.parse(&mut Vec::new(), SOURCE_CODE_GOOD)
.parse("", &mut Vec::new(), lexer)
.expect("success")
})
},
@@ -107,18 +110,21 @@ fn parse(c: &mut Criterion) {
}
fn parse_to_fail(c: &mut Criterion) {
let parser = Rc::new(InstrParser::new());
let parser = Rc::new(AIRParser::new());
c.bench_function(
format!("parse {} bytes to FAIL", SOURCE_CODE_BAD.len()).as_str(),
move |b| {
let parser = parser.clone();
b.iter(move || parser.clone().parse(&mut Vec::new(), SOURCE_CODE_BAD))
b.iter(move || {
let lexer = AIRLexer::new(SOURCE_CODE_BAD);
parser.clone().parse("", &mut Vec::new(), lexer)
})
},
);
}
fn parse_deep(c: &mut Criterion) {
let parser = Rc::new(InstrParser::new());
let parser = Rc::new(AIRParser::new());
let source_code: Vec<_> = (1..10).map(gen::deep_seq).collect();
let index: Vec<_> = source_code
.iter()
@@ -132,9 +138,11 @@ fn parse_deep(c: &mut Criterion) {
let parser = parser.clone();
let code = &source_code[*i];
b.iter(move || {
let lexer = AIRLexer::new(code);
parser
.clone()
.parse(&mut Vec::new(), code)
.parse("", &mut Vec::new(), lexer)
.expect("success")
});
},
@@ -142,11 +150,32 @@ fn parse_deep(c: &mut Criterion) {
);
}
fn parse_dashboard_script(c: &mut Criterion) {
let parser = Rc::new(AIRParser::new());
const DASHBOARD_SCRIPT: &str = include_str!("../../../stepper-lib/tests/scripts/dashboard.clj");
c.bench_function(
format!("parse {} bytes", DASHBOARD_SCRIPT.len()).as_str(),
move |b| {
let parser = parser.clone();
b.iter(move || {
let lexer = AIRLexer::new(DASHBOARD_SCRIPT);
parser
.clone()
.parse("", &mut Vec::new(), lexer)
.expect("success")
})
},
);
}
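A minimal sketch of how the new entry points fit together in client code, based on the calls visible in these benchmarks and in air_parser.rs below; the first argument to parse is the raw input, which the grammar uses only to tie the 'input lifetime to the tokens:

use air_parser::{AIRLexer, AIRParser};

fn main() {
    let air_script = "(null)";

    let parser = AIRParser::new();
    let lexer = AIRLexer::new(air_script);

    let mut errors = Vec::new();
    match parser.parse(air_script, &mut errors, lexer) {
        Ok(ast) if errors.is_empty() => println!("parsed: {:?}", ast),
        Ok(_) => eprintln!("parsed with {} recovered errors", errors.len()),
        Err(e) => eprintln!("parse error: {:?}", e),
    }
}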
criterion_group!(
parser,
create_parser,
parse,
parse_to_fail,
parse_dashboard_script,
parse_deep,
clone_parser,
clone_parser_rc,

View File

@@ -1,92 +0,0 @@
use crate::ast::*;
use crate::lalrpop::parser::InstructionError;
use lalrpop_util::ErrorRecovery;
use std::rc::Rc;
grammar<'err>(errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, InstructionError>>);
extern {
type Error = InstructionError;
}
pub Instr: Box<Instruction<'input>> = {
"(" "seq" <l:Instr> <r:Instr> ")" => Box::new(Instruction::Seq(Seq(l, r))),
"(" "par" <l:Instr> <r:Instr> ")" => Box::new(Instruction::Par(Par(l, r))),
"(" "xor" <l:Instr> <r:Instr> ")" => Box::new(Instruction::Xor(Xor(l, r))),
"(" "call" <p:PeerPart> <f:FPart> <args:Args> <output:Output?> ")" => {
let output = output.unwrap_or(CallOutput::None);
let args = Rc::new(args);
Box::new(Instruction::Call(Call{peer_part: p, function_part: f, args, output}))
},
"(" "fold" <iterable:InstructionValue> <iterator:Alphanumeric> <i:Instr> ")" => {
let instruction = Rc::new(*i);
Box::new(Instruction::Fold(Fold{ iterable, iterator, instruction }))
},
"(" "next" <i:Alphanumeric> ")" => Box::new(Instruction::Next(Next(i))),
"(" "null" ")" => Box::new(Instruction::Null(Null)),
! => { errors.push(<>); Box::new(Instruction::Error) },
}
Args: Vec<InstructionValue<'input>> = {
"[" <args:(<Arg>)*> "]" => args
}
FPart: FunctionPart<'input> = {
<f:Function> => FunctionPart::FuncName(f),
"(" <sid:ServiceId> <f:Function> ")" => FunctionPart::ServiceIdWithFuncName(sid, f),
}
PeerPart: PeerPart<'input> = {
<pid:PeerId> => PeerPart::PeerPk(pid),
"(" <pid:PeerId> <sid:ServiceId> ")" => PeerPart::PeerPkWithServiceId(pid, sid),
}
Output: CallOutput<'input> = {
<o:Alphanumeric> => CallOutput::Scalar(o),
<o:ACCUMULATOR> => CallOutput::Accumulator(&o[..o.len()-2]),
};
Function = InstructionValue;
PeerId = InstructionValue;
ServiceId = InstructionValue;
Arg = InstructionValue;
InstructionValue: InstructionValue<'input> = {
"\"" "\"" => InstructionValue::Literal(""), // TODO: signal absence somehow?
"\"" <v: Alphanumeric> "\"" => InstructionValue::Literal(v),
<v:Alphanumeric> => InstructionValue::Variable(v),
<v:JSON_PATH> => {
let mut path = v.splitn(2, ".");
let variable = path.next().expect("must contain dot");
let path = path.next().expect("contain component after dot");
InstructionValue::JsonPath { variable, path }
},
INIT_PEER_ID => InstructionValue::InitPeerId,
}
Alphanumeric = ALPHANUMERIC;
match {
r"[\w_-]+" => ALPHANUMERIC,
r"[\w_-]+\[\]" => ACCUMULATOR,
r#"[\w_-]+\.*\$([\w._-]*(\[[\w"]+\])*!*)+"# => JSON_PATH,
r#"%init_peer_id%"# => INIT_PEER_ID,
"seq",
"call",
"null",
"par",
"xor",
"fold",
"next",
// ignore patterns
// see: https://lalrpop.github.io/lalrpop/lexer_tutorial/001_lexer_gen.html#customizing-skipping-between-tokens
r"\s*" => { }, // The default whitespace skipping is disabled if an `ignore pattern` is specified
r";[^\n\r]*[\n\r]*" => { }, // Skip `; comments`
} else {
_
}

File diff suppressed because one or more lines are too long

View File

@@ -1,22 +1,36 @@
#![deny(unused_imports, unused_variables, dead_code)]
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#![deny(
dead_code,
nonstandard_style,
unused_imports,
unused_mut,
unused_variables,
unused_unsafe,
unreachable_patterns
)]
mod parser;
pub use parser::ast;
pub use parser::parse;
pub use parser::AIRLexer;
pub use parser::AIRParser;
#[cfg(test)]
#[macro_use]
extern crate fstrings;
mod lalrpop {
#[cfg(test)]
pub mod tests;
// aqua is auto-generated, so exclude it from `cargo fmt -- --check` and `cargo clippy`
#[rustfmt::skip]
#[allow(clippy::all)]
pub mod aqua;
pub mod parser;
}
pub mod ast;
pub use lalrpop::parser::parse;
// #[cfg(test)]
pub use lalrpop::aqua::InstrParser;

View File

@@ -0,0 +1,104 @@
use crate::parser::ast::*;
use crate::parser::into_variable_and_path;
use crate::parser::lexer::LexerError;
use crate::parser::lexer::Token;
use lalrpop_util::ErrorRecovery;
use std::rc::Rc;
// the only reason the input is needed here is to introduce the lifetime for Token
grammar<'err, 'input>(input: &'input str, errors: &'err mut Vec<ErrorRecovery<usize, Token<'input>, LexerError>>);
pub AIR = Instr;
Instr: Box<Instruction<'input>> = {
"(" call <p:PeerPart> <f:FPart> <args:Args> <output:Output?> ")" => {
let output = output.unwrap_or(CallOutput::None);
let args = Rc::new(args);
Box::new(Instruction::Call(Call{peer_part: p, function_part: f, args, output}))
},
"(" seq <l:Instr> <r:Instr> ")" => Box::new(Instruction::Seq(Seq(l, r))),
"(" par <l:Instr> <r:Instr> ")" => Box::new(Instruction::Par(Par(l, r))),
"(" null ")" => Box::new(Instruction::Null(Null)),
"(" fold <iterable:Iterable> <iterator:Alphanumeric> <i:Instr> ")" => {
let instruction = Rc::new(*i);
Box::new(Instruction::Fold(Fold{ iterable, iterator, instruction }))
},
"(" next <i:Alphanumeric> ")" => Box::new(Instruction::Next(Next(i))),
"(" xor <l:Instr> <r:Instr> ")" => Box::new(Instruction::Xor(Xor(l, r))),
! => { errors.push(<>); Box::new(Instruction::Error) },
}
Args: Vec<CallInstructionValue<'input>> = {
"[" <args:(<Arg>)*> "]" => args
}
FPart: FunctionPart<'input> = {
<f:Function> => FunctionPart::FuncName(f),
"(" <sid:ServiceId> <f:Function> ")" => FunctionPart::ServiceIdWithFuncName(sid, f),
}
PeerPart: PeerPart<'input> = {
<pid:PeerId> => PeerPart::PeerPk(pid),
"(" <pid:PeerId> <sid:ServiceId> ")" => PeerPart::PeerPkWithServiceId(pid, sid),
}
Output: CallOutput<'input> = {
<s:Alphanumeric> => CallOutput::Scalar(s),
<a:Accumulator> => CallOutput::Accumulator(a),
};
Iterable: IterableValue<'input> = {
<s:Alphanumeric> => IterableValue::Variable(s),
<v:JsonPath> => {
let (variable, path) = into_variable_and_path(v.0, v.1);
IterableValue::JsonPath { variable, path }
},
}
Function = CallInstructionValue;
PeerId = CallInstructionValue;
ServiceId = CallInstructionValue;
Arg = CallInstructionValue;
CallInstructionValue: CallInstructionValue<'input> = {
<s:Literal> => CallInstructionValue::Literal(s),
<s:Alphanumeric> => CallInstructionValue::Variable(s),
<v:JsonPath> => {
let (variable, path) = into_variable_and_path(v.0, v.1);
CallInstructionValue::JsonPath { variable, path }
},
InitPeerId => CallInstructionValue::InitPeerId,
}
extern {
type Location = usize;
type Error = LexerError;
enum Token<'input> {
"(" => Token::OpenRoundBracket,
")" => Token::CloseRoundBracket,
"[" => Token::OpenSquareBracket,
"]" => Token::CloseSquareBracket,
Alphanumeric => Token::Alphanumeric(<&'input str>),
Literal => Token::StringLiteral(<&'input str>),
JsonPath => Token::JsonPath(<&'input str>, <usize>),
Accumulator => Token::Accumulator(<&'input str>),
InitPeerId => Token::InitPeerId,
call => Token::Call,
seq => Token::Seq,
par => Token::Par,
null => Token::Null,
fold => Token::Fold,
xor => Token::Xor,
next => Token::Next,
}
}
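For orientation: with an extern block like this, LALRPOP drives the parser from any iterator of spanned token results, which is exactly the shape AIRLexer below produces. A sketch of that contract, matching the Spanned alias introduced later in this commit:

// What LALRPOP consumes from an external lexer:
// an iterator of Result<(Loc, Token, Loc), Error> items.
type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;

// e.g. for the input "(null)" the lexer yields:
//   Ok((0, Token::OpenRoundBracket, 1))
//   Ok((1, Token::Null, 5))
//   Ok((5, Token::CloseRoundBracket, 6))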

File diff suppressed because it is too large

View File

@@ -14,9 +14,11 @@
* limitations under the License.
*/
use super::aqua;
use crate::ast::Instruction;
use crate::lalrpop::aqua::Token;
use super::air;
use super::ast::Instruction;
use super::lexer::AIRLexer;
use super::lexer::LexerError;
use super::lexer::Token;
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::files::SimpleFiles;
@@ -24,34 +26,21 @@ use codespan_reporting::term;
use codespan_reporting::term::termcolor::{Buffer, ColorChoice, StandardStream};
use lalrpop_util::{ErrorRecovery, ParseError};
use std::fmt::Formatter;
#[derive(Debug)]
/// Represents custom parsing errors. Isn't used yet.
pub enum InstructionError {
#[allow(dead_code)]
InvalidPeerId,
}
impl std::error::Error for InstructionError {}
impl std::fmt::Display for InstructionError {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "InstructionError")
}
}
use air::AIRParser;
// Caching parser to cache internal regexes, which are expensive to instantiate
// See also https://github.com/lalrpop/lalrpop/issues/269
thread_local!(static PARSER: aqua::InstrParser = aqua::InstrParser::new());
thread_local!(static PARSER: AIRParser = AIRParser::new());
/// Parse AIR `source_code` to `Box<Instruction>`
pub fn parse(source_code: &str) -> Result<Box<Instruction>, String> {
pub fn parse(air_script: &str) -> Result<Box<Instruction<'_>>, String> {
let mut files = SimpleFiles::new();
let file_id = files.add("script.aqua", source_code);
let file_id = files.add("script.aqua", air_script);
PARSER.with(|parser| {
let mut errors = Vec::new();
match parser.parse(&mut errors, source_code) {
let lexer = AIRLexer::new(air_script);
match parser.parse(air_script, &mut errors, lexer) {
Ok(r) if errors.is_empty() => Ok(r),
Ok(_) => Err(report_errors(file_id, files, errors)),
Err(err) => Err(report_errors(
@@ -69,9 +58,29 @@ pub fn parse(source_code: &str) -> Result<Box<Instruction>, String> {
fn report_errors(
file_id: usize,
files: SimpleFiles<&str, &str>,
errors: Vec<ErrorRecovery<usize, Token, InstructionError>>,
errors: Vec<ErrorRecovery<usize, Token<'_>, LexerError>>,
) -> String {
let labels: Vec<Label<usize>> = errors
let labels = errors_to_labels(file_id, errors);
let diagnostic = Diagnostic::error().with_labels(labels);
// Write to stderr
let writer = StandardStream::stderr(ColorChoice::Auto);
let config = codespan_reporting::term::Config::default();
term::emit(&mut writer.lock(), &config, &files, &diagnostic).expect("term emit to stderr");
// Return as a string
let mut buffer = Buffer::no_color();
term::emit(&mut buffer, &config, &files, &diagnostic).expect("term emit to buffer");
String::from_utf8_lossy(buffer.as_slice())
.as_ref()
.to_string()
}
fn errors_to_labels(
file_id: usize,
errors: Vec<ErrorRecovery<usize, Token<'_>, LexerError>>,
) -> Vec<Label<usize>> {
errors
.into_iter()
.map(|err| match err.error {
ParseError::UnrecognizedToken {
@@ -89,25 +98,9 @@ fn report_errors(
Label::primary(file_id, location..(location + 1))
.with_message(format!("expected {}", pretty_expected(expected)))
}
// TODO: capture start & end in user error; maybe return it as a separate Diagnostic::error?
ParseError::User { error } => {
Label::primary(file_id, 0..0).with_message(error.to_string())
}
ParseError::User { error } => lexical_error_to_label(file_id, error),
})
.collect();
let diagnostic = Diagnostic::error().with_labels(labels);
let config = codespan_reporting::term::Config::default();
// Write to stderr
let writer = StandardStream::stderr(ColorChoice::Auto);
term::emit(&mut writer.lock(), &config, &files, &diagnostic).expect("term emit to stderr");
// Return as a string
let mut buffer = Buffer::no_color();
term::emit(&mut buffer, &config, &files, &diagnostic).expect("term emit to buffer");
String::from_utf8_lossy(buffer.as_slice())
.as_ref()
.to_string()
.collect()
}
fn pretty_expected(expected: Vec<String>) -> String {
@@ -117,3 +110,24 @@ fn pretty_expected(expected: Vec<String>) -> String {
expected.join(" or ")
}
}
fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label<usize> {
use LexerError::*;
match error {
UnclosedQuote(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
EmptyString(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
IsNotAlphanumeric(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
EmptyAccName(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
InvalidJsonPath(start, end) => {
Label::primary(file_id, start..end).with_message(error.to_string())
}
}
}
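Every LexerError variant carries the same (start, end) payload, so the five identical arms above could be collapsed with an or-pattern; an equivalent sketch (not part of the commit):

fn lexical_error_to_label(file_id: usize, error: LexerError) -> Label<usize> {
    use LexerError::*;
    // All variants carry a (start, end) span, so one or-pattern covers them.
    let (start, end) = match &error {
        UnclosedQuote(s, e)
        | EmptyString(s, e)
        | IsNotAlphanumeric(s, e)
        | EmptyAccName(s, e)
        | InvalidJsonPath(s, e) => (*s, *e),
    };
    Label::primary(file_id, start..end).with_message(error.to_string())
}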

View File

@@ -34,32 +34,38 @@ pub enum Instruction<'i> {
#[derive(Serialize, Debug, PartialEq, Eq)]
pub enum PeerPart<'i> {
PeerPk(InstructionValue<'i>),
PeerPkWithServiceId(InstructionValue<'i>, InstructionValue<'i>),
PeerPk(CallInstructionValue<'i>),
PeerPkWithServiceId(CallInstructionValue<'i>, CallInstructionValue<'i>),
}
#[derive(Serialize, Debug, PartialEq, Eq)]
pub enum FunctionPart<'i> {
FuncName(InstructionValue<'i>),
ServiceIdWithFuncName(InstructionValue<'i>, InstructionValue<'i>),
FuncName(CallInstructionValue<'i>),
ServiceIdWithFuncName(CallInstructionValue<'i>, CallInstructionValue<'i>),
}
#[derive(Serialize, Debug, PartialEq, Eq)]
pub struct Call<'i> {
pub peer_part: PeerPart<'i>,
pub function_part: FunctionPart<'i>,
pub args: Rc<Vec<InstructionValue<'i>>>,
pub args: Rc<Vec<CallInstructionValue<'i>>>,
pub output: CallOutput<'i>,
}
#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize, Deserialize)]
pub enum InstructionValue<'i> {
pub enum CallInstructionValue<'i> {
Variable(&'i str),
Literal(&'i str),
JsonPath { variable: &'i str, path: &'i str },
InitPeerId,
}
#[derive(Debug, PartialEq, Eq, Clone, Hash, Serialize, Deserialize)]
pub enum IterableValue<'i> {
Variable(&'i str),
JsonPath { variable: &'i str, path: &'i str },
}
#[derive(Serialize, Debug, Hash, PartialEq, Eq, Clone)]
pub enum CallOutput<'i> {
Scalar(&'i str),
@@ -78,7 +84,7 @@ pub struct Xor<'i>(pub Box<Instruction<'i>>, pub Box<Instruction<'i>>);
#[derive(Serialize, Debug, PartialEq, Eq)]
pub struct Fold<'i> {
pub iterable: InstructionValue<'i>,
pub iterable: IterableValue<'i>,
pub iterator: &'i str,
pub instruction: Rc<Instruction<'i>>,
}
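To make the renames concrete, a sketch that builds a call node with the new type names, mirroring the test DSL later in this diff:

use air_parser::ast::{Call, CallInstructionValue, CallOutput, FunctionPart, PeerPart};
use std::rc::Rc;

fn main() {
    // (call "peer" ("service" "fn") [arg] out)
    let call = Call {
        peer_part: PeerPart::PeerPk(CallInstructionValue::Literal("peer")),
        function_part: FunctionPart::ServiceIdWithFuncName(
            CallInstructionValue::Literal("service"),
            CallInstructionValue::Literal("fn"),
        ),
        args: Rc::new(vec![CallInstructionValue::Variable("arg")]),
        output: CallOutput::Scalar("out"),
    };
    println!("{:?}", call);
}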

View File

@@ -0,0 +1,276 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::errors::LexerError;
use super::token::Token;
use std::iter::Peekable;
use std::str::CharIndices;
pub type Spanned<Token, Loc, Error> = Result<(Loc, Token, Loc), Error>;
pub struct AIRLexer<'input> {
input: &'input str,
chars: Peekable<CharIndices<'input>>,
}
impl<'input> Iterator for AIRLexer<'input> {
type Item = Spanned<Token<'input>, usize, LexerError>;
fn next(&mut self) -> Option<Self::Item> {
self.next_token()
}
}
impl<'input> AIRLexer<'input> {
pub fn new(input: &'input str) -> Self {
Self {
input,
chars: input.char_indices().peekable(),
}
}
pub fn next_token(&mut self) -> Option<Spanned<Token<'input>, usize, LexerError>> {
while let Some((start_pos, ch)) = self.chars.next() {
match ch {
'(' => return Some(Ok((start_pos, Token::OpenRoundBracket, start_pos + 1))),
')' => return Some(Ok((start_pos, Token::CloseRoundBracket, start_pos + 1))),
'[' => return Some(Ok((start_pos, Token::OpenSquareBracket, start_pos + 1))),
']' => return Some(Ok((start_pos, Token::CloseSquareBracket, start_pos + 1))),
';' => self.skip_comment(),
ch if ch.is_whitespace() => {}
'"' => return self.tokenize_string_literal(start_pos),
_ => return self.tokenize_string(start_pos),
}
}
None
}
fn skip_comment(&mut self) {
const NEW_LINE: char = '\n'; // TODO: consider '\n\r'
while let Some((_, ch)) = self.chars.next() {
if ch == NEW_LINE {
break;
}
}
}
#[allow(clippy::unnecessary_wraps)]
fn tokenize_string_literal(
&mut self,
start_pos: usize,
) -> Option<Spanned<Token<'input>, usize, LexerError>> {
while let Some((pos, ch)) = self.chars.next() {
if ch == '"' {
// + 1 to count an open double quote
let string_size = pos - start_pos + 1;
return Some(Ok((
start_pos,
Token::StringLiteral(&self.input[start_pos + 1..pos]),
start_pos + string_size,
)));
}
}
Some(Err(LexerError::UnclosedQuote(start_pos, self.input.len())))
}
#[allow(clippy::unnecessary_wraps)]
fn tokenize_string(
&mut self,
start_pos: usize,
) -> Option<Spanned<Token<'input>, usize, LexerError>> {
let end_pos = self.advance_to_token_end(start_pos);
// this slicing is safe here because borders come from the chars iterator
let token_str = &self.input[start_pos..end_pos];
let token = match string_to_token(token_str, start_pos) {
Ok(token) => token,
Err(e) => return Some(Err(e)),
};
let token_str_len = end_pos - start_pos;
Some(Ok((start_pos, token, start_pos + token_str_len)))
}
fn advance_to_token_end(&mut self, start_pos: usize) -> usize {
let mut end_pos = start_pos;
let mut round_brackets_balance: i64 = 0;
let mut square_brackets_balance: i64 = 0;
while let Some((pos, ch)) = self.chars.peek() {
end_pos = *pos;
let ch = *ch;
update_brackets_count(
ch,
&mut round_brackets_balance,
&mut square_brackets_balance,
);
if should_stop(ch, round_brackets_balance, square_brackets_balance) {
break;
}
self.chars.next();
}
self.advance_end_pos(&mut end_pos);
end_pos
}
// if it was the last char, advance the end position.
fn advance_end_pos(&mut self, end_pos: &mut usize) {
if self.chars.peek().is_none() {
*end_pos = self.input.len();
}
}
}
fn update_brackets_count(
ch: char,
round_brackets_balance: &mut i64,
square_brackets_balance: &mut i64,
) {
if ch == '(' {
*round_brackets_balance += 1;
} else if ch == ')' {
*round_brackets_balance -= 1;
} else if ch == '[' {
*square_brackets_balance += 1;
} else if ch == ']' {
*square_brackets_balance -= 1;
}
}
fn should_stop(ch: char, round_brackets_balance: i64, open_square_brackets_balance: i64) -> bool {
ch.is_whitespace() || round_brackets_balance < 0 || open_square_brackets_balance < 0
}
fn string_to_token(input: &str, start_pos: usize) -> Result<Token, LexerError> {
match input {
"" => Err(LexerError::EmptyString(start_pos, start_pos)),
CALL_INSTR => Ok(Token::Call),
SEQ_INSTR => Ok(Token::Seq),
PAR_INSTR => Ok(Token::Par),
NULL_INSTR => Ok(Token::Null),
FOLD_INSTR => Ok(Token::Fold),
XOR_INSTR => Ok(Token::Xor),
NEXT_INSTR => Ok(Token::Next),
INIT_PEER_ID => Ok(Token::InitPeerId),
str if str.ends_with(ACC_END_TAG) => try_parse_accumulator(str, start_pos),
str => try_parse_call_variable(str, start_pos),
}
}
fn try_parse_accumulator(maybe_acc: &str, start: usize) -> Result<Token, LexerError> {
const ACC_END_TAG_SIZE: usize = 2;
let str_len = maybe_acc.len();
if str_len == ACC_END_TAG_SIZE {
return Err(LexerError::EmptyAccName(start, start));
}
// this slice is safe here because the string has been checked to end with "[]"
let maybe_acc = &maybe_acc[0..str_len - ACC_END_TAG_SIZE];
for (pos, ch) in maybe_acc.chars().enumerate() {
if !is_aqua_alphanumeric(ch) {
return Err(LexerError::IsNotAlphanumeric(start + pos, start + pos));
}
}
Ok(Token::Accumulator(maybe_acc))
}
fn try_parse_call_variable(maybe_var: &str, start: usize) -> Result<Token, LexerError> {
let mut json_path_start_pos = None;
for (pos, ch) in maybe_var.chars().enumerate() {
if !json_path_started(json_path_start_pos) && is_json_path_start_point(ch) {
json_path_start_pos = Some(pos);
} else if !json_path_started(json_path_start_pos) && !is_aqua_alphanumeric(ch) {
return Err(LexerError::IsNotAlphanumeric(start + pos, start + pos));
} else if json_path_started(json_path_start_pos) && !json_path_allowed_char(ch) {
return Err(LexerError::InvalidJsonPath(start + pos, start + pos));
}
}
match json_path_start_pos {
Some(pos) => Ok(Token::JsonPath(maybe_var, pos)),
None => Ok(Token::Alphanumeric(maybe_var)),
}
}
const CALL_INSTR: &str = "call";
const SEQ_INSTR: &str = "seq";
const PAR_INSTR: &str = "par";
const NULL_INSTR: &str = "null";
const FOLD_INSTR: &str = "fold";
const XOR_INSTR: &str = "xor";
const NEXT_INSTR: &str = "next";
const INIT_PEER_ID: &str = "%init_peer_id%";
const ACC_END_TAG: &str = "[]";
fn is_json_path_start_point(ch: char) -> bool {
ch == '.'
}
fn json_path_started(first_dot_pos: Option<usize>) -> bool {
first_dot_pos.is_some()
}
fn json_path_allowed_char(ch: char) -> bool {
// there is no JSON path spec yet, but a possible reference can be found here:
// https://packagist.org/packages/softcreatr/jsonpath
// a good old match is faster here than a hash set
match ch {
'$' => true,
'@' => true,
'[' => true,
']' => true,
'(' => true,
')' => true,
':' => true,
'?' => true,
'.' => true,
'*' => true,
',' => true,
'"' => true,
'\'' => true,
'!' => true,
ch => is_aqua_alphanumeric(ch),
}
}
fn is_aqua_alphanumeric(ch: char) -> bool {
ch.is_alphanumeric() || ch == '_' || ch == '-'
}
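A quick way to see the bracket-balance stop rule in action is to drive the lexer directly: for "(call)" the "call" token ends when the peeked ')' makes the round-bracket balance negative, so the bracket comes out as its own token. A sketch; the expected output matches the lexer tests later in this commit:

use air_parser::AIRLexer;

fn main() {
    let tokens: Vec<_> = AIRLexer::new("(call)").collect();
    // Per the tests below:
    // [Ok((0, OpenRoundBracket, 1)), Ok((1, Call, 5)), Ok((5, CloseRoundBracket, 6))]
    println!("{:?}", tokens);
}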

View File

@@ -0,0 +1,41 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use thiserror::Error as ThisError;
#[derive(ThisError, Debug, Clone, PartialEq, Eq, Hash)]
pub enum LexerError {
#[error("this string literal has unclosed quote")]
UnclosedQuote(usize, usize),
#[error("empty string aren't allowed in this position")]
EmptyString(usize, usize),
#[error("only alphanumeric and _, - characters are allowed in this position")]
IsNotAlphanumeric(usize, usize),
#[error("an accumulator name should be non empty")]
EmptyAccName(usize, usize),
#[error("invalid character in json path")]
InvalidJsonPath(usize, usize),
}
impl From<std::convert::Infallible> for LexerError {
fn from(_: std::convert::Infallible) -> Self {
unreachable!()
}
}

View File

@@ -0,0 +1,26 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
mod air_lexer;
mod errors;
mod token;
#[cfg(test)]
pub mod tests;
pub use air_lexer::AIRLexer;
pub use errors::LexerError;
pub use token::Token;

View File

@@ -0,0 +1,193 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::air_lexer::Spanned;
use super::AIRLexer;
use super::LexerError;
use super::Token;
fn run_lexer(input: &str) -> Vec<Spanned<Token<'_>, usize, LexerError>> {
let lexer = AIRLexer::new(input);
lexer.collect()
}
#[test]
fn air_instructions() {
let call_tokens = run_lexer("call");
assert_eq!(call_tokens, vec![Ok((0, Token::Call, 4))]);
let call_tokens = run_lexer("(call)");
assert_eq!(
call_tokens,
vec![
Ok((0, Token::OpenRoundBracket, 1)),
Ok((1, Token::Call, 5)),
Ok((5, Token::CloseRoundBracket, 6))
]
);
let par_tokens = run_lexer("par");
assert_eq!(par_tokens, vec![Ok((0, Token::Par, 3))]);
let par_tokens = run_lexer("(par)");
assert_eq!(
par_tokens,
vec![
Ok((0, Token::OpenRoundBracket, 1)),
Ok((1, Token::Par, 4)),
Ok((4, Token::CloseRoundBracket, 5))
]
);
let seq_tokens = run_lexer("seq");
assert_eq!(seq_tokens, vec![Ok((0, Token::Seq, 3))]);
let seq_tokens = run_lexer("(seq)");
assert_eq!(
seq_tokens,
vec![
Ok((0, Token::OpenRoundBracket, 1)),
Ok((1, Token::Seq, 4)),
Ok((4, Token::CloseRoundBracket, 5))
]
);
let null_tokens = run_lexer("null");
assert_eq!(null_tokens, vec![Ok((0, Token::Null, 4))]);
let null_tokens = run_lexer("(null)");
assert_eq!(
null_tokens,
vec![
Ok((0, Token::OpenRoundBracket, 1)),
Ok((1, Token::Null, 5)),
Ok((5, Token::CloseRoundBracket, 6))
]
);
let fold_tokens = run_lexer("fold");
assert_eq!(fold_tokens, vec![Ok((0, Token::Fold, 4))]);
let fold_tokens = run_lexer("(fold)");
assert_eq!(
fold_tokens,
vec![
Ok((0, Token::OpenRoundBracket, 1)),
Ok((1, Token::Fold, 5)),
Ok((5, Token::CloseRoundBracket, 6))
]
);
let next_tokens = run_lexer("next");
assert_eq!(next_tokens, vec![Ok((0, Token::Next, 4))]);
let next_tokens = run_lexer("(next)");
assert_eq!(
next_tokens,
vec![
Ok((0, Token::OpenRoundBracket, 1)),
Ok((1, Token::Next, 5)),
Ok((5, Token::CloseRoundBracket, 6))
]
);
}
#[test]
fn init_peer_id() {
const INIT_PEER_ID: &str = "%init_peer_id%";
let init_peer_id_tokens = run_lexer(INIT_PEER_ID);
assert_eq!(
init_peer_id_tokens,
vec![Ok((0, Token::InitPeerId, INIT_PEER_ID.len()))]
);
}
#[test]
fn accumulator() {
const ACC: &str = "accumulator____asdasd[]";
let init_peer_id_tokens = run_lexer(ACC);
assert_eq!(
init_peer_id_tokens,
vec![Ok((
0,
Token::Accumulator(&ACC[0..ACC.len() - 2]),
ACC.len()
))]
);
}
#[test]
fn string_literal() {
const STRING_LITERAL: &str = r#""some_string""#;
let string_literal_tokens = run_lexer(STRING_LITERAL);
assert_eq!(
string_literal_tokens,
vec![Ok((
0,
Token::StringLiteral(&STRING_LITERAL[1..STRING_LITERAL.len() - 1]),
STRING_LITERAL.len()
))]
);
}
#[test]
fn json_path() {
// this json path contains all characters allowed in a json path
const JSON_PATH: &str = r#"value.$[$@[]():?.*,"!]"#;
let json_path_tokens = run_lexer(JSON_PATH);
assert_eq!(
json_path_tokens,
vec![Ok((0, Token::JsonPath(JSON_PATH, 5), JSON_PATH.len()))]
);
}
#[test]
fn unclosed_quote() {
const UNCLOSED_QUOTE_AIR: &str = r#"(call ("peer_name) ("service_name" "function_name") [])"#;
let unclosed_quote_air_tokens = run_lexer(UNCLOSED_QUOTE_AIR);
assert_eq!(
unclosed_quote_air_tokens[4],
Err(LexerError::IsNotAlphanumeric(33, 33))
);
}
#[test]
fn bad_value() {
// the value contains '!', which is allowed only inside a json path
const INVALID_VALUE: &str = r#"val!ue.$[$@[]():?.*,"\!]"#;
let invalid_value_tokens = run_lexer(INVALID_VALUE);
assert_eq!(
invalid_value_tokens,
vec![Err(LexerError::IsNotAlphanumeric(3, 3))]
);
}
#[test]
fn invalid_json_path() {
const INVALID_JSON_PATH: &str = r#"value.$%"#;
let invalid_json_path_tokens = run_lexer(INVALID_JSON_PATH);
assert_eq!(
invalid_json_path_tokens,
vec![Err(LexerError::InvalidJsonPath(7, 7))]
);
}

View File

@@ -0,0 +1,38 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Token<'input> {
OpenRoundBracket,
CloseRoundBracket,
OpenSquareBracket,
CloseSquareBracket,
StringLiteral(&'input str),
Alphanumeric(&'input str),
JsonPath(&'input str, usize),
Accumulator(&'input str),
InitPeerId,
Call,
Seq,
Par,
Null,
Fold,
Xor,
Next,
}

View File

@@ -0,0 +1,36 @@
/*
* Copyright 2020 Fluence Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
pub mod air_parser;
mod lexer;
// air is auto-generated, so exclude it from `cargo fmt -- --check` and `cargo clippy`
#[rustfmt::skip]
#[allow(clippy::all)]
mod air;
pub mod ast;
#[cfg(test)]
pub mod tests;
pub use self::air_parser::parse;
pub use air::AIRParser;
pub use lexer::AIRLexer;
fn into_variable_and_path(str: &str, pos: usize) -> (&str, &str) {
(&str[0..pos], &str[pos + 1..])
}
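An illustration of into_variable_and_path (redefined locally, since the crate keeps it private): pos is the index of the first dot that the lexer recorded in Token::JsonPath, and the split drops that dot:

fn into_variable_and_path(str: &str, pos: usize) -> (&str, &str) {
    (&str[0..pos], &str[pos + 1..])
}

fn main() {
    // For "value.$.field" the lexer reports the first '.' at index 5.
    assert_eq!(into_variable_and_path("value.$.field", 5), ("value", "$.field"));
}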

View File

@@ -14,11 +14,8 @@
* limitations under the License.
*/
use crate::ast::*;
use CallOutput::*;
use FunctionPart::*;
use InstructionValue::*;
use PeerPart::*;
use crate::ast;
use ast::Instruction;
use fstrings::f;
use std::rc::Rc;
@@ -29,6 +26,12 @@ fn parse(source_code: &str) -> Instruction {
#[test]
fn parse_seq() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let source_code = r#"
(seq
(call peerid function [] output)
@@ -55,6 +58,12 @@ fn parse_seq() {
#[test]
fn parse_seq_seq() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let source_code = r#"
(seq
(seq
@@ -92,6 +101,12 @@ fn parse_seq_seq() {
#[test]
fn parse_json_path() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let source_code = r#"
(call id.$.a "f" ["hello" name] void[])
"#;
@@ -110,6 +125,12 @@ fn parse_json_path() {
#[test]
fn parse_json_path_complex() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let source_code = r#"
(seq
(call m.$.[1] "f" [] void)
@@ -142,6 +163,12 @@ fn parse_json_path_complex() {
#[test]
fn json_path_square_braces() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let source_code = r#"
(call u.$["peer_id"] ("return" "") [u.$["peer_id"].cde[0]["abc"].abc u.$["name"]] void[])
"#;
@@ -178,7 +205,7 @@ fn parse_null() {
)
"#;
let instruction = parse(source_code);
let expected = Instruction::Seq(Seq(Box::new(null()), Box::new(null())));
let expected = Instruction::Seq(ast::Seq(Box::new(null()), Box::new(null())));
assert_eq!(instruction, expected)
}
@@ -213,7 +240,7 @@ fn parse_fold() {
)
"#;
let instruction = parse(&source_code.as_ref());
let expected = fold(InstructionValue::Variable("iterable"), "i", null());
let expected = fold(ast::IterableValue::Variable("iterable"), "i", null());
assert_eq!(instruction, expected);
}
@@ -229,7 +256,7 @@ fn parse_fold_with_xor_par_seq() {
let instruction = parse(&source_code.as_ref());
let instr = binary_instruction(*name);
let expected = fold(
InstructionValue::Variable("iterable"),
ast::IterableValue::Variable("iterable"),
"i",
instr(null(), null()),
);
@@ -239,6 +266,12 @@
#[test]
fn parse_init_peer_id() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let peer_id = String::from("some_peer_id");
let source_code = format!(
r#"
@@ -252,7 +285,7 @@ fn parse_init_peer_id() {
let instruction = parse(&source_code.as_ref());
let expected = seq(
Instruction::Call(Call {
peer_part: PeerPk(InstructionValue::Literal(&peer_id)),
peer_part: PeerPk(Literal(&peer_id)),
function_part: ServiceIdWithFuncName(
Literal("local_service_id"),
Literal("local_fn_name"),
@@ -273,6 +306,12 @@ fn parse_init_peer_id() {
#[test]
fn seq_par_call() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let peer_id = String::from("some_peer_id");
let source_code = format!(
r#"
@@ -290,7 +329,7 @@ fn seq_par_call() {
let expected = seq(
par(
Instruction::Call(Call {
peer_part: PeerPk(InstructionValue::Literal(&peer_id)),
peer_part: PeerPk(Literal(&peer_id)),
function_part: ServiceIdWithFuncName(
Literal("local_service_id"),
Literal("local_fn_name"),
@@ -306,7 +345,7 @@ fn seq_par_call() {
}),
),
Instruction::Call(Call {
peer_part: PeerPk(InstructionValue::Literal(&peer_id)),
peer_part: PeerPk(Literal(&peer_id)),
function_part: ServiceIdWithFuncName(
Literal("local_service_id"),
Literal("local_fn_name"),
@@ -321,6 +360,12 @@ fn seq_par_call() {
#[test]
fn seq_with_empty_and_dash() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let source_code = r#"
(seq
(seq
@@ -403,6 +448,12 @@ fn seq_with_empty_and_dash() {
#[test]
fn no_output() {
use ast::Call;
use ast::CallInstructionValue::*;
use ast::CallOutput::*;
use ast::FunctionPart::*;
use ast::PeerPart::*;
let source_code = r#"
(call peer (service fname) [])
"#;
@@ -418,6 +469,9 @@ fn no_output() {
#[test]
fn fold_json_path() {
use ast::Fold;
use ast::IterableValue::*;
let source_code = r#"
; comment
(fold members.$.["users"] m (null)) ;;; comment
@@ -437,6 +491,9 @@ fn fold_json_path() {
#[test]
fn comments() {
use ast::Fold;
use ast::IterableValue::*;
let source_code = r#"
; comment
(fold members.$.["users"] m (null)) ;;; comment ;;?()()
@@ -457,26 +514,26 @@ fn comments() {
// Test DSL
fn seq<'a>(l: Instruction<'a>, r: Instruction<'a>) -> Instruction<'a> {
Instruction::Seq(Seq(Box::new(l), Box::new(r)))
Instruction::Seq(ast::Seq(Box::new(l), Box::new(r)))
}
fn par<'a>(l: Instruction<'a>, r: Instruction<'a>) -> Instruction<'a> {
Instruction::Par(Par(Box::new(l), Box::new(r)))
Instruction::Par(ast::Par(Box::new(l), Box::new(r)))
}
fn xor<'a>(l: Instruction<'a>, r: Instruction<'a>) -> Instruction<'a> {
Instruction::Xor(Xor(Box::new(l), Box::new(r)))
Instruction::Xor(ast::Xor(Box::new(l), Box::new(r)))
}
fn seqnn() -> Instruction<'static> {
seq(null(), null())
}
fn null() -> Instruction<'static> {
Instruction::Null(Null)
Instruction::Null(ast::Null)
}
fn fold<'a>(
iterable: InstructionValue<'a>,
iterable: ast::IterableValue<'a>,
iterator: &'a str,
instruction: Instruction<'a>,
) -> Instruction<'a> {
Instruction::Fold(Fold {
Instruction::Fold(ast::Fold {
iterable,
iterator,
instruction: std::rc::Rc::new(instruction),

View File

@@ -29,7 +29,7 @@ use crate::JValue;
use crate::ResolvedTriplet;
use crate::SecurityTetraplet;
use air_parser::ast::{CallOutput, InstructionValue};
use air_parser::ast::{CallInstructionValue, CallOutput};
use std::rc::Rc;
@@ -37,7 +37,7 @@ use std::rc::Rc;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub(super) struct ResolvedCall<'i> {
triplet: Rc<ResolvedTriplet>,
function_arg_paths: Rc<Vec<InstructionValue<'i>>>,
function_arg_paths: Rc<Vec<CallInstructionValue<'i>>>,
output: CallOutput<'i>,
}

View File

@@ -19,15 +19,15 @@ use super::ExecutionError;
use super::ExecutionResult;
use crate::JValue;
use air_parser::ast::{FunctionPart, InstructionValue, PeerPart};
use air_parser::ast::{CallInstructionValue, FunctionPart, PeerPart};
use polyplets::ResolvedTriplet;
/// Triplet represents a location of the executable code in the network.
/// It is built from `PeerPart` and `FunctionPart` of a `Call` instruction.
pub(super) struct Triplet<'a, 'i> {
pub(super) peer_pk: &'a InstructionValue<'i>,
pub(super) service_id: &'a InstructionValue<'i>,
pub(super) function_name: &'a InstructionValue<'i>,
pub(super) peer_pk: &'a CallInstructionValue<'i>,
pub(super) service_id: &'a CallInstructionValue<'i>,
pub(super) function_name: &'a CallInstructionValue<'i>,
}
impl<'a, 'i> Triplet<'a, 'i> {
@@ -77,18 +77,18 @@ impl<'a, 'i> Triplet<'a, 'i> {
/// Resolve a value to a string by resolving a variable from `ExecutionCtx`, taking a literal value, etc.
// TODO: return Rc<String> to avoid excess cloning
fn resolve_to_string<'i>(value: &InstructionValue<'i>, ctx: &ExecutionCtx<'i>) -> ExecutionResult<String> {
fn resolve_to_string<'i>(value: &CallInstructionValue<'i>, ctx: &ExecutionCtx<'i>) -> ExecutionResult<String> {
use crate::execution::utils::resolve_to_jvaluable;
let resolved = match value {
InstructionValue::InitPeerId => ctx.init_peer_id.clone(),
InstructionValue::Literal(value) => value.to_string(),
InstructionValue::Variable(name) => {
CallInstructionValue::InitPeerId => ctx.init_peer_id.clone(),
CallInstructionValue::Literal(value) => value.to_string(),
CallInstructionValue::Variable(name) => {
let resolved = resolve_to_jvaluable(name, ctx)?;
let jvalue = resolved.into_jvalue();
jvalue_to_string(jvalue)?
}
InstructionValue::JsonPath { variable, path } => {
CallInstructionValue::JsonPath { variable, path } => {
let resolved = resolve_to_jvaluable(variable, ctx)?;
let resolved = resolved.apply_json_path(path)?;
vec_to_string(resolved, path)?

View File

@@ -19,7 +19,7 @@ use crate::JValue;
use crate::ResolvedTriplet;
use crate::SecurityTetraplet;
use air_parser::ast::InstructionValue;
use air_parser::ast;
use jsonpath_lib::select;
use jsonpath_lib::select_with_iter;
@@ -31,16 +31,12 @@ pub(super) type IterableValue = Box<dyn for<'ctx> Iterable<'ctx, Item = Iterable
/// Constructs an iterable value for the given instruction value;
/// returns Some if the iterable isn't empty and None otherwise.
pub(super) fn construct_iterable_value<'ctx>(
value: &InstructionValue<'ctx>,
ast_iterable: &ast::IterableValue<'ctx>,
exec_ctx: &ExecutionCtx<'ctx>,
) -> ExecutionResult<Option<IterableValue>> {
use ExecutionError::InvalidFoldIterable;
match value {
InstructionValue::Variable(name) => handle_instruction_variable(exec_ctx, name),
InstructionValue::JsonPath { variable, path } => handle_instruction_json_path(exec_ctx, variable, path),
// TODO: check statically that it isn't possible to use string literals and so on as fold iterable
v => return Err(InvalidFoldIterable(format!("{:?}", v))),
match ast_iterable {
ast::IterableValue::Variable(name) => handle_instruction_variable(exec_ctx, name),
ast::IterableValue::JsonPath { variable, path } => handle_instruction_json_path(exec_ctx, variable, path),
}
}

View File

@@ -81,10 +81,6 @@ pub(crate) enum ExecutionError {
/// Errors encountered while shadowing non-scalar values.
#[error("variable with name '{0}' can't be shadowed, shadowing is supported only for scalar values")]
ShadowingError(String),
/// Errors occured when instruction value can't be used as a fold iterable.
#[error("instruction value '{0}' can't be used as a fold iterable")]
InvalidFoldIterable(String),
}
impl ExecutionError {
@@ -106,7 +102,6 @@ impl ExecutionError {
MultipleFoldStates(_) => 12,
InvalidExecutedState(..) => 13,
ShadowingError(_) => 14,
InvalidFoldIterable(_) => 15,
}
}
}

View File

@@ -22,11 +22,11 @@ use crate::execution::ExecutionResult;
use crate::JValue;
use crate::SecurityTetraplet;
use air_parser::ast::InstructionValue;
use air_parser::ast::CallInstructionValue;
/// Resolve value to called function arguments.
pub(crate) fn resolve_to_args<'i>(
value: &InstructionValue<'i>,
value: &CallInstructionValue<'i>,
ctx: &ExecutionCtx<'i>,
) -> ExecutionResult<(JValue, Vec<SecurityTetraplet>)> {
fn handle_string_arg<'i>(arg: &str, ctx: &ExecutionCtx<'i>) -> ExecutionResult<(JValue, Vec<SecurityTetraplet>)> {
@@ -37,16 +37,16 @@ pub(crate) fn resolve_to_args<'i>(
}
match value {
InstructionValue::InitPeerId => handle_string_arg(ctx.init_peer_id.as_str(), ctx),
InstructionValue::Literal(value) => handle_string_arg(value, ctx),
InstructionValue::Variable(name) => {
CallInstructionValue::InitPeerId => handle_string_arg(ctx.init_peer_id.as_str(), ctx),
CallInstructionValue::Literal(value) => handle_string_arg(value, ctx),
CallInstructionValue::Variable(name) => {
let resolved = resolve_to_jvaluable(name, ctx)?;
let tetraplets = resolved.as_tetraplets();
let jvalue = resolved.into_jvalue();
Ok((jvalue, tetraplets))
}
InstructionValue::JsonPath { variable, path } => {
CallInstructionValue::JsonPath { variable, path } => {
let resolved = resolve_to_jvaluable(variable, ctx)?;
let (jvalue, tetraplets) = resolved.apply_json_path_with_tetraplets(path)?;
let jvalue = jvalue.into_iter().cloned().collect::<Vec<_>>();

View File

@@ -92,7 +92,7 @@ impl fmt::Display for PreparationError {
"an error occurred while executed trace deserialization on '{:?}': {:?}",
trace, serde_error
)
};
}
match String::from_utf8(executed_trace.to_vec()) {
Ok(str) => print_error(f, str, serde_error),

View File

@@ -11,7 +11,7 @@ dependencies = [
[[package]]
name = "air-parser"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"codespan",
"codespan-reporting",
@@ -19,6 +19,7 @@ dependencies = [
"lalrpop-util",
"regex",
"serde",
"thiserror",
]
[[package]]
@@ -671,7 +672,7 @@ dependencies = [
[[package]]
name = "stepper-lib"
version = "0.2.3"
version = "0.3.0"
dependencies = [
"air-parser",
"boolinator",

View File

@@ -11,7 +11,7 @@ dependencies = [
[[package]]
name = "air-parser"
version = "0.1.0"
version = "0.2.0"
dependencies = [
"codespan",
"codespan-reporting",
@@ -19,6 +19,7 @@ dependencies = [
"lalrpop-util",
"regex",
"serde",
"thiserror",
]
[[package]]
@@ -671,7 +672,7 @@ dependencies = [
[[package]]
name = "stepper-lib"
version = "0.2.3"
version = "0.3.0"
dependencies = [
"air-parser",
"boolinator",

View File

@@ -18,14 +18,10 @@ use stepper_lib::log_targets::TARGET_MAP;
use log::Level as LogLevel;
use std::collections::HashMap;
pub const DEFAULT_LOG_LEVEL: LogLevel = LogLevel::Info;
pub fn init_logger() {
use std::iter::FromIterator;
let target_map = HashMap::from_iter(TARGET_MAP.iter().cloned());
let target_map = TARGET_MAP.iter().cloned().collect();
fluence::WasmLoggerBuilder::new()
.with_target_map(target_map)
.build()