chore: Rust channel update dedicated to Software Engineer Day (#699)

Ivan Boldyrev authored on 2023-09-14 18:55:06 +07:00; committed by GitHub
parent 4e72abe9a7
commit d41f7646d9
23 changed files with 1827 additions and 388 deletions

Cargo.lock (generated), 4 lines changed
View File

@@ -4217,9 +4217,9 @@ dependencies = [
 [[package]]
 name = "proc-macro2"
-version = "1.0.66"
+version = "1.0.67"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
+checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328"
 dependencies = [
  "unicode-ident",
 ]

View File

@@ -70,7 +70,7 @@ fn populate_context<'ctx>(
 fn resolve_key_if_needed<'ctx>(
     key: &StreamMapKeyClause<'ctx>,
-    exec_ctx: &mut ExecutionCtx<'ctx>,
+    exec_ctx: &ExecutionCtx<'ctx>,
     map_name: &str,
 ) -> Result<StreamMapKey<'ctx>, ExecutionError> {
     match key {
@@ -84,7 +84,7 @@ fn resolve_key_if_needed<'ctx>(
 fn resolve<'ctx>(
     resolvable: &impl Resolvable,
-    exec_ctx: &mut ExecutionCtx<'_>,
+    exec_ctx: &ExecutionCtx<'_>,
     map_name: &str,
 ) -> Result<StreamMapKey<'ctx>, ExecutionError> {
     let (value, _, _) = resolvable.resolve(exec_ctx)?;
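The recurring change in the interpreter hunks is relaxing `&mut ExecutionCtx` parameters to `&ExecutionCtx` where the function only reads from the context. This was possibly prompted by Clippy's recently added needless_pass_by_ref_mut lint on the newer nightly (an assumption; it may simply be a manual cleanup bundled with the toolchain bump). A minimal sketch with hypothetical types, not the AquaVM ones:

// Hypothetical stand-in for ExecutionCtx, not the real type.
struct Ctx {
    last_error: Option<String>,
}

// Before: an exclusive borrow is requested even though nothing is mutated.
// Recent Clippy nightlies can flag this (needless_pass_by_ref_mut, nursery).
#[allow(clippy::needless_pass_by_ref_mut)]
fn resolve_before(ctx: &mut Ctx) -> usize {
    ctx.last_error.as_deref().map_or(0, str::len)
}

// After: a shared borrow is enough, and callers may keep other shared
// borrows of the context alive across the call.
fn resolve_after(ctx: &Ctx) -> usize {
    ctx.last_error.as_deref().map_or(0, str::len)
}

fn main() {
    let mut ctx = Ctx {
        last_error: Some("key is not a scalar".to_owned()),
    };
    assert_eq!(resolve_before(&mut ctx), resolve_after(&ctx));
}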

View File

@@ -217,14 +217,14 @@ fn select_by_functor_from_scalar(value: &JValue, functor: &Functor) -> Execution
 }
 impl<'value> LambdaResult<'value> {
-    pub(self) fn from_cow(result: Cow<'value, JValue>, tetraplet_idx: usize) -> Self {
+    fn from_cow(result: Cow<'value, JValue>, tetraplet_idx: usize) -> Self {
         Self {
             result,
             tetraplet_idx: Some(tetraplet_idx),
         }
     }
-    pub(self) fn from_value(result: JValue) -> Self {
+    fn from_value(result: JValue) -> Self {
         Self {
             result: Cow::Owned(result),
             tetraplet_idx: None,
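The other recurring cleanup is dropping `pub(self)`: it denotes visibility within the current module, which is already the default, so removing it changes nothing for callers. The newer Clippy presumably flags it (likely via needless_pub_self; an assumption). A small self-contained illustration with made-up names:

// Made-up module, mirroring the shape of the LambdaResult cleanup above.
mod lambda {
    pub struct LambdaResult {
        result: String,
    }

    impl LambdaResult {
        // Was `pub(self) fn from_value(...)` in the old style; `pub(self)`
        // means "visible in the current module", which is already the default.
        fn from_value(result: String) -> Self {
            Self { result }
        }

        // Public wrapper so the private constructor is exercised.
        pub fn len_of(value: &str) -> usize {
            Self::from_value(value.to_owned()).result.len()
        }
    }
}

fn main() {
    assert_eq!(lambda::LambdaResult::len_of("value"), 5);
}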

View File

@@ -51,7 +51,7 @@ pub(super) use value_types::ScalarRef;
 pub(super) use value_types::ServiceResultAggregate;
 pub(super) use value_types::Stream;
 pub(super) use value_types::ValueAggregate;
-pub(self) use value_types::STREAM_MAX_SIZE;
+use value_types::STREAM_MAX_SIZE;
 pub(crate) use air_trace_handler::TraceHandler;

View File

@@ -16,7 +16,7 @@
 mod recursive_stream;
 mod stream_definition;
-pub(self) mod values_matrix;
+mod values_matrix;
 pub(crate) use recursive_stream::IterableValue;
 pub(crate) use recursive_stream::RecursiveCursorState;
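`pub(self)` on `mod`, `use`, and `type` items is redundant in the same way, so the plain forms are drop-in equivalents. A standalone sketch; the names are illustrative, not the real lexer API:

// Illustrative module, not the crate's utils.
mod utils {
    pub(super) fn is_air_alphanumeric_like(c: char) -> bool {
        c.is_alphanumeric() || c == '_' || c == '-'
    }
}

// Previously these might have been spelled `pub(self) use ...` and
// `pub(self) type ...`; the shorter form has exactly the same meaning.
use utils::is_air_alphanumeric_like;
type LexerResultLike<T> = Result<T, String>;

fn check(token: &str) -> LexerResultLike<()> {
    if !token.is_empty() && token.chars().all(is_air_alphanumeric_like) {
        Ok(())
    } else {
        Err(format!("bad token: {token}"))
    }
}

fn main() {
    assert!(check("stream_map-1").is_ok());
    assert!(check("bad token").is_err());
}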

View File

@@ -598,7 +598,9 @@ mod test {
         let add_value_result = stream.add_value(value.clone(), Generation::new());
-        let Err(ExecutionError::Uncatchable(error)) = add_value_result else { panic!("there must be CatchableError")};
+        let Err(ExecutionError::Uncatchable(error)) = add_value_result else {
+            panic!("there must be CatchableError")
+        };
         assert!(matches!(error, UncatchableError::StreamSizeLimitExceeded));
     }
 }
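This hunk is formatting only: the rustfmt bundled with the newer nightly formats `let ... else` statements, which older releases left untouched, so the diverging else block now spans several lines. A tiny standalone example of the construct:

// Standalone example of a `let ... else` statement; the else branch must diverge.
// Newer rustfmt renders it with the multi-line else block seen in the diff.
fn first_even(values: &[i32]) -> i32 {
    let Some(first) = values.iter().copied().find(|v| v % 2 == 0) else {
        panic!("there must be at least one even value")
    };
    first
}

fn main() {
    assert_eq!(first_even(&[1, 3, 4, 7]), 4);
}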

View File

@@ -164,8 +164,8 @@ fn executed_trace_par_par_call() {
 fn executed_trace_seq_seq() {
     let peer_id_1 = "12D3KooWHk9BjDQBUqnavciRPhAYFvqKBe4ZiPPvde7vDaqgn5er";
     let peer_id_2 = "12D3KooWAzJcYitiZrerycVB4Wryrx22CFKdDGx7c4u31PFdfTbR";
-    let mut vm1 = create_avm(unit_call_service(), peer_id_1.clone());
-    let mut vm2 = create_avm(unit_call_service(), peer_id_2.clone());
+    let mut vm1 = create_avm(unit_call_service(), peer_id_1);
+    let mut vm2 = create_avm(unit_call_service(), peer_id_2);
     let script = format!(
         r#"

View File

@@ -23,10 +23,10 @@ type ClosureSettableVar<T> = Rc<RefCell<T>>;
 #[derive(Default, Clone, Debug, PartialEq, Eq)]
 struct ClosureCallArgs {
-    pub(self) service_id_var: Rc<RefCell<String>>,
-    pub(self) function_name_var: ClosureSettableVar<String>,
-    pub(self) args_var: ClosureSettableVar<Vec<i32>>,
-    pub(self) tetraplets: ClosureSettableVar<Vec<Vec<String>>>,
+    service_id_var: Rc<RefCell<String>>,
+    function_name_var: ClosureSettableVar<String>,
+    args_var: ClosureSettableVar<Vec<i32>>,
+    tetraplets: ClosureSettableVar<Vec<Vec<String>>>,
 }
 fn create_check_service_closure(closure_call_args: ClosureCallArgs) -> CallServiceClosure {

View File

@@ -50,7 +50,7 @@ pub struct ErrorAVMOutcome {
 }
 impl AVMOutcome {
-    pub(self) fn new(
+    fn new(
         data: Vec<u8>,
         call_requests: CallRequests,
         next_peer_pks: Vec<String>,
@@ -99,7 +99,7 @@ impl AVMOutcome {
 }
 impl ErrorAVMOutcome {
-    pub(self) fn new(error_code: i64, error_message: String, outcome: AVMOutcome) -> Self {
+    fn new(error_code: i64, error_message: String, outcome: AVMOutcome) -> Self {
         Self {
             error_code,
             error_message,

View File

@@ -42,14 +42,14 @@ enum MetTag {
 #[derive(Debug)]
 struct ParserState {
-    pub(self) first_dot_met_pos: Option<usize>,
-    pub(self) non_numeric_met: bool,
-    pub(self) digit_met: bool,
-    pub(self) flattening_met: bool,
-    pub(self) met_tag: MetTag,
-    pub(self) is_first_char: bool,
-    pub(self) current_char: char,
-    pub(self) current_offset: usize,
+    first_dot_met_pos: Option<usize>,
+    non_numeric_met: bool,
+    digit_met: bool,
+    flattening_met: bool,
+    met_tag: MetTag,
+    is_first_char: bool,
+    current_char: char,
+    current_offset: usize,
 }
 struct CallVariableParser<'input> {
@@ -88,10 +88,7 @@ impl<'input> CallVariableParser<'input> {
         Ok(parser)
     }
-    pub(self) fn try_parse(
-        string_to_parse: &'input str,
-        start_pos: AirPos,
-    ) -> LexerResult<Token<'input>> {
+    fn try_parse(string_to_parse: &'input str, start_pos: AirPos) -> LexerResult<Token<'input>> {
         let mut parser = Self::new(string_to_parse, start_pos)?;
         loop {

View File

@@ -33,5 +33,5 @@ pub use token::Token;
 pub(super) type LexerResult<T> = std::result::Result<T, LexerError>;
-pub(self) use utils::is_air_alphanumeric;
-pub(self) use utils::is_json_path_allowed_char;
+use utils::is_air_alphanumeric;
+use utils::is_json_path_allowed_char;

View File

@@ -26,4 +26,4 @@ pub use errors::LexerError;
 pub use lambda_ast_lexer::LambdaASTLexer;
 pub use token::Token;
-pub(self) use utils::is_air_alphanumeric;
+use utils::is_air_alphanumeric;

View File

@@ -25,7 +25,7 @@ pub use trace_slider::TraceSlider;
 pub(crate) use keeper::DataKeeper;
-pub(self) type KeeperResult<T> = std::result::Result<T, KeeperError>;
+type KeeperResult<T> = std::result::Result<T, KeeperError>;
 use super::ExecutedState;
 use super::ExecutionTrace;

View File

@@ -170,7 +170,7 @@ impl TraceHandler {
     pub fn meet_iteration_end(&mut self, fold_id: u32) -> TraceHandlerResult<()> {
         let fold_fsm = self.fsm_keeper.fold_mut(fold_id)?;
-        fold_fsm.meet_iteration_end(&mut self.data_keeper);
+        fold_fsm.meet_iteration_end(&self.data_keeper);
         Ok(())
     }
@@ -184,7 +184,7 @@ impl TraceHandler {
     pub fn meet_generation_end(&mut self, fold_id: u32) -> TraceHandlerResult<()> {
         let fold_fsm = self.fsm_keeper.fold_mut(fold_id)?;
-        fold_fsm.meet_generation_end(&mut self.data_keeper);
+        fold_fsm.meet_generation_end(&self.data_keeper);
         Ok(())
     }

View File

@@ -49,11 +49,7 @@ pub(crate) fn try_merge_next_state_as_fold(data_keeper: &mut DataKeeper) -> Merg
 }
 impl MergerFoldResult {
-    pub(self) fn from_fold_result(
-        fold: &FoldResult,
-        ctx_type: MergeCtxType,
-        data_keeper: &DataKeeper,
-    ) -> MergeResult<Self> {
+    fn from_fold_result(fold: &FoldResult, ctx_type: MergeCtxType, data_keeper: &DataKeeper) -> MergeResult<Self> {
         let (prev_fold_lore, current_fold_lore) = match ctx_type {
             MergeCtxType::Previous => {
                 let fold_lore = resolve_fold_lore(fold, &data_keeper.prev_ctx)?;
@@ -73,7 +69,7 @@ impl MergerFoldResult {
         Ok(merge_result)
     }
-    pub(self) fn from_fold_results(
+    fn from_fold_results(
         prev_fold: &FoldResult,
         current_fold: &FoldResult,
         data_keeper: &DataKeeper,

View File

@@ -176,12 +176,12 @@ impl ResolvedSubTraceDescs {
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 struct LoresLen {
-    pub(self) before_len: u32,
-    pub(self) after_len: u32,
+    before_len: u32,
+    after_len: u32,
 }
 impl LoresLen {
-    pub(self) fn new(before_len: u32, after_len: u32) -> Self {
+    fn new(before_len: u32, after_len: u32) -> Self {
         Self { before_len, after_len }
     }
 }

View File

@@ -39,21 +39,21 @@ pub(crate) fn try_merge_next_state_as_par(data_keeper: &mut DataKeeper) -> Merge
 }
 impl MergerParResult {
-    pub(self) fn from_pars(prev_par: ParResult, current_par: ParResult) -> Self {
+    fn from_pars(prev_par: ParResult, current_par: ParResult) -> Self {
         Self {
             prev_par: Some(prev_par),
             current_par: Some(current_par),
         }
     }
-    pub(self) fn from_prev_par(prev_par: ParResult) -> Self {
+    fn from_prev_par(prev_par: ParResult) -> Self {
         Self {
             prev_par: Some(prev_par),
             current_par: None,
         }
     }
-    pub(self) fn from_current_par(current_par: ParResult) -> Self {
+    fn from_current_par(current_par: ParResult) -> Self {
         Self {
             prev_par: None,
             current_par: Some(current_par),

View File

@@ -90,7 +90,7 @@ impl FoldFSM {
         Ok(())
     }
-    pub(crate) fn meet_iteration_end(&mut self, data_keeper: &mut DataKeeper) {
+    pub(crate) fn meet_iteration_end(&mut self, data_keeper: &DataKeeper) {
         self.ctor_queue.current().ctor.before_end(data_keeper);
     }
@@ -125,7 +125,7 @@ impl FoldFSM {
         Ok(())
     }
-    pub(crate) fn meet_generation_end(&mut self, data_keeper: &mut DataKeeper) {
+    pub(crate) fn meet_generation_end(&mut self, data_keeper: &DataKeeper) {
         self.ctor_queue.finish(data_keeper);
         self.ctor_queue.end_back_traverse();
@@ -143,7 +143,7 @@
 }
 #[derive(Clone, Copy)]
-pub(self) enum ByNextPosition {
+enum ByNextPosition {
     /// Represents executed states before next.
     Before,

View File

@@ -28,8 +28,8 @@ pub(super) struct SubTraceLoreCtor {
 #[derive(Debug, Default, PartialEq, Eq, Clone, Copy)]
 struct PositionsTracker {
-    pub(self) start_pos: TracePos,
-    pub(self) end_pos: TracePos,
+    start_pos: TracePos,
+    end_pos: TracePos,
 }
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
@@ -119,7 +119,7 @@ impl SubTraceLoreCtor {
 }
 impl PositionsTracker {
-    pub(self) fn len(&self) -> usize {
+    fn len(&self) -> usize {
         (self.end_pos - self.start_pos).into()
     }
 }
@@ -131,7 +131,7 @@ impl Default for CtorState {
 }
 impl CtorState {
-    pub(self) fn next(&mut self) {
+    fn next(&mut self) {
         use CtorState::*;
         let next_state = match self {

View File

@@ -65,11 +65,7 @@ pub(super) struct CtxStateHandler {
 impl CtxStateHandler {
     /// Prepare new states that sliders will have after finishing executing of each subgraph.
-    pub(super) fn prepare(
-        prev_par: ParResult,
-        current_par: ParResult,
-        data_keeper: &mut DataKeeper,
-    ) -> FSMResult<Self> {
+    pub(super) fn prepare(prev_par: ParResult, current_par: ParResult, data_keeper: &DataKeeper) -> FSMResult<Self> {
         let left_pair = compute_new_states(data_keeper, prev_par, current_par, SubgraphType::Left)?;
         let right_pair = compute_new_states(data_keeper, prev_par, current_par, SubgraphType::Right)?;

View File

@@ -1,6 +1,6 @@
 [toolchain]
 # AquaVM can be built with "stable", "nightly" required only to build Marine tests
-channel = "nightly-2023-04-12"
+channel = "nightly-2023-09-13"
 components = [ "rustfmt", "clippy", "rust-src", "llvm-tools-preview" ]
 targets = [ "x86_64-unknown-linux-gnu", "x86_64-apple-darwin", "wasm32-wasi", "wasm32-unknown-unknown" ]
 profile = "minimal"