From e6ea006ac63637b451d7e29a3921cba2c51cd079 Mon Sep 17 00:00:00 2001
From: Stephen Chung
Date: Mon, 5 Apr 2021 00:05:56 +0800
Subject: [PATCH] Fix builds.

---
 src/lib.rs         |  5 ++++-
 src/parser.rs      | 12 ++++++++++--
 src/token.rs       | 12 ++++++------
 tests/functions.rs |  2 +-
 4 files changed, 21 insertions(+), 10 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 146a3717..f7e97d1d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -211,7 +211,10 @@ pub use dynamic::Variant;
 // Expose internal data structures.
 #[cfg(feature = "internals")]
 #[deprecated = "this type is volatile and may change"]
-pub use token::{get_next_token, parse_string_literal, InputStream, Token, TokenizeState};
+pub use token::{
+    get_next_token, parse_string_literal, InputStream, Token, TokenizeState, TokenizerControl,
+    TokenizerControlBlock,
+};
 
 #[cfg(feature = "internals")]
 #[deprecated = "this type is volatile and may change"]
diff --git a/src/parser.rs b/src/parser.rs
index 25622144..83f5df33 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -977,7 +977,11 @@ fn parse_primary(
         #[cfg(not(feature = "no_function"))]
         Token::Pipe | Token::Or if settings.allow_anonymous_fn => {
             let mut new_state = ParseState::new(state.engine, state.tokenizer_control.clone());
-            new_state.max_expr_depth = new_state.max_function_expr_depth;
+
+            #[cfg(not(feature = "unchecked"))]
+            {
+                new_state.max_expr_depth = new_state.max_function_expr_depth;
+            }
 
             let settings = ParseSettings {
                 allow_if_expr: true,
@@ -2532,7 +2536,11 @@ fn parse_stmt(
 
         (Token::Fn, pos) => {
             let mut new_state = ParseState::new(state.engine, state.tokenizer_control.clone());
-            new_state.max_expr_depth = new_state.max_function_expr_depth;
+
+            #[cfg(not(feature = "unchecked"))]
+            {
+                new_state.max_expr_depth = new_state.max_function_expr_depth;
+            }
 
             let settings = ParseSettings {
                 allow_if_expr: true,
diff --git a/src/token.rs b/src/token.rs
index 55124fa6..389ad8f7 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -27,16 +27,16 @@ use rust_decimal::Decimal;
 #[cfg(not(feature = "no_function"))]
 use crate::engine::KEYWORD_IS_DEF_FN;
 
-/// A type containing commands to control the tokenizer.
+/// _(INTERNALS)_ A type containing commands to control the tokenizer.
 #[derive(Debug, Clone, Eq, PartialEq, Hash, Copy, Default)]
-pub struct TokenizeControlBlock {
+pub struct TokenizerControlBlock {
     /// Is the current tokenizer position within an interpolated text string?
     /// This flag allows switching the tokenizer back to _text_ parsing after an interpolation stream.
     pub is_within_text: bool,
 }
 
-/// A shared object that allows control of the tokenizer from outside.
-pub type TokenizerControl = Rc<Cell<TokenizeControlBlock>>;
+/// _(INTERNALS)_ A shared object that allows control of the tokenizer from outside.
+pub type TokenizerControl = Rc<Cell<TokenizerControlBlock>>;
 
 type LERR = LexError;
 
@@ -1989,7 +1989,7 @@ impl Engine {
     pub fn lex<'a>(
         &'a self,
         input: impl IntoIterator<Item = &'a str>,
-    ) -> (TokenIterator<'a>, ExternalBuffer) {
+    ) -> (TokenIterator<'a>, TokenizerControl) {
         self.lex_raw(input, None)
     }
     /// _(INTERNALS)_ Tokenize an input text stream with a mapping function.
@@ -2000,7 +2000,7 @@
         &'a self,
         input: impl IntoIterator<Item = &'a str>,
         map: fn(Token) -> Token,
-    ) -> (TokenIterator<'a>, ExternalBuffer) {
+    ) -> (TokenIterator<'a>, TokenizerControl) {
         self.lex_raw(input, Some(map))
     }
     /// Tokenize an input text stream with an optional mapping function.
diff --git a/tests/functions.rs b/tests/functions.rs
index e6b56245..05805a9e 100644
--- a/tests/functions.rs
+++ b/tests/functions.rs
@@ -1,5 +1,5 @@
 #![cfg(not(feature = "no_function"))]
-use rhai::{Engine, EvalAltResult, FnNamespace, Module, ParseErrorType, Shared, INT};
+use rhai::{Engine, EvalAltResult, FnNamespace, Module, Shared, INT};
 
 #[cfg(not(feature = "no_object"))]
 #[test]
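
Usage note (not part of the patch): a minimal sketch of how the renamed tokenizer internals fit together after this change. It assumes a build with the `internals` feature, which gates `Engine::lex` and these re-exports; the script text and variable names are illustrative only.

    use rhai::{Engine, TokenizerControl, TokenizerControlBlock};

    fn main() {
        let engine = Engine::new();

        // `lex` now hands back the shared control handle
        // (`Rc<Cell<TokenizerControlBlock>>`) instead of the stale
        // `ExternalBuffer` type that broke the build.
        let (tokens, control): (_, TokenizerControl) = engine.lex(vec!["let x = 40 + 2;"]);

        // The control block derives `Copy`, so it can be read out of the
        // `Cell` (and written back) to steer the tokenizer from outside.
        let state: TokenizerControlBlock = control.get();
        assert!(!state.is_within_text);

        for item in tokens {
            println!("{:?}", item); // each item pairs a token with its position
        }
    }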