Fix builds.

Stephen Chung 2021-04-05 00:05:56 +08:00
parent 8956a77c8c
commit e6ea006ac6
4 changed files with 21 additions and 10 deletions

View File

@@ -211,7 +211,10 @@ pub use dynamic::Variant;
 // Expose internal data structures.
 #[cfg(feature = "internals")]
 #[deprecated = "this type is volatile and may change"]
-pub use token::{get_next_token, parse_string_literal, InputStream, Token, TokenizeState};
+pub use token::{
+    get_next_token, parse_string_literal, InputStream, Token, TokenizeState, TokenizerControl,
+    TokenizerControlBlock,
+};

 #[cfg(feature = "internals")]
 #[deprecated = "this type is volatile and may change"]
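
With the `internals` feature enabled, the two new tokenizer-control types are now re-exported from the crate root alongside the existing token machinery. A minimal sketch of what a downstream crate can import after this change (the consumer crate and its Cargo.toml setup are hypothetical; the re-exports carry a #[deprecated] marker, hence the allow):

    // Assumes Cargo.toml enables the feature:
    //     rhai = { features = ["internals"] }
    #![allow(deprecated)] // the internals re-exports are marked deprecated

    use rhai::{TokenizerControl, TokenizerControlBlock};

    fn main() {
        // A default block starts outside of any interpolated string.
        let block = TokenizerControlBlock::default();
        assert!(!block.is_within_text);

        // TokenizerControl is Rc<Cell<TokenizerControlBlock>>, so a default
        // handle works and a new block is swapped in via Cell::set.
        let control: TokenizerControl = TokenizerControl::default();
        control.set(TokenizerControlBlock { is_within_text: true });
        assert!(control.get().is_within_text);
    }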

View File

@@ -977,7 +977,11 @@ fn parse_primary(
         #[cfg(not(feature = "no_function"))]
         Token::Pipe | Token::Or if settings.allow_anonymous_fn => {
             let mut new_state = ParseState::new(state.engine, state.tokenizer_control.clone());
-            new_state.max_expr_depth = new_state.max_function_expr_depth;
+
+            #[cfg(not(feature = "unchecked"))]
+            {
+                new_state.max_expr_depth = new_state.max_function_expr_depth;
+            }

             let settings = ParseSettings {
                 allow_if_expr: true,
@@ -2532,7 +2536,11 @@ fn parse_stmt(
         (Token::Fn, pos) => {
             let mut new_state =
                 ParseState::new(state.engine, state.tokenizer_control.clone());
-            new_state.max_expr_depth = new_state.max_function_expr_depth;
+
+            #[cfg(not(feature = "unchecked"))]
+            {
+                new_state.max_expr_depth = new_state.max_function_expr_depth;
+            }

             let settings = ParseSettings {
                 allow_if_expr: true,
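
Both parser hunks apply the same build fix: `max_expr_depth` and `max_function_expr_depth` only exist on `ParseState` when the `unchecked` feature is disabled, so assigning one to the other must sit behind the same cfg gate or `cargo build --features unchecked` fails with a missing-field error. A standalone sketch of the pattern (field and type names mirror the diff, but these definitions are illustrative, not rhai's actual ones):

    // A field declared behind a cfg flag only exists in matching builds,
    // so every read or write of it must be gated the same way.
    struct ParseState {
        #[cfg(not(feature = "unchecked"))]
        max_expr_depth: usize,
        #[cfg(not(feature = "unchecked"))]
        max_function_expr_depth: usize,
    }

    fn enter_function_body(new_state: &mut ParseState) {
        // Without this block-level gate, builds with "unchecked" enabled
        // would not compile: the fields simply do not exist there.
        #[cfg(not(feature = "unchecked"))]
        {
            new_state.max_expr_depth = new_state.max_function_expr_depth;
        }
    }

    fn main() {
        let mut state = ParseState {
            #[cfg(not(feature = "unchecked"))]
            max_expr_depth: 0,
            #[cfg(not(feature = "unchecked"))]
            max_function_expr_depth: 64,
        };
        enter_function_body(&mut state);
    }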

View File

@@ -27,16 +27,16 @@ use rust_decimal::Decimal;
 #[cfg(not(feature = "no_function"))]
 use crate::engine::KEYWORD_IS_DEF_FN;

-/// A type containing commands to control the tokenizer.
+/// _(INTERNALS)_ A type containing commands to control the tokenizer.
 #[derive(Debug, Clone, Eq, PartialEq, Hash, Copy, Default)]
-pub struct TokenizeControlBlock {
+pub struct TokenizerControlBlock {
     /// Is the current tokenizer position within an interpolated text string?
     /// This flag allows switching the tokenizer back to _text_ parsing after an interpolation stream.
     pub is_within_text: bool,
 }

-/// A shared object that allows control of the tokenizer from outside.
-pub type TokenizerControl = Rc<Cell<TokenizeControlBlock>>;
+/// _(INTERNALS)_ A shared object that allows control of the tokenizer from outside.
+pub type TokenizerControl = Rc<Cell<TokenizerControlBlock>>;

 type LERR = LexError;
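
The struct is renamed from `TokenizeControlBlock` to `TokenizerControlBlock`, matching the `TokenizerControl` alias that wraps it, and both doc comments gain the `_(INTERNALS)_` marker used across the crate. A std-only sketch of why `Rc<Cell<_>>` suits this job (type definitions copied from the hunk; `main` is illustrative):

    use std::cell::Cell;
    use std::rc::Rc;

    #[derive(Debug, Clone, Eq, PartialEq, Hash, Copy, Default)]
    struct TokenizerControlBlock {
        is_within_text: bool,
    }

    type TokenizerControl = Rc<Cell<TokenizerControlBlock>>;

    fn main() {
        let control: TokenizerControl = Default::default();

        // Cloning the Rc hands a second owner to the "outside" party ...
        let outside = control.clone();
        outside.set(TokenizerControlBlock { is_within_text: true });

        // ... and the other handle observes the change. Cell::get copies
        // the block out, which is why the struct derives Copy.
        assert!(control.get().is_within_text);
    }
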
@@ -1989,7 +1989,7 @@ impl Engine {
     pub fn lex<'a>(
         &'a self,
         input: impl IntoIterator<Item = &'a &'a str>,
-    ) -> (TokenIterator<'a>, ExternalBuffer) {
+    ) -> (TokenIterator<'a>, TokenizerControl) {
         self.lex_raw(input, None)
     }
     /// _(INTERNALS)_ Tokenize an input text stream with a mapping function.
@@ -2000,7 +2000,7 @@ impl Engine {
         &'a self,
         input: impl IntoIterator<Item = &'a &'a str>,
         map: fn(Token) -> Token,
-    ) -> (TokenIterator<'a>, ExternalBuffer) {
+    ) -> (TokenIterator<'a>, TokenizerControl) {
         self.lex_raw(input, Some(map))
     }
     /// Tokenize an input text stream with an optional mapping function.
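
These two hunks are the remaining breakage: both `lex` signatures still named `ExternalBuffer`, a type that no longer exists in the crate, so any `internals` build failed to compile; they now return the `TokenizerControl` handle. A hedged usage sketch (requires the `internals` feature; the `(Token, Position)` item shape of `TokenIterator` is assumed from rhai's internals API of this period, and the script is illustrative):

    #![allow(deprecated)] // may be needed: the internals API is volatile

    use rhai::Engine;

    fn main() {
        let engine = Engine::new();
        let script = ["let x = 40 + 2;"];

        // lex() now returns the TokenizerControl handle alongside the
        // token stream; callers that only want tokens can ignore it.
        let (tokens, _control) = engine.lex(script.iter());

        for (token, pos) in tokens {
            println!("{:?} @ {:?}", token, pos);
        }
    }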

View File

@@ -1,5 +1,5 @@
 #![cfg(not(feature = "no_function"))]
-use rhai::{Engine, EvalAltResult, FnNamespace, Module, ParseErrorType, Shared, INT};
+use rhai::{Engine, EvalAltResult, FnNamespace, Module, Shared, INT};

 #[cfg(not(feature = "no_object"))]
 #[test]