Enhance signature of on_parse_token callback.
commit 0715262c44
parent 65326d87f4
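In short: the token-mapper callback registered via `Engine::on_parse_token` now receives the token's `Position` and a reference to the tokenizer's `TokenizeState` in addition to the `Token` itself, so its shape changes from `Fn(Token) -> Token` to `Fn(Token, Position, &TokenizeState) -> Token`. A minimal sketch of a mapper under the new signature (assuming the `internals` feature, which exposes the token types; the closure body is illustrative and not part of this commit):

    use rhai::Engine;

    fn main() {
        let mut engine = Engine::new();
        // The extra position/state parameters can simply be ignored with `_`
        // when a mapper does not need them.
        engine.on_parse_token(|token, _, _| token);
    }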
@@ -1368,15 +1368,17 @@ impl Engine {
         };
         let (stream, tokenizer_control) = engine.lex_raw(
             &scripts,
-            Some(if has_null {
-                &|token| match token {
-                    // If `null` is present, make sure `null` is treated as a variable
-                    Token::Reserved(s) if s == "null" => Token::Identifier(s),
-                    _ => token,
-                }
+            if has_null {
+                Some(&|token, _, _| {
+                    match token {
+                        // If `null` is present, make sure `null` is treated as a variable
+                        Token::Reserved(s) if s == "null" => Token::Identifier(s),
+                        _ => token,
+                    }
+                })
             } else {
-                &|t| t
-            }),
+                None
+            },
         );
         let mut state = ParseState::new(engine, tokenizer_control);
         let ast = engine.parse_global_expr(
@@ -2130,7 +2132,7 @@ impl Engine {
     /// let mut engine = Engine::new();
     ///
     /// // Register a token mapper.
-    /// engine.on_parse_token(|token| {
+    /// engine.on_parse_token(|token, _, _| {
     ///     match token {
     ///         // Convert all integer literals to strings
     ///         Token::IntegerConstant(n) => Token::StringConstant(n.to_string()),
@@ -2153,8 +2155,12 @@ impl Engine {
     #[inline(always)]
     pub fn on_parse_token(
         &mut self,
-        callback: impl Fn(crate::token::Token) -> crate::token::Token + SendSync + 'static,
+        callback: impl Fn(crate::token::Token, Position, &crate::token::TokenizeState) -> crate::token::Token
+            + SendSync
+            + 'static,
     ) -> &mut Self {
+        use std::string::ParseError;
+
         self.token_mapper = Some(Box::new(callback));
         self
     }
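A hedged usage sketch showing a mapper that actually makes use of the two new parameters, e.g. to report where reserved tokens occur during tokenization (only the `(Token, Position, &TokenizeState)` signature and the `comment_level` field come from this change set; the logging itself is illustrative, and the `internals` feature is assumed so that `Token` is exported):

    use rhai::{Engine, Token};

    fn main() {
        let mut engine = Engine::new();
        engine.on_parse_token(|token, pos, state| {
            // `pos` is the token's position in the script; `state` exposes the
            // tokenizer's current state, e.g. the block-comment nesting level.
            if let Token::Reserved(ref s) = token {
                eprintln!("reserved token `{}` at {} (comment level {})", s, pos, state.comment_level);
            }
            // Pass every token through unchanged.
            token
        });
    }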
@@ -4,7 +4,7 @@ use crate::ast::{FnAccess, FnCallHashes};
 use crate::engine::Imports;
 use crate::fn_call::FnCallArgs;
 use crate::plugin::PluginFunction;
-use crate::token::Token;
+use crate::token::{Token, TokenizeState};
 use crate::{
     calc_fn_hash, Dynamic, Engine, EvalAltResult, EvalContext, Module, Position, RhaiResult,
 };
@@ -310,10 +310,11 @@ pub type OnDebugCallback = Box<dyn Fn(&str, Option<&str>, Position) + Send + Syn

 /// A standard callback function for mapping tokens during parsing.
 #[cfg(not(feature = "sync"))]
-pub type OnParseTokenCallback = dyn Fn(Token) -> Token;
+pub type OnParseTokenCallback = dyn Fn(Token, Position, &TokenizeState) -> Token;
 /// A standard callback function for mapping tokens during parsing.
 #[cfg(feature = "sync")]
-pub type OnParseTokenCallback = dyn Fn(Token) -> Token + Send + Sync + 'static;
+pub type OnParseTokenCallback =
+    dyn Fn(Token, Position, &TokenizeState) -> Token + Send + Sync + 'static;

 /// A standard callback function for variable access.
 #[cfg(not(feature = "sync"))]
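For context, the engine keeps the registered mapper as a boxed trait object of this alias and invokes it for every token the tokenizer produces (see the `map_func(token, pos, &self.state)` call further down). A rough sketch of holding and calling such a callback; the `MapperHolder` type and the local alias are illustrative stand-ins, not the crate's actual layout, and `Token`/`TokenizeState` are assumed to be available via the `internals` feature:

    use rhai::{Position, Token, TokenizeState};

    // Local alias mirroring the non-`sync` OnParseTokenCallback definition above.
    type OnParseTokenCallback = dyn Fn(Token, Position, &TokenizeState) -> Token;

    struct MapperHolder {
        token_mapper: Option<Box<OnParseTokenCallback>>,
    }

    impl MapperHolder {
        fn map_token(&self, token: Token, pos: Position, state: &TokenizeState) -> Token {
            match self.token_mapper {
                // Run the mapper if one is registered...
                Some(ref map_func) => map_func(token, pos, state),
                // ...otherwise pass the token through unchanged.
                None => token,
            }
        }
    }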
src/token.rs (26 changed lines)
@@ -29,6 +29,10 @@ use rust_decimal::Decimal;
 use crate::engine::KEYWORD_IS_DEF_FN;

 /// _(internals)_ A type containing commands to control the tokenizer.
+///
+/// # Volatile Data Structure
+///
+/// This type is volatile and may change.
 #[derive(Debug, Clone, Eq, PartialEq, Hash, Copy, Default)]
 pub struct TokenizerControlBlock {
     /// Is the current tokenizer position within an interpolated text string?
@@ -992,7 +996,7 @@ pub struct TokenizeState {
     /// Maximum length of a string.
     pub max_string_size: Option<NonZeroUsize>,
     /// Can the next token be a unary operator?
-    pub non_unary: bool,
+    pub next_token_cannot_be_unary: bool,
     /// Is the tokenizer currently inside a block comment?
     pub comment_level: usize,
     /// Include comments?
@@ -1327,7 +1331,7 @@ pub fn get_next_token(

     // Save the last token's state
     if let Some((ref token, _)) = result {
-        state.non_unary = !token.is_next_unary();
+        state.next_token_cannot_be_unary = !token.is_next_unary();
     }

     result
@@ -1678,10 +1682,12 @@ fn get_next_token_inner(
                 eat_next(stream, pos);
                 return Some((Token::Reserved("++".into()), start_pos));
             }
-            ('+', _) if !state.non_unary => return Some((Token::UnaryPlus, start_pos)),
+            ('+', _) if !state.next_token_cannot_be_unary => {
+                return Some((Token::UnaryPlus, start_pos))
+            }
             ('+', _) => return Some((Token::Plus, start_pos)),

-            ('-', '0'..='9') if !state.non_unary => negated = Some(start_pos),
+            ('-', '0'..='9') if !state.next_token_cannot_be_unary => negated = Some(start_pos),
             ('-', '0'..='9') => return Some((Token::Minus, start_pos)),
             ('-', '=') => {
                 eat_next(stream, pos);
@@ -1695,7 +1701,9 @@ fn get_next_token_inner(
                 eat_next(stream, pos);
                 return Some((Token::Reserved("--".into()), start_pos));
             }
-            ('-', _) if !state.non_unary => return Some((Token::UnaryMinus, start_pos)),
+            ('-', _) if !state.next_token_cannot_be_unary => {
+                return Some((Token::UnaryMinus, start_pos))
+            }
             ('-', _) => return Some((Token::Minus, start_pos)),

             ('*', ')') => {
@@ -2117,6 +2125,10 @@ impl InputStream for MultiInputsStream<'_> {

 /// _(internals)_ An iterator on a [`Token`] stream.
 /// Exported under the `internals` feature only.
+///
+/// # Volatile Data Structure
+///
+/// This type is volatile and may change.
 pub struct TokenIterator<'a> {
     /// Reference to the scripting `Engine`.
     pub engine: &'a Engine,
@@ -2224,7 +2236,7 @@ impl<'a> Iterator for TokenIterator<'a> {

         // Run the mapper, if any
         let token = if let Some(map_func) = self.token_mapper {
-            map_func(token)
+            map_func(token, pos, &self.state)
         } else {
             token
         };
@@ -2278,7 +2290,7 @@ impl Engine {
             max_string_size: self.limits.max_string_size,
             #[cfg(feature = "unchecked")]
             max_string_size: None,
-            non_unary: false,
+            next_token_cannot_be_unary: false,
             comment_level: 0,
             include_comments: false,
             is_within_text_terminated_by: None,