Add custom syntax.

Stephen Chung 2020-07-09 19:54:28 +08:00
parent e33760a7d4
commit 99164ebceb
10 changed files with 483 additions and 132 deletions


@ -34,7 +34,8 @@ Features
* Rugged - protected against malicious attacks (such as [stack-overflow](https://schungx.github.io/rhai/safety/max-call-stack.html), [over-sized data](https://schungx.github.io/rhai/safety/max-string-size.html), and [runaway scripts](https://schungx.github.io/rhai/safety/max-operations.html) etc.) that may come from untrusted third-party user-land scripts.
* Track script evaluation [progress](https://schungx.github.io/rhai/safety/progress.html) and manually terminate a script run.
* [Function overloading](https://schungx.github.io/rhai/language/overload.html).
- * [Operator overloading](https://schungx.github.io/rhai/rust/operators.html) and [custom operators](https://schungx.github.io/rhai/engine/custom-op.html).
+ * [Operator overloading](https://schungx.github.io/rhai/rust/operators.html).
+ * Support for use as a [DSL](https://schungx.github.io/rhai/engine/dsl.html) - [disabling keywords/operators](https://schungx.github.io/rhai/engine/disable.html), [custom operators](https://schungx.github.io/rhai/engine/custom-op.html).
* Dynamic dispatch via [function pointers](https://schungx.github.io/rhai/language/fn-ptr.html).
* Some support for [object-oriented programming (OOP)](https://schungx.github.io/rhai/language/oop.html).
* Organize code base with dynamically-loadable [modules](https://schungx.github.io/rhai/language/modules.html).


@ -31,6 +31,7 @@ New features
* The boolean `^` (XOR) operator is added.
* `FnPtr` is exposed as the function pointer type.
* `rhai::module_resolvers::ModuleResolversCollection` added to try a list of module resolvers.
* It is now possible to mutate the first argument of a module-qualified function call when the argument is a simple variable (but not a module constant).
Version 0.16.1


@ -11,6 +11,7 @@ use crate::parser::{Expr, FnAccess, ImmutableString, ReturnType, ScriptFnDef, St
use crate::r#unsafe::unsafe_cast_var_name_to_lifetime;
use crate::result::EvalAltResult;
use crate::scope::{EntryType as ScopeEntryType, Scope};
use crate::syntax::CustomSyntax;
use crate::token::Position;
use crate::utils::StaticVec;
@ -82,8 +83,12 @@ pub const KEYWORD_THIS: &str = "this";
pub const FN_TO_STRING: &str = "to_string";
pub const FN_GET: &str = "get$";
pub const FN_SET: &str = "set$";
- pub const FN_IDX_GET: &str = "$index$get$";
+ pub const FN_IDX_GET: &str = "index$get$";
- pub const FN_IDX_SET: &str = "$index$set$";
+ pub const FN_IDX_SET: &str = "index$set$";
+ pub const MARKER_EXPR: &str = "$expr$";
+ pub const MARKER_STMT: &str = "$stmt$";
+ pub const MARKER_BLOCK: &str = "$block$";
+ pub const MARKER_IDENT: &str = "$ident$";
/// A type specifying the method of chaining.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
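These four markers are the placeholders an embedder writes into a custom syntax declaration; during parsing each one is replaced by a captured sub-expression (see the parse_expr changes below). For example, the segment list registered by the new tests/syntax.rs later in this commit uses three of them (the local binding name here is illustrative only):

// Segment list for the do-while syntax exercised in tests/syntax.rs:
// "do" becomes the leading custom keyword, "$ident$" captures a variable
// name, "$block$" captures a statement block and "$expr$" the loop condition.
let do_while_segments = &["do", "$ident$", "$block$", "while", "$expr$"];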
@ -273,6 +278,8 @@ pub struct Engine {
pub(crate) disabled_symbols: Option<HashSet<String>>,
/// A hashset containing custom keywords and precedence to recognize.
pub(crate) custom_keywords: Option<HashMap<String, u8>>,
/// Custom syntax.
pub(crate) custom_syntax: Option<HashMap<String, CustomSyntax>>,
/// Callback closure for implementing the `print` command.
pub(crate) print: Callback<str, ()>,
@ -322,6 +329,7 @@ impl Default for Engine {
type_names: None,
disabled_symbols: None,
custom_keywords: None,
custom_syntax: None,
// default print/debug implementations
print: Box::new(default_print),
@ -554,6 +562,7 @@ impl Engine {
type_names: None,
disabled_symbols: None,
custom_keywords: None,
custom_syntax: None,
print: Box::new(|_| {}),
debug: Box::new(|_| {}),
@ -1595,6 +1604,26 @@ impl Engine {
}
}
/// Evaluate an expression inside an AST.
///
/// ## WARNING - Low Level API
///
/// This function is very low level. It evaluates an expression from an AST.
#[cfg(feature = "internals")]
#[deprecated(note = "this method is volatile and may change")]
pub fn eval_expr_from_ast(
&self,
scope: &mut Scope,
mods: &mut Imports,
state: &mut State,
lib: &Module,
this_ptr: &mut Option<&mut Dynamic>,
expr: &Expr,
level: usize,
) -> Result<Dynamic, Box<EvalAltResult>> {
self.eval_expr(scope, mods, state, lib, this_ptr, expr, level)
}
/// Evaluate an expression
fn eval_expr(
&self,
@ -2026,6 +2055,12 @@ impl Engine {
Expr::False(_) => Ok(false.into()),
Expr::Unit(_) => Ok(().into()),
Expr::Custom(x) => {
let func = (x.0).1.as_ref();
let exprs = (x.0).0.as_ref();
func(self, scope, mods, state, lib, this_ptr, exprs, level)
}
_ => unreachable!(),
};


@ -91,6 +91,7 @@ mod scope;
mod serde;
mod settings;
mod stdlib;
mod syntax;
mod token;
mod r#unsafe;
mod utils;
@ -153,13 +154,21 @@ pub use optimize::OptimizationLevel;
// Expose internal data structures.
+ #[cfg(feature = "internals")]
+ #[deprecated(note = "this type is volatile and may change")]
+ pub use error::LexError;
#[cfg(feature = "internals")]
#[deprecated(note = "this type is volatile and may change")]
pub use token::{get_next_token, parse_string_literal, InputStream, Token, TokenizeState};
#[cfg(feature = "internals")]
#[deprecated(note = "this type is volatile and may change")]
- pub use parser::{Expr, ReturnType, ScriptFnDef, Stmt};
+ pub use parser::{CustomExpr, Expr, ReturnType, ScriptFnDef, Stmt};
+ #[cfg(feature = "internals")]
+ #[deprecated(note = "this type is volatile and may change")]
+ pub use engine::{Imports, State as EvalState};
#[cfg(feature = "internals")]
#[deprecated(note = "this type is volatile and may change")]
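The new tests/syntax.rs (shown at the end of this commit) relies on these re-exports to write a custom syntax handler from outside the crate; its import line is:

use rhai::{
    Dynamic, Engine, EvalAltResult, EvalState, Expr, Imports, LexError, Module, Scope, INT,
};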


@ -2,7 +2,7 @@ use crate::any::Dynamic;
use crate::calc_fn_hash;
use crate::engine::{Engine, Imports, KEYWORD_DEBUG, KEYWORD_EVAL, KEYWORD_PRINT, KEYWORD_TYPE_OF};
use crate::module::Module;
- use crate::parser::{map_dynamic_to_expr, Expr, ReturnType, ScriptFnDef, Stmt, AST};
+ use crate::parser::{map_dynamic_to_expr, CustomExpr, Expr, ReturnType, ScriptFnDef, Stmt, AST};
use crate::scope::{Entry as ScopeEntry, EntryType as ScopeEntryType, Scope};
use crate::utils::StaticVec;
@ -598,6 +598,14 @@ fn optimize_expr(expr: Expr, state: &mut State) -> Expr {
state.find_constant(&name).expect("should find constant in scope!").clone().set_position(pos)
}
// Custom syntax
Expr::Custom(x) => Expr::Custom(Box::new((
CustomExpr(
(x.0).0.into_iter().map(|expr| optimize_expr(expr, state)).collect(),
(x.0).1),
x.1
))),
// All other expressions - skip
expr => expr,
}


@ -2,11 +2,16 @@
use crate::any::{Dynamic, Union};
use crate::calc_fn_hash;
- use crate::engine::{make_getter, make_setter, Engine, KEYWORD_THIS};
+ use crate::engine::{
+ make_getter, make_setter, Engine, KEYWORD_THIS, MARKER_BLOCK, MARKER_EXPR, MARKER_IDENT,
+ MARKER_STMT,
+ };
use crate::error::{LexError, ParseError, ParseErrorType};
+ use crate::fn_native::Shared;
use crate::module::{Module, ModuleRef};
use crate::optimize::{optimize_into_ast, OptimizationLevel};
use crate::scope::{EntryType as ScopeEntryType, Scope};
+ use crate::syntax::FnCustomSyntaxEval;
use crate::token::{Position, Token, TokenStream};
use crate::utils::{StaticVec, StraightHasherBuilder};
@ -568,6 +573,15 @@ impl Stmt {
}
}
#[derive(Clone)]
pub struct CustomExpr(pub StaticVec<Expr>, pub Shared<FnCustomSyntaxEval>);
impl fmt::Debug for CustomExpr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.0, f)
}
}
/// An expression.
///
/// Each variant is at most one pointer in size (for speed),
@ -632,6 +646,8 @@ pub enum Expr {
False(Position),
/// ()
Unit(Position),
/// Custom syntax
Custom(Box<(CustomExpr, Position)>),
}
impl Default for Expr {
@ -726,6 +742,8 @@ impl Expr {
Self::True(pos) | Self::False(pos) | Self::Unit(pos) => *pos,
Self::Dot(x) | Self::Index(x) => x.0.position(),
Self::Custom(x) => x.1,
}
}
@ -758,6 +776,7 @@ impl Expr {
Self::Assignment(x) => x.3 = new_pos,
Self::Dot(x) => x.2 = new_pos,
Self::Index(x) => x.2 = new_pos,
Self::Custom(x) => x.1 = new_pos,
}
self
@ -861,6 +880,8 @@ impl Expr {
Token::LeftParen => true,
_ => false,
},
Self::Custom(_) => false,
}
}
@ -2024,6 +2045,85 @@ fn parse_expr(
settings.pos = input.peek().unwrap().1;
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
// Check if it is a custom syntax.
if let Some(ref custom) = state.engine.custom_syntax {
let (token, pos) = input.peek().unwrap();
let token_pos = *pos;
match token {
Token::Custom(key) if custom.contains_key(key) => {
let custom = custom.get_key_value(key).unwrap();
let (key, syntax) = custom;
input.next().unwrap();
let mut exprs: StaticVec<Expr> = Default::default();
// Adjust the variables stack
match syntax.scope_delta {
delta if delta > 0 => {
state.stack.push(("".to_string(), ScopeEntryType::Normal))
}
delta if delta < 0 && state.stack.len() <= delta.abs() as usize => {
state.stack.clear()
}
delta if delta < 0 => state
.stack
.truncate(state.stack.len() - delta.abs() as usize),
_ => (),
}
for segment in syntax.segments.iter() {
settings.pos = input.peek().unwrap().1;
let settings = settings.level_up();
match segment.as_str() {
MARKER_IDENT => match input.next().unwrap() {
(Token::Identifier(s), pos) => {
exprs.push(Expr::Variable(Box::new(((s, pos), None, 0, None))));
}
(_, pos) => return Err(PERR::VariableExpected.into_err(pos)),
},
MARKER_EXPR => exprs.push(parse_expr(input, state, lib, settings)?),
MARKER_STMT => {
let stmt = parse_stmt(input, state, lib, settings)?
.unwrap_or_else(|| Stmt::Noop(settings.pos));
let pos = stmt.position();
exprs.push(Expr::Stmt(Box::new((stmt, pos))))
}
MARKER_BLOCK => {
let stmt = parse_block(input, state, lib, settings)?;
let pos = stmt.position();
exprs.push(Expr::Stmt(Box::new((stmt, pos))))
}
s => match input.peek().unwrap() {
(Token::Custom(custom), _) if custom == s => {
input.next().unwrap();
}
(t, _) if t.syntax().as_ref() == s => {
input.next().unwrap();
}
(_, pos) => {
return Err(PERR::MissingToken(
s.to_string(),
format!("for '{}' expression", key),
)
.into_err(*pos))
}
},
}
}
return Ok(Expr::Custom(Box::new((
CustomExpr(exprs, syntax.func.clone()),
token_pos,
))));
}
_ => (),
}
}
// Parse expression normally.
let lhs = parse_unary(input, state, lib, settings.level_up())?;
parse_binary_op(input, state, lib, 1, lhs, settings.level_up())
}
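Each marker in a registered segment list therefore contributes one entry to `exprs`, in order, while literal segments are only matched and consumed and produce no entry. For the `do $ident$ $block$ while $expr$` syntax registered in the new tests/syntax.rs, the handler receives roughly:

// exprs[0] - an Expr::Variable for the identifier following "do"
// exprs[1] - an Expr::Stmt wrapping the statement block captured by "$block$"
// exprs[2] - the condition expression captured by "$expr$" after "while"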
@ -2297,7 +2397,7 @@ fn parse_import(
fn parse_export(
input: &mut TokenStream,
state: &mut ParseState,
- lib: &mut FunctionsLib,
+ _lib: &mut FunctionsLib,
mut settings: ParseSettings,
) -> Result<Stmt, ParseError> {
settings.pos = eat_token(input, Token::Export);

src/syntax.rs (new file, 132 lines)

@ -0,0 +1,132 @@
use crate::any::Dynamic;
use crate::engine::{Engine, Imports, State, MARKER_BLOCK, MARKER_EXPR, MARKER_IDENT, MARKER_STMT};
use crate::error::LexError;
use crate::fn_native::{SendSync, Shared};
use crate::module::Module;
use crate::parser::Expr;
use crate::result::EvalAltResult;
use crate::scope::Scope;
use crate::token::{is_valid_identifier, Token};
use crate::utils::StaticVec;
use crate::stdlib::{
fmt,
rc::Rc,
string::{String, ToString},
sync::Arc,
};
/// A general function trait object.
#[cfg(not(feature = "sync"))]
pub type FnCustomSyntaxEval = dyn Fn(
&Engine,
&mut Scope,
&mut Imports,
&mut State,
&Module,
&mut Option<&mut Dynamic>,
&[Expr],
usize,
) -> Result<Dynamic, Box<EvalAltResult>>;
/// A general function trait object.
#[cfg(feature = "sync")]
pub type FnCustomSyntaxEval = dyn Fn(
&Engine,
&mut Scope,
&mut Imports,
&mut State,
&Module,
&mut Option<&mut Dynamic>,
&[Expr],
usize,
) -> Result<Dynamic, Box<EvalAltResult>>
+ Send
+ Sync;
#[derive(Clone)]
pub struct CustomSyntax {
pub segments: StaticVec<String>,
pub func: Shared<FnCustomSyntaxEval>,
pub scope_delta: isize,
}
impl fmt::Debug for CustomSyntax {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.segments, f)
}
}
impl Engine {
pub fn add_custom_syntax<S: AsRef<str> + ToString>(
&mut self,
value: &[S],
scope_delta: isize,
func: impl Fn(
&Engine,
&mut Scope,
&mut Imports,
&mut State,
&Module,
&mut Option<&mut Dynamic>,
&[Expr],
usize,
) -> Result<Dynamic, Box<EvalAltResult>>
+ SendSync
+ 'static,
) -> Result<(), Box<LexError>> {
if value.is_empty() {
return Err(Box::new(LexError::ImproperSymbol("".to_string())));
}
let mut segments: StaticVec<_> = Default::default();
for s in value {
let seg = match s.as_ref() {
// Markers not in first position
MARKER_EXPR | MARKER_STMT | MARKER_BLOCK | MARKER_IDENT if !segments.is_empty() => {
s.to_string()
}
// Standard symbols not in first position
s if !segments.is_empty() && Token::lookup_from_syntax(s).is_some() => s.into(),
// Custom keyword
s if is_valid_identifier(s.chars()) => {
if self.custom_keywords.is_none() {
self.custom_keywords = Some(Default::default());
}
if !self.custom_keywords.as_ref().unwrap().contains_key(s) {
self.custom_keywords.as_mut().unwrap().insert(s.into(), 0);
}
s.into()
}
// Anything else is an error
_ => return Err(Box::new(LexError::ImproperSymbol(s.to_string()))),
};
segments.push(seg);
}
let key = segments.remove(0);
let syntax = CustomSyntax {
segments,
#[cfg(not(feature = "sync"))]
func: Rc::new(func),
#[cfg(feature = "sync")]
func: Arc::new(func),
scope_delta,
};
if self.custom_syntax.is_none() {
self.custom_syntax = Some(Default::default());
}
self.custom_syntax
.as_mut()
.unwrap()
.insert(key, syntax.into());
Ok(())
}
}
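To illustrate the shape of this API, here is a minimal, self-contained sketch (it requires the `internals` feature; the `double` keyword and its doubling handler are invented for this example and are not part of the commit):

use rhai::{Dynamic, Engine, EvalAltResult, EvalState, Expr, Imports, Module, Scope, INT};

fn main() -> Result<(), Box<EvalAltResult>> {
    let mut engine = Engine::new();

    // One leading keyword plus one expression marker; scope_delta is 0 because
    // the handler does not add or remove any variables.
    engine
        .add_custom_syntax(
            &["double", "$expr$"],
            0,
            |engine: &Engine,
             scope: &mut Scope,
             mods: &mut Imports,
             state: &mut EvalState,
             lib: &Module,
             this_ptr: &mut Option<&mut Dynamic>,
             exprs: &[Expr],
             level: usize| {
                // exprs[0] is whatever the "$expr$" marker captured.
                let value = engine
                    .eval_expr_from_ast(scope, mods, state, lib, this_ptr, exprs.get(0).unwrap(), level)?
                    .cast::<INT>(); // assumes the captured expression yields an integer
                Ok((value * 2).into())
            },
        )
        .unwrap();

    // "$expr$" greedily parses "20 + 1", so the whole expression evaluates to 42.
    assert_eq!(engine.eval::<INT>("double 20 + 1")?, 42);
    Ok(())
}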


@ -312,6 +312,87 @@ impl Token {
}
}
/// Reverse lookup a token from a piece of syntax.
pub fn lookup_from_syntax(syntax: &str) -> Option<Self> {
use Token::*;
Some(match syntax {
"{" => LeftBrace,
"}" => RightBrace,
"(" => LeftParen,
")" => RightParen,
"[" => LeftBracket,
"]" => RightBracket,
"+" => Plus,
"-" => Minus,
"*" => Multiply,
"/" => Divide,
";" => SemiColon,
":" => Colon,
"::" => DoubleColon,
"," => Comma,
"." => Period,
"#{" => MapStart,
"=" => Equals,
"true" => True,
"false" => False,
"let" => Let,
"const" => Const,
"if" => If,
"else" => Else,
"while" => While,
"loop" => Loop,
"for" => For,
"in" => In,
"<" => LessThan,
">" => GreaterThan,
"!" => Bang,
"<=" => LessThanEqualsTo,
">=" => GreaterThanEqualsTo,
"==" => EqualsTo,
"!=" => NotEqualsTo,
"|" => Pipe,
"||" => Or,
"&" => Ampersand,
"&&" => And,
#[cfg(not(feature = "no_function"))]
"fn" => Fn,
"continue" => Continue,
"break" => Break,
"return" => Return,
"throw" => Throw,
"+=" => PlusAssign,
"-=" => MinusAssign,
"*=" => MultiplyAssign,
"/=" => DivideAssign,
"<<=" => LeftShiftAssign,
">>=" => RightShiftAssign,
"&=" => AndAssign,
"|=" => OrAssign,
"^=" => XOrAssign,
"<<" => LeftShift,
">>" => RightShift,
"^" => XOr,
"%" => Modulo,
"%=" => ModuloAssign,
"~" => PowerOf,
"~=" => PowerOfAssign,
#[cfg(not(feature = "no_function"))]
"private" => Private,
#[cfg(not(feature = "no_module"))]
"import" => Import,
#[cfg(not(feature = "no_module"))]
"export" => Export,
#[cfg(not(feature = "no_module"))]
"as" => As,
"===" | "!==" | "->" | "<-" | "=>" | ":=" | "::<" | "(*" | "*)" | "#" => {
Reserved(syntax.into())
}
_ => return None,
})
}
// Is this token EOF?
pub fn is_eof(&self) -> bool {
use Token::*;
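Because `Token` is re-exported under the `internals` feature, the new reverse lookup can be exercised directly from a test in the crate's own test suite; a small sketch (the test name is made up):

#[cfg(feature = "internals")]
#[test]
fn lookup_from_syntax_examples() {
    use rhai::Token;

    assert!(matches!(Token::lookup_from_syntax("while"), Some(Token::While)));
    assert!(matches!(Token::lookup_from_syntax("#"), Some(Token::Reserved(_))));
    assert!(Token::lookup_from_syntax("not_a_symbol").is_none());
}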
@ -628,9 +709,9 @@ pub fn parse_string_literal(
}
/// Consume the next character.
- fn eat_next(stream: &mut impl InputStream, pos: &mut Position) {
+ fn eat_next(stream: &mut impl InputStream, pos: &mut Position) -> Option<char> {
- stream.get_next();
pos.advance();
+ stream.get_next()
}
/// Scan for a block comment until the end.
@ -858,35 +939,8 @@ fn get_next_token_inner(
}
return Some((
- match identifier.as_str() {
- "true" => Token::True,
- "false" => Token::False,
- "let" => Token::Let,
- "const" => Token::Const,
- "if" => Token::If,
- "else" => Token::Else,
- "while" => Token::While,
- "loop" => Token::Loop,
- "continue" => Token::Continue,
- "break" => Token::Break,
- "return" => Token::Return,
- "throw" => Token::Throw,
- "for" => Token::For,
- "in" => Token::In,
- #[cfg(not(feature = "no_function"))]
- "private" => Token::Private,
- #[cfg(not(feature = "no_module"))]
- "import" => Token::Import,
- #[cfg(not(feature = "no_module"))]
- "export" => Token::Export,
- #[cfg(not(feature = "no_module"))]
- "as" => Token::As,
- #[cfg(not(feature = "no_function"))]
- "fn" => Token::Fn,
- _ => Token::Identifier(identifier),
- },
+ Token::lookup_from_syntax(&identifier)
+ .unwrap_or_else(|| Token::Identifier(identifier)),
start_pos,
));
}
@ -947,6 +1001,7 @@ fn get_next_token_inner(
eat_next(stream, pos);
return Some((Token::MapStart, start_pos));
}
('#', _) => return Some((Token::Reserved("#".into()), start_pos)),
// Operators
('+', '=') => {
@ -1163,40 +1218,42 @@ fn get_next_token_inner(
}
/// A type that implements the `InputStream` trait.
- /// Multiple charaacter streams are jointed together to form one single stream.
+ /// Multiple character streams are jointed together to form one single stream.
pub struct MultiInputsStream<'a> {
/// The input character streams.
streams: StaticVec<Peekable<Chars<'a>>>,
+ /// The current stream index.
+ index: usize,
}
impl InputStream for MultiInputsStream<'_> {
/// Get the next character
fn get_next(&mut self) -> Option<char> {
loop {
- if self.streams.is_empty() {
+ if self.index >= self.streams.len() {
// No more streams
return None;
- } else if let Some(ch) = self.streams[0].next() {
+ } else if let Some(ch) = self.streams[self.index].next() {
// Next character in current stream
return Some(ch);
} else {
// Jump to the next stream
- let _ = self.streams.remove(0);
+ self.index += 1;
}
}
}
/// Peek the next character
fn peek_next(&mut self) -> Option<char> {
loop {
- if self.streams.is_empty() {
+ if self.index >= self.streams.len() {
// No more streams
return None;
- } else if let Some(ch) = self.streams[0].peek() {
+ } else if let Some(&ch) = self.streams[self.index].peek() {
// Next character in current stream
- return Some(*ch);
+ return Some(ch);
} else {
// Jump to the next stream
- let _ = self.streams.remove(0);
+ self.index += 1;
}
}
}
@ -1252,7 +1309,11 @@ impl<'a> Iterator for TokenIterator<'a, '_> {
.to_string(),
))),
"(*" | "*)" => Token::LexError(Box::new(LERR::ImproperSymbol(
- "'(* .. *)' is not a valid comment style. This is not Pascal! Should it be '/* .. */'?"
+ "'(* .. *)' is not a valid comment format. This is not Pascal! Should it be '/* .. */'?"
+ .to_string(),
+ ))),
+ "#" => Token::LexError(Box::new(LERR::ImproperSymbol(
+ "'#' is not a valid symbol. Should it be '#{'?"
.to_string(),
))),
token => Token::LexError(Box::new(LERR::ImproperSymbol(
@ -1298,6 +1359,7 @@ pub fn lex<'a, 'e>(input: &'a [&'a str], engine: &'e Engine) -> TokenIterator<'a
pos: Position::new(1, 0),
stream: MultiInputsStream {
streams: input.iter().map(|s| s.chars().peekable()).collect(),
index: 0,
},
}
}


@ -21,7 +21,7 @@ fn test_map_indexing() -> Result<(), Box<EvalAltResult>> {
r#"
let y = #{d: 1, "e": #{a: 42, b: 88, "": "hello"}, " 123 xyz": 9};
y.e[""][4]
"#
)?,
'o'
);
@ -47,7 +47,7 @@ fn test_map_indexing() -> Result<(), Box<EvalAltResult>> {
let x = #{a: 1, b: 2, c: 3};
let c = x.remove("c");
x.len() + c
"#
)?,
5
);
@ -58,7 +58,7 @@ fn test_map_indexing() -> Result<(), Box<EvalAltResult>> {
let y = #{b: 42, d: 9};
x.mixin(y);
x.len() + x.b
"
)?,
46
);
@ -68,7 +68,7 @@ fn test_map_indexing() -> Result<(), Box<EvalAltResult>> {
let x = #{a: 1, b: 2, c: 3};
x += #{b: 42, d: 9};
x.len() + x.b
"
)?,
46
);
@ -79,7 +79,7 @@ fn test_map_indexing() -> Result<(), Box<EvalAltResult>> {
let x = #{a: 1, b: 2, c: 3};
let y = #{b: 42, d: 9};
x + y
"
)?
.len(),
4
@ -94,27 +94,9 @@ fn test_map_assign() -> Result<(), Box<EvalAltResult>> {
let x = engine.eval::<Map>(r#"let x = #{a: 1, b: true, "c$": "hello"}; x"#)?;
- assert_eq!(
- x.get("a")
- .cloned()
- .expect("should have property a")
- .cast::<INT>(),
- 1
- );
- assert_eq!(
- x.get("b")
- .cloned()
- .expect("should have property b")
- .cast::<bool>(),
- true
- );
- assert_eq!(
- x.get("c$")
- .cloned()
- .expect("should have property c$")
- .cast::<String>(),
- "hello"
- );
+ assert_eq!(x["a"].clone().cast::<INT>(), 1);
+ assert_eq!(x["b"].clone().cast::<bool>(), true);
+ assert_eq!(x["c$"].clone().cast::<String>(), "hello");
Ok(())
}
@ -125,27 +107,9 @@ fn test_map_return() -> Result<(), Box<EvalAltResult>> {
let x = engine.eval::<Map>(r#"#{a: 1, b: true, "c$": "hello"}"#)?;
- assert_eq!(
- x.get("a")
- .cloned()
- .expect("should have property a")
- .cast::<INT>(),
- 1
- );
- assert_eq!(
- x.get("b")
- .cloned()
- .expect("should have property b")
- .cast::<bool>(),
- true
- );
- assert_eq!(
- x.get("c$")
- .cloned()
- .expect("should have property c$")
- .cast::<String>(),
- "hello"
- );
+ assert_eq!(x["a"].clone().cast::<INT>(), 1);
+ assert_eq!(x["b"].clone().cast::<bool>(), true);
+ assert_eq!(x["c$"].clone().cast::<String>(), "hello");
Ok(())
}
@ -167,7 +131,7 @@ fn test_map_for() -> Result<(), Box<EvalAltResult>> {
}
s
"#
)?
.len(),
11
@ -188,41 +152,11 @@ fn test_map_json() -> Result<(), Box<EvalAltResult>> {
assert!(!map.contains_key("x"));
- assert_eq!(
- map.get("a")
- .cloned()
- .expect("should have property a")
- .cast::<INT>(),
- 1
- );
- assert_eq!(
- map.get("b")
- .cloned()
- .expect("should have property b")
- .cast::<bool>(),
- true
- );
- assert_eq!(
- map.get("c")
- .cloned()
- .expect("should have property a")
- .cast::<INT>(),
- 42
- );
- assert_eq!(
- map.get("$d e f!")
- .cloned()
- .expect("should have property $d e f!")
- .cast::<String>(),
- "hello"
- );
- assert_eq!(
- map.get("z")
- .cloned()
- .expect("should have property z")
- .cast::<()>(),
- ()
- );
+ assert_eq!(map["a"].clone().cast::<INT>(), 1);
+ assert_eq!(map["b"].clone().cast::<bool>(), true);
+ assert_eq!(map["c"].clone().cast::<INT>(), 42);
+ assert_eq!(map["$d e f!"].clone().cast::<String>(), "hello");
+ assert_eq!(map["z"].clone().cast::<()>(), ());
#[cfg(not(feature = "no_index"))]
{
@ -241,7 +175,7 @@ fn test_map_json() -> Result<(), Box<EvalAltResult>> {
}
s
"#
)?
.len(),
11
@ -265,7 +199,7 @@ fn test_map_oop() -> Result<(), Box<EvalAltResult>> {
obj.action(2);
obj.data
"#,
)?,
42
);

tests/syntax.rs (new file, 69 lines)

@ -0,0 +1,69 @@
#![cfg(feature = "internals")]
use rhai::{
Dynamic, Engine, EvalAltResult, EvalState, Expr, Imports, LexError, Module, Scope, INT,
};
#[test]
fn test_custom_syntax() -> Result<(), Box<EvalAltResult>> {
let mut engine = Engine::new();
engine
.add_custom_syntax(
&["do", "$ident$", "$block$", "while", "$expr$"],
1,
|engine: &Engine,
scope: &mut Scope,
mods: &mut Imports,
state: &mut EvalState,
lib: &Module,
this_ptr: &mut Option<&mut Dynamic>,
exprs: &[Expr],
level: usize| {
let var_name = match exprs.get(0).unwrap() {
Expr::Variable(s) => (s.0).0.clone(),
_ => unreachable!(),
};
let stmt = exprs.get(1).unwrap();
let expr = exprs.get(2).unwrap();
scope.push(var_name, 0 as INT);
loop {
engine.eval_expr_from_ast(scope, mods, state, lib, this_ptr, stmt, level)?;
if !engine
.eval_expr_from_ast(scope, mods, state, lib, this_ptr, expr, level)?
.as_bool()
.map_err(|_| {
EvalAltResult::ErrorBooleanArgMismatch(
"do-while".into(),
expr.position(),
)
})?
{
break;
}
}
Ok(().into())
},
)
.unwrap();
assert!(matches!(
*engine.add_custom_syntax(&["!"], 0, |_, _, _, _, _, _, _, _| Ok(().into())).expect_err("should error"),
LexError::ImproperSymbol(s) if s == "!"
));
assert_eq!(
engine.eval::<INT>(
r"
do x { x += 1 } while x < 42;
x
"
)?,
42
);
Ok(())
}