Better handling of reserved keywords.

Stephen Chung 2020-07-26 22:25:30 +08:00
parent ce20248792
commit ec5082c328
6 changed files with 80 additions and 28 deletions
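The gist: reserved keywords that also name built-in functions (such as `print`, `debug`, `type_of` and `eval`; see `is_keyword_function` below) may still appear in call position and as script-defined function names, while all other reserved keywords (now including `exit`) are rejected at parse time with a reserved-keyword error. A minimal sketch of the intended behaviour, assuming the public `rhai::Engine` API of this release line (not part of the diff):

use rhai::Engine;

fn main() {
    let engine = Engine::new();

    // `type_of` is reserved but names a built-in function, so a call still parses.
    assert!(engine.compile("type_of(42)").is_ok());

    // `exit` is merely reserved: using it as a function name is now rejected
    // with a reserved-keyword parse error.
    assert!(engine.compile("exit(0)").is_err());
}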

View File

@@ -43,8 +43,9 @@ Reserved Keywords
| `static` | Variable declaration |
| `do` | Looping |
| `each` | Looping |
| `then` | Conditional |
| `goto` | Jump |
| `then` | Control flow |
| `goto` | Control flow |
| `exit` | Control flow |
| `switch` | Matching |
| `match` | Matching |
| `case` | Matching |

View File

@@ -9,7 +9,7 @@ The following are reserved keywords in Rhai:
| ------------------------------------------------- | ---------------------------------------- | --------------------- | :--------------------: |
| `true`, `false` | | Boolean constants | |
| `let`, `const` | `var`, `static` | Variable declarations | |
| `if`, `else` | `then`, `goto` | Control flow | |
| `if`, `else` | `then`, `goto`, `exit` | Control flow | |
| | `switch`, `match`, `case` | Matching | |
| `while`, `loop`, `for`, `in`, `continue`, `break` | `do`, `each` | Looping | |
| `fn`, `private` | `public`, `new` | Functions | [`no_function`] |

View File

@@ -19,6 +19,7 @@ use crate::engine::{FN_IDX_GET, FN_IDX_SET};
use crate::{
engine::{make_getter, make_setter, Map},
fn_register::RegisterFn,
token::Token,
};
#[cfg(not(feature = "no_function"))]
@@ -590,7 +591,7 @@ impl Engine {
scripts: &[&str],
optimization_level: OptimizationLevel,
) -> Result<AST, ParseError> {
let stream = lex(scripts, self);
let stream = lex(scripts, None, self);
self.parse(&mut stream.peekable(), scope, optimization_level)
}
@@ -715,7 +716,19 @@ impl Engine {
// Trims the JSON string and adds a '#' in front
let scripts = ["#", json.trim()];
let stream = lex(&scripts, self);
let stream = lex(
&scripts,
if has_null {
Some(Box::new(|token| match token {
// If `null` is present, make sure `null` is treated as a variable
Token::Reserved(s) if s == "null" => Token::Identifier(s),
_ => token,
}))
} else {
None
},
self,
);
let ast =
self.parse_global_expr(&mut stream.peekable(), &scope, OptimizationLevel::None)?;
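This optional token-mapping closure is what lets `parse_json` accept JSON `null` when `has_null` is set: the reserved `null` token is remapped to an ordinary identifier before parsing, and the documented behaviour of `parse_json` is to bind it to `()`. A hedged usage sketch against the public API (not part of the diff):

use rhai::{Engine, Map};

fn main() -> Result<(), Box<rhai::EvalAltResult>> {
    let engine = Engine::new();

    // With `has_null` set, `null` is tokenized as an identifier instead of a
    // reserved keyword, so the JSON parses and the value maps to `()`.
    let map: Map = engine.parse_json(r#"{ "a": null, "b": 42 }"#, true)?;
    assert_eq!(map.len(), 2);
    Ok(())
}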
@@ -796,7 +809,7 @@ impl Engine {
script: &str,
) -> Result<AST, ParseError> {
let scripts = [script];
let stream = lex(&scripts, self);
let stream = lex(&scripts, None, self);
{
let mut peekable = stream.peekable();
self.parse_global_expr(&mut peekable, scope, self.optimization_level)
@@ -951,7 +964,7 @@ impl Engine {
script: &str,
) -> Result<T, Box<EvalAltResult>> {
let scripts = [script];
let stream = lex(&scripts, self);
let stream = lex(&scripts, None, self);
// No need to optimize a lone expression
let ast = self.parse_global_expr(&mut stream.peekable(), scope, OptimizationLevel::None)?;
@@ -1084,7 +1097,7 @@ impl Engine {
script: &str,
) -> Result<(), Box<EvalAltResult>> {
let scripts = [script];
let stream = lex(&scripts, self);
let stream = lex(&scripts, None, self);
let ast = self.parse(&mut stream.peekable(), scope, self.optimization_level)?;
self.consume_ast_with_scope(scope, &ast)
}

View File

@@ -172,7 +172,7 @@ impl ParseErrorType {
Self::VariableExpected => "Expecting name of a variable",
Self::Reserved(_) => "Invalid use of reserved keyword",
Self::ExprExpected(_) => "Expecting an expression",
Self::FnMissingName => "Expecting name in function declaration",
Self::FnMissingName => "Expecting function name in function declaration",
Self::FnMissingParams(_) => "Expecting parameters in function declaration",
Self::FnDuplicatedParam(_,_) => "Duplicated parameters in function declaration",
Self::FnMissingBody(_) => "Expecting body statement block for function declaration",

View File

@@ -9,7 +9,7 @@ use crate::module::{Module, ModuleRef};
use crate::optimize::{optimize_into_ast, OptimizationLevel};
use crate::scope::{EntryType as ScopeEntryType, Scope};
use crate::syntax::FnCustomSyntaxEval;
use crate::token::{is_valid_identifier, Position, Token, TokenStream};
use crate::token::{is_keyword_function, is_valid_identifier, Position, Token, TokenStream};
use crate::utils::{StaticVec, StraightHasherBuilder};
#[cfg(not(feature = "no_function"))]
@@ -1044,7 +1044,7 @@ fn parse_paren_expr(
}
/// Parse a function call.
fn parse_call_expr(
fn parse_fn_call(
input: &mut TokenStream,
state: &mut ParseState,
lib: &mut FunctionsLib,
@@ -1553,8 +1553,12 @@ fn parse_primary(
Expr::Variable(Box::new(((s, settings.pos), None, 0, index)))
}
// Function call is allowed to have reserved keyword
Token::Reserved(s) if s != KEYWORD_THIS && input.peek().unwrap().0 == Token::LeftParen => {
Expr::Variable(Box::new(((s, settings.pos), None, 0, None)))
Token::Reserved(s) if input.peek().unwrap().0 == Token::LeftParen => {
if is_keyword_function(&s) {
Expr::Variable(Box::new(((s, settings.pos), None, 0, None)))
} else {
return Err(PERR::Reserved(s).into_err(settings.pos));
}
}
// Access to `this` as a variable is OK
Token::Reserved(s) if s == KEYWORD_THIS && input.peek().unwrap().0 != Token::LeftParen => {
@@ -1601,7 +1605,7 @@ fn parse_primary(
(Expr::Variable(x), Token::LeftParen) => {
let ((name, pos), modules, _, _) = *x;
settings.pos = pos;
parse_call_expr(input, state, lib, name, modules, settings.level_up())?
parse_fn_call(input, state, lib, name, modules, settings.level_up())?
}
(Expr::Property(_), _) => unreachable!(),
// module access
@@ -2882,14 +2886,12 @@ fn parse_fn(
#[cfg(not(feature = "unchecked"))]
settings.ensure_level_within_max_limit(state.max_expr_depth)?;
let name = match input.next().unwrap() {
(Token::Identifier(s), _) | (Token::Custom(s), _) | (Token::Reserved(s), _)
if s != KEYWORD_THIS && is_valid_identifier(s.chars()) =>
{
s
}
(_, pos) => return Err(PERR::FnMissingName.into_err(pos)),
};
let (token, pos) = input.next().unwrap();
let name = token.into_function_name().map_err(|t| match t {
Token::Reserved(s) => PERR::Reserved(s).into_err(pos),
_ => PERR::FnMissingName.into_err(pos),
})?;
match input.peek().unwrap() {
(Token::LeftParen, _) => eat_token(input, Token::LeftParen),
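With `into_function_name` (added in `token.rs` below), a script-defined function may reuse a keyword that names a built-in function, for example to override `print`, while any other reserved keyword is reported as `ParseErrorType::Reserved` rather than the vaguer `FnMissingName`. A small sketch of the expected outcomes, assuming default features (not part of the diff):

use rhai::Engine;

fn main() {
    let engine = Engine::new();

    // `print` is a keyword-function, so it is a legal script function name.
    assert!(engine.compile("fn print(x) { x }").is_ok());

    // `exit` is reserved but not a keyword-function, so declaring it fails.
    assert!(engine.compile("fn exit() { }").is_err());
}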

View File

@@ -494,10 +494,9 @@ impl Token {
"===" | "!==" | "->" | "<-" | "=>" | ":=" | "::<" | "(*" | "*)" | "#" | "public"
| "new" | "use" | "module" | "package" | "var" | "static" | "with" | "do" | "each"
| "then" | "goto" | "switch" | "match" | "case" | "try" | "catch" | "default"
| "void" | "null" | "nil" | "spawn" | "go" | "async" | "await" | "yield" => {
Reserved(syntax.into())
}
| "then" | "goto" | "exit" | "switch" | "match" | "case" | "try" | "catch"
| "default" | "void" | "null" | "nil" | "spawn" | "go" | "async" | "await"
| "yield" => Reserved(syntax.into()),
KEYWORD_PRINT | KEYWORD_DEBUG | KEYWORD_TYPE_OF | KEYWORD_EVAL | KEYWORD_FN_PTR
| KEYWORD_FN_PTR_CALL | KEYWORD_FN_PTR_CURRY | KEYWORD_THIS => Reserved(syntax.into()),
@@ -670,6 +669,15 @@ impl Token {
}
}
/// Convert a token into a function name, if possible.
pub fn into_function_name(self) -> Result<String, Self> {
match self {
Self::Reserved(s) if is_keyword_function(&s) => Ok(s),
Self::Custom(s) | Self::Identifier(s) if is_valid_identifier(s.chars()) => Ok(s),
_ => Err(self),
}
}
/// Is this token a custom keyword?
pub fn is_custom(&self) -> bool {
match self {
@@ -718,6 +726,16 @@ pub trait InputStream {
fn peek_next(&mut self) -> Option<char>;
}
pub fn is_keyword_function(name: &str) -> bool {
name == KEYWORD_PRINT
|| name == KEYWORD_DEBUG
|| name == KEYWORD_TYPE_OF
|| name == KEYWORD_EVAL
|| name == KEYWORD_FN_PTR
|| name == KEYWORD_FN_PTR_CALL
|| name == KEYWORD_FN_PTR_CURRY
}
pub fn is_valid_identifier(name: impl Iterator<Item = char>) -> bool {
let mut first_alphabetic = false;
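A unit-test-style sketch of the two new helpers, as it might look inside the crate; the `Token::Reserved` payload is a plain `String` at this point (illustration only, not part of the diff):

#[cfg(test)]
mod reserved_keyword_tests {
    use super::{is_keyword_function, Token};

    #[test]
    fn keyword_functions_make_valid_names() {
        // Built-in function keywords pass the check...
        assert!(is_keyword_function("print"));
        assert!(is_keyword_function("type_of"));
        // ...while ordinary reserved keywords do not.
        assert!(!is_keyword_function("exit"));

        // `into_function_name` accepts keyword-functions (and identifiers),
        // and hands the offending token back otherwise.
        assert!(matches!(
            Token::Reserved("print".to_string()).into_function_name(),
            Ok(s) if s == "print"
        ));
        assert!(Token::Reserved("goto".to_string()).into_function_name().is_err());
    }
}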
@@ -1456,13 +1474,15 @@ pub struct TokenIterator<'a, 'e> {
pos: Position,
/// Input character stream.
stream: MultiInputsStream<'a>,
/// A processor function (if any) that maps a token to another.
map: Option<Box<dyn Fn(Token) -> Token>>,
}
impl<'a> Iterator for TokenIterator<'a, '_> {
type Item = (Token, Position);
fn next(&mut self) -> Option<Self::Item> {
match (
let token = match (
get_next_token(&mut self.stream, &mut self.state, &mut self.pos),
self.engine.disabled_symbols.as_ref(),
self.engine.custom_keywords.as_ref(),
@@ -1544,12 +1564,27 @@ impl<'a> Iterator for TokenIterator<'a, '_> {
Some((Token::Reserved(token.syntax().into()), pos))
}
(r, _, _) => r,
};
match token {
None => None,
Some((token, pos)) => {
if let Some(ref map) = self.map {
Some((map(token), pos))
} else {
Some((token, pos))
}
}
}
}
}
/// Tokenize an input text stream.
pub fn lex<'a, 'e>(input: &'a [&'a str], engine: &'e Engine) -> TokenIterator<'a, 'e> {
pub fn lex<'a, 'e>(
input: &'a [&'a str],
map: Option<Box<dyn Fn(Token) -> Token>>,
engine: &'e Engine,
) -> TokenIterator<'a, 'e> {
TokenIterator {
engine,
state: TokenizeState {
@@ -1567,5 +1602,6 @@ pub fn lex<'a, 'e>(input: &'a [&'a str], engine: &'e Engine) -> TokenIterator<'a
streams: input.iter().map(|s| s.chars().peekable()).collect(),
index: 0,
},
map,
}
}
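For completeness, a crate-internal sketch of the new `lex` signature with a mapping closure, mirroring the `parse_json` call above; the mapping runs after the engine's disabled-symbol and custom-keyword processing (hypothetical helper for illustration, not part of the diff):

// Hypothetical crate-internal helper: `lex`, `Token` and `TokenIterator` are
// not re-exported publicly at this point.
use crate::engine::Engine;
use crate::token::{lex, Token, TokenIterator};

fn lex_treating_exit_as_identifier<'a, 'e>(
    scripts: &'a [&'a str],
    engine: &'e Engine,
) -> TokenIterator<'a, 'e> {
    lex(
        scripts,
        Some(Box::new(|token| match token {
            // Remap the reserved `exit` keyword to an ordinary identifier.
            Token::Reserved(s) if s == "exit" => Token::Identifier(s),
            _ => token,
        })),
        engine,
    )
}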