Simplify parsing by expecting that the token stream will never be exhausted.
commit 5aaaa7be3b (parent a4bf572d5a)
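
In short: the tokenizer is now trusted to keep yielding tokens for as long as the parser asks (an unexpected end is treated as an internal error rather than a `None` value), so the parsing helpers can call `.unwrap()` on `input.next()` / `input.peek()` instead of carrying an EOF branch that can no longer be reached. A minimal sketch of the before/after pattern — the `eat_token_old` / `eat_token_new` names and the stand-in `Token` / `Position` types are made up for this example; the real signatures appear in the diff below:

use std::iter::Peekable;

// Stand-in types for illustration only; the crate's Token, Position and
// TokenIterator are richer than this.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Token { Plus, Minus }
type Position = usize;

// Before: the iterator might end at any time, so every helper carried an EOF branch.
fn eat_token_old<I: Iterator<Item = (Token, Position)>>(
    input: &mut Peekable<I>,
    token: Token,
) -> Position {
    if let Some((t, pos)) = input.next() {
        if t != token {
            panic!("expecting {:?} (found {:?}) at {}", token, t, pos);
        }
        pos
    } else {
        panic!("expecting {:?} but already EOF", token);
    }
}

// After: the stream is assumed never to be exhausted, so the EOF branch disappears
// and running off the end is simply a bug, hence the unwrap().
fn eat_token_new<I: Iterator<Item = (Token, Position)>>(
    input: &mut Peekable<I>,
    token: Token,
) -> Position {
    let (t, pos) = input.next().unwrap();
    if t != token {
        panic!("expecting {:?} (found {:?}) at {}", token, t, pos);
    }
    pos
}

fn main() {
    let mut input = vec![(Token::Plus, 1), (Token::Minus, 2)].into_iter().peekable();
    assert_eq!(eat_token_new(&mut input, Token::Plus), 1);
    assert_eq!(eat_token_old(&mut input, Token::Minus), 2);
}

This lines up with the last hunk of the diff, which makes the tokenizer itself panic ("should not be EOF") instead of ever letting the parser observe an exhausted stream.
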
@@ -178,7 +178,7 @@ fn optimize_stmt<'a>(stmt: Stmt, state: &mut State<'a>, preserve_result: bool) -
             Box::new(optimize_expr(expr, state)),
             Box::new(optimize_stmt(*if_block, state, true)),
             match optimize_stmt(*else_block, state, true) {
-                stmt if matches!(stmt, Stmt::Noop(_)) => None, // Noop -> no else block
+                Stmt::Noop(_) => None, // Noop -> no else block
                 stmt => Some(Box::new(stmt)),
             },
         ),
@@ -508,7 +508,8 @@ impl Expr {
 
 /// Consume a particular token, checking that it is the expected one.
 fn eat_token(input: &mut Peekable<TokenIterator>, token: Token) -> Position {
-    if let Some((t, pos)) = input.next() {
+    let (t, pos) = input.next().unwrap();
+
     if t != token {
         panic!(
             "expecting {} (found {}) at {}",
@@ -518,9 +519,6 @@ fn eat_token(input: &mut Peekable<TokenIterator>, token: Token) -> Position {
         );
     }
     pos
-    } else {
-        panic!("expecting {} but already EOF", token.syntax());
-    }
 }
 
 /// Match a particular token, consuming it if matched.
@@ -1269,14 +1267,11 @@ fn parse_binary_op<'a>(
             return Ok(current_lhs);
         }
 
-        if let Some((op_token, pos)) = input.next() {
+        let (op_token, pos) = input.next().unwrap();
 
         let rhs = parse_unary(input, allow_stmt_expr)?;
 
-        let next_precedence = if let Some((next_op, _)) = input.peek() {
-            next_op.precedence()
-        } else {
-            0
-        };
+        let next_precedence = input.peek().unwrap().0.precedence();
 
         // Bind to right if the next operator has higher precedence
         // If same precedence, then check if the operator binds right
@@ -1292,17 +1287,11 @@ fn parse_binary_op<'a>(
         current_lhs = match op_token {
             Token::Plus => Expr::FunctionCall("+".into(), vec![current_lhs, rhs], None, pos),
             Token::Minus => Expr::FunctionCall("-".into(), vec![current_lhs, rhs], None, pos),
-            Token::Multiply => {
-                Expr::FunctionCall("*".into(), vec![current_lhs, rhs], None, pos)
-            }
+            Token::Multiply => Expr::FunctionCall("*".into(), vec![current_lhs, rhs], None, pos),
             Token::Divide => Expr::FunctionCall("/".into(), vec![current_lhs, rhs], None, pos),
 
-            Token::LeftShift => {
-                Expr::FunctionCall("<<".into(), vec![current_lhs, rhs], None, pos)
-            }
-            Token::RightShift => {
-                Expr::FunctionCall(">>".into(), vec![current_lhs, rhs], None, pos)
-            }
+            Token::LeftShift => Expr::FunctionCall("<<".into(), vec![current_lhs, rhs], None, pos),
+            Token::RightShift => Expr::FunctionCall(">>".into(), vec![current_lhs, rhs], None, pos),
             Token::Modulo => Expr::FunctionCall("%".into(), vec![current_lhs, rhs], None, pos),
             Token::PowerOf => Expr::FunctionCall("~".into(), vec![current_lhs, rhs], None, pos),
 
@@ -1328,9 +1317,7 @@ fn parse_binary_op<'a>(
 
             Token::Or => Expr::Or(Box::new(current_lhs), Box::new(rhs), pos),
             Token::And => Expr::And(Box::new(current_lhs), Box::new(rhs), pos),
-            Token::Ampersand => {
-                Expr::FunctionCall("&".into(), vec![current_lhs, rhs], None, pos)
-            }
+            Token::Ampersand => Expr::FunctionCall("&".into(), vec![current_lhs, rhs], None, pos),
             Token::Pipe => Expr::FunctionCall("|".into(), vec![current_lhs, rhs], None, pos),
             Token::XOr => Expr::FunctionCall("^".into(), vec![current_lhs, rhs], None, pos),
 
@@ -1367,7 +1354,6 @@ fn parse_binary_op<'a>(
         };
     }
 }
-}
 
 /// Parse an expression.
 fn parse_expr<'a>(
@@ -1439,7 +1425,7 @@ fn parse_if<'a>(
 
     // if guard { if_body } else ...
     let else_body = if match_token(input, Token::Else).unwrap_or(false) {
-        Some(Box::new(if matches!(input.peek(), Some((Token::If, _))) {
+        Some(Box::new(if let (Token::If, _) = input.peek().unwrap() {
             // if guard { if_body } else if ...
             parse_if(input, breakable, allow_stmt_expr)?
         } else {
@@ -1676,9 +1662,9 @@ fn parse_stmt<'a>(
         Token::Return | Token::Throw => {
             let pos = *pos;
 
-            let return_type = match input.next() {
-                Some((Token::Return, _)) => ReturnType::Return,
-                Some((Token::Throw, _)) => ReturnType::Exception,
+            let return_type = match input.next().unwrap() {
+                (Token::Return, _) => ReturnType::Return,
+                (Token::Throw, _) => ReturnType::Exception,
                 _ => panic!("token should be return or throw"),
             };
 
@@ -1819,7 +1805,7 @@ fn parse_global_level<'a>(
         // Collect all the function definitions
        #[cfg(not(feature = "no_function"))]
        {
-            if matches!(input.peek().expect("should not be None"), (Token::Fn, _)) {
+            if let (Token::Fn, _) = input.peek().unwrap() {
                let f = parse_fn(input, true)?;
                functions.insert(calc_fn_def(&f.name, f.params.len()), f);
                continue;
@@ -991,6 +991,8 @@ impl<'a> TokenIterator<'a> {
            }
            ('~', _) => return Some((Token::PowerOf, pos)),
 
+            ('\0', _) => panic!("should not be EOF"),
+
            (ch, _) if ch.is_whitespace() => (),
            (ch, _) => return Some((Token::LexError(Box::new(LERR::UnexpectedChar(ch))), pos)),
        }