Merge branch 'master' into master

commit b95c402e3f
Author: Lukáš Hozda [magnusi]
Date:   2017-10-30 08:48:19 +01:00 (committed by GitHub)
2 changed files with 73 additions and 9 deletions

@@ -154,6 +154,8 @@ pub enum Token {
     Fn,
     Break,
     Return,
+    PlusEquals,
+    MinusEquals,
     LexErr(LexError),
 }
@@ -436,13 +438,25 @@ impl<'a> TokenIterator<'a> {
             '[' => return Some(Token::LSquare),
             ']' => return Some(Token::RSquare),
             '+' => {
-                if self.last.is_next_unary() { return Some(Token::UnaryPlus) }
-                else { return Some(Token::Plus) }
-            }
+                return match self.char_stream.peek() {
+                    Some(&'=') => {
+                        self.char_stream.next();
+                        Some(Token::PlusEquals)
+                    },
+                    _ if self.last.is_next_unary() => Some(Token::UnaryPlus),
+                    _ => Some(Token::Plus),
+                }
+            },
             '-' => {
-                if self.last.is_next_unary() { return Some(Token::UnaryMinus) }
-                else { return Some(Token::Minus) }
-            }
+                return match self.char_stream.peek() {
+                    Some(&'=') => {
+                        self.char_stream.next();
+                        Some(Token::MinusEquals)
+                    },
+                    _ if self.last.is_next_unary() => Some(Token::UnaryMinus),
+                    _ => Some(Token::Minus),
+                }
+            },
             '*' => return Some(Token::Multiply),
             '/' => {
                 match self.char_stream.peek() {
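
The rewritten '+' and '-' arms use a single character of lookahead: peek at the next character and consume it only when it is '=' and a compound token is emitted. A minimal standalone sketch of that peek-then-consume pattern (Tok and lex_plus_or_minus are illustrative names, not part of the crate, and the unary-operator distinction from the real lexer is omitted):

    use std::iter::Peekable;
    use std::str::Chars;

    // Illustrative stand-in for the real Token enum.
    #[derive(Debug, PartialEq)]
    enum Tok { Plus, PlusEquals, Minus, MinusEquals }

    // After a leading '+' or '-' has been consumed, peek at the next character;
    // only advance the stream when it is '=', so a bare '+'/'-' leaves the
    // following character untouched.
    fn lex_plus_or_minus(first: char, rest: &mut Peekable<Chars<'_>>) -> Option<Tok> {
        let tok = match (first, rest.peek()) {
            ('+', Some(&'=')) => { rest.next(); Tok::PlusEquals }
            ('-', Some(&'=')) => { rest.next(); Tok::MinusEquals }
            ('+', _) => Tok::Plus,
            ('-', _) => Tok::Minus,
            _ => return None, // only '+' and '-' are handled in this sketch
        };
        Some(tok)
    }

    fn main() {
        let mut rest = "= 2".chars().peekable();
        assert_eq!(lex_plus_or_minus('+', &mut rest), Some(Tok::PlusEquals));
        assert_eq!(rest.next(), Some(' ')); // the '=' was consumed with the token
    }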
@@ -544,7 +558,7 @@ impl<'a> TokenIterator<'a> {
 impl<'a> Iterator for TokenIterator<'a> {
     type Item = Token;
-    // TODO - perhaps this could be optimized to only keep track of last for operators?
+    // TODO - perhaps this could be optimized?
     fn next(&mut self) -> Option<Self::Item> {
         self.last = match self.inner_next() {
             Some(c) => c,
@@ -560,7 +574,9 @@ pub fn lex(input: &str) -> TokenIterator {
 fn get_precedence(token: &Token) -> i32 {
     match *token {
-        Token::Equals => 10,
+        Token::Equals
+        | Token::PlusEquals
+        | Token::MinusEquals => 10,
         Token::Or => 11,
         Token::And => 12,
         Token::LessThan
@@ -591,7 +607,7 @@ fn parse_call_expr<'a>(id: String,
                        input: &mut Peekable<TokenIterator<'a>>)
                        -> Result<Expr, ParseError> {
     let mut args = Vec::new();
     if let Some(&Token::RParen) = input.peek() {
         input.next();
         return Ok(Expr::FnCall(id, args));
@@ -756,6 +772,20 @@ fn parse_binop<'a>(input: &mut Peekable<TokenIterator<'a>>,
         Token::Multiply => Expr::FnCall("*".to_string(), vec![lhs_curr, rhs]),
         Token::Divide => Expr::FnCall("/".to_string(), vec![lhs_curr, rhs]),
         Token::Equals => Expr::Assignment(Box::new(lhs_curr), Box::new(rhs)),
+        Token::PlusEquals => {
+            let lhs_copy = lhs_curr.clone();
+            Expr::Assignment(
+                Box::new(lhs_curr),
+                Box::new(Expr::FnCall("+".to_string(), vec![lhs_copy, rhs]))
+            )
+        },
+        Token::MinusEquals => {
+            let lhs_copy = lhs_curr.clone();
+            Expr::Assignment(
+                Box::new(lhs_curr),
+                Box::new(Expr::FnCall("-".to_string(), vec![lhs_copy, rhs]))
+            )
+        },
         Token::Period => Expr::Dot(Box::new(lhs_curr), Box::new(rhs)),
         Token::EqualTo => Expr::FnCall("==".to_string(), vec![lhs_curr, rhs]),
         Token::NotEqualTo => Expr::FnCall("!=".to_string(), vec![lhs_curr, rhs]),
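
The new PlusEquals and MinusEquals arms in parse_binop desugar `a += b` into `a = a + b` (and `a -= b` into `a = a - b`), cloning the left-hand side so it appears both as the assignment target and as the first argument of the call. A rough sketch of that rewrite over a deliberately simplified expression type (not the crate's actual Expr):

    // Deliberately simplified AST for illustration; the real Expr has many more variants.
    #[derive(Clone, Debug, PartialEq)]
    enum Expr {
        Id(String),
        Int(i64),
        FnCall(String, Vec<Expr>),
        Assignment(Box<Expr>, Box<Expr>),
    }

    // `lhs op= rhs`  desugars to  `lhs = op(lhs, rhs)`
    fn desugar_compound(op: &str, lhs: Expr, rhs: Expr) -> Expr {
        let lhs_copy = lhs.clone();
        Expr::Assignment(
            Box::new(lhs),
            Box::new(Expr::FnCall(op.to_string(), vec![lhs_copy, rhs])),
        )
    }

    fn main() {
        // x += 2  desugars to  x = +(x, 2)
        let desugared = desugar_compound("+", Expr::Id("x".into()), Expr::Int(2));
        assert_eq!(
            desugared,
            Expr::Assignment(
                Box::new(Expr::Id("x".into())),
                Box::new(Expr::FnCall(
                    "+".into(),
                    vec![Expr::Id("x".into()), Expr::Int(2)],
                )),
            )
        );
    }

Because PlusEquals and MinusEquals share the lowest precedence (10) with Equals, the right-hand side is parsed in full before the rewrite, so `x += 1 + 2` becomes `x = x + (1 + 2)`. The remaining hunk belongs to the second changed file and adds tests for the new operators.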

@@ -457,3 +457,37 @@ fn test_loop() {
         ").unwrap()
     )
 }
+
+#[test]
+fn test_increment() {
+    let mut engine = Engine::new();
+
+    if let Ok(result) = engine.eval::<i64>("let x = 1; x += 2; x") {
+        assert_eq!(result, 3);
+    } else {
+        assert!(false);
+    }
+
+    if let Ok(result) = engine.eval::<String>("let s = \"test\"; s += \"ing\"; s") {
+        assert_eq!(result, "testing".to_owned());
+    } else {
+        assert!(false);
+    }
+}
+
+#[test]
+fn test_decrement() {
+    let mut engine = Engine::new();
+
+    if let Ok(result) = engine.eval::<i64>("let x = 10; x -= 7; x") {
+        assert_eq!(result, 3);
+    } else {
+        assert!(false);
+    }
+
+    if let Ok(_) = engine.eval::<String>("let s = \"test\"; s -= \"ing\"; s") {
+        assert!(false);
+    } else {
+        assert!(true);
+    }
+}
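
The `s -= "ing"` case is expected to error because, unlike `+` (which the increment test shows doubling as string concatenation), there is no `-` operation for strings, so the desugared `s = s - "ing"` fails to evaluate. For completeness, a hypothetical host-side snippet using only the API already exercised by these tests (Engine::new and Engine::eval::<T>; the `use rhai::Engine;` import is assumed):

    use rhai::Engine;

    fn main() {
        let mut engine = Engine::new();

        // += and -= now desugar to an assignment plus the matching binary op.
        match engine.eval::<i64>("let total = 0; total += 5; total -= 2; total") {
            Ok(total) => println!("total = {}", total), // prints "total = 3"
            Err(_) => println!("eval error"),
        }
    }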