diff --git a/README.md b/README.md
index b200f5ec..e4d6d354 100644
--- a/README.md
+++ b/README.md
@@ -1379,8 +1379,7 @@ integer and floating-point values by always serializing a floating-point number
 (i.e. `123.0` instead of `123` which is assumed to be an integer).
 This style can be used successfully with Rhai object maps.
 
-Use the [`eval_expression`]`::<Map>` method (or [`eval_expression_with_scope`]`::<Map>` in order to
-handle `null` values) to parse a piece of JSON (with the hash character `#` attached) into an object map:
+Use the `parse_json` method to parse a piece of JSON into an object map:
 
 ```rust
 // JSON string - notice that JSON property names are always quoted
@@ -1395,23 +1394,19 @@ let json = r#"{
 }
 "#;
 
-// Create a new scope
-let mut scope = Scope::new();
-scope.push_constant("null", ());    // map 'null' to '()'
-
-// Parse the JSON expression as an object map by attaching '#' in front
-let expr = format!("#{}", json);
-let map = engine.eval_expression_with_scope::<Map>(&mut scope, expr)?;
+// Parse the JSON expression as an object map
+// Set the second boolean parameter to true in order to map 'null' to '()'
+let map = engine.parse_json(json, true)?;
 
 map.len() == 6;     // 'map' contains all properties in the JSON string
 
-// Push the map back into the scope
-scope.clear();
+// Put the object map into a 'Scope'
+let mut scope = Scope::new();
 scope.push("map", map);
 
 let result = engine.eval_with_scope::<i64>(&mut scope, r#"map["^^^!!!"].len()"#)?;
 
-result == 3;        // the object map is used in a script
+result == 3;        // the object map is successfully used in the script
 ```
 
 Comparison operators
diff --git a/src/api.rs b/src/api.rs
index a2c7755a..9b25a7cc 100644
--- a/src/api.rs
+++ b/src/api.rs
@@ -1,7 +1,7 @@
 //! Module that defines the extern API of `Engine`.
 
 use crate::any::{Any, AnyExt, Dynamic};
-use crate::engine::{make_getter, make_setter, Engine, FnAny, FnSpec};
+use crate::engine::{make_getter, make_setter, Engine, FnAny, FnSpec, Map};
 use crate::error::ParseError;
 use crate::fn_call::FuncArgs;
 use crate::fn_register::RegisterFn;
@@ -395,14 +395,9 @@ impl<'e> Engine<'e> {
         script: &str,
         optimization_level: OptimizationLevel,
     ) -> Result<AST, ParseError> {
-        let tokens_stream = lex(script);
-
-        parse(
-            &mut tokens_stream.peekable(),
-            self,
-            scope,
-            optimization_level,
-        )
+        let scripts = [script];
+        let stream = lex(&scripts);
+        parse(&mut stream.peekable(), self, scope, optimization_level)
     }
 
     /// Read the contents of a file into a string.
@@ -483,6 +478,51 @@ impl<'e> Engine<'e> {
         Self::read_file(path).and_then(|contents| Ok(self.compile_with_scope(scope, &contents)?))
     }
 
+    /// Parse a JSON string into a map.
+    ///
+    /// Set `has_null` to `true` in order to map `null` values to `()`.
+    /// Setting it to `false` will cause a _variable not found_ error during parsing.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # fn main() -> Result<(), rhai::EvalAltResult> {
+    /// use rhai::{Engine, AnyExt};
+    ///
+    /// let engine = Engine::new();
+    ///
+    /// let map = engine.parse_json(r#"{"a":123, "b":42, "c":false, "d":null}"#, true)?;
+    ///
+    /// assert_eq!(map.len(), 4);
+    /// assert_eq!(map.get("a").cloned().unwrap().cast::<i64>(), 123);
+    /// assert_eq!(map.get("b").cloned().unwrap().cast::<i64>(), 42);
+    /// assert_eq!(map.get("c").cloned().unwrap().cast::<bool>(), false);
+    /// assert_eq!(map.get("d").cloned().unwrap().cast::<()>(), ());
+    /// # Ok(())
+    /// # }
+    /// ```
+    #[cfg(not(feature = "no_object"))]
+    pub fn parse_json(&self, json: &str, has_null: bool) -> Result<Map, EvalAltResult> {
+        let mut scope = Scope::new();
+
+        // Trim the JSON string and add a '#' in front
+        let scripts = ["#", json.trim()];
+        let stream = lex(&scripts);
+        let ast = parse_global_expr(
+            &mut stream.peekable(),
+            self,
+            &scope,
+            OptimizationLevel::None,
+        )?;
+
+        // Handle null - map to ()
+        if has_null {
+            scope.push_constant("null", ());
+        }
+
+        self.eval_ast_with_scope(&mut scope, &ast)
+    }
+
     /// Compile a string containing an expression into an `AST`,
     /// which can be used later for evaluation.
     ///
@@ -551,8 +591,9 @@ impl<'e> Engine<'e> {
         scope: &Scope,
         script: &str,
     ) -> Result<AST, ParseError> {
-        let tokens_stream = lex(script);
-        parse_global_expr(&mut tokens_stream.peekable(), self, scope)
+        let scripts = [script];
+        let stream = lex(&scripts);
+        parse_global_expr(&mut stream.peekable(), self, scope, self.optimization_level)
     }
 
     /// Evaluate a script file.
@@ -807,15 +848,9 @@ impl<'e> Engine<'e> {
     /// Evaluate a string with own scope, but throw away the result and only return error (if any).
     /// Useful for when you don't need the result, but still need to keep track of possible errors.
     pub fn consume_with_scope(&self, scope: &mut Scope, script: &str) -> Result<(), EvalAltResult> {
-        let tokens_stream = lex(script);
-
-        let ast = parse(
-            &mut tokens_stream.peekable(),
-            self,
-            scope,
-            self.optimization_level,
-        )?;
-
+        let scripts = [script];
+        let stream = lex(&scripts);
+        let ast = parse(&mut stream.peekable(), self, scope, self.optimization_level)?;
         self.consume_ast_with_scope(scope, &ast)
     }
 
diff --git a/src/engine.rs b/src/engine.rs
index c308b236..5abe17eb 100644
--- a/src/engine.rs
+++ b/src/engine.rs
@@ -1503,7 +1503,7 @@ impl Engine<'_> {
             Expr::False(_) => Ok(false.into_dynamic()),
             Expr::Unit(_) => Ok(().into_dynamic()),
 
-            expr => panic!("should not appear: {:?}", expr),
+            _ => panic!("should not appear: {:?}", expr),
         }
     }
 
diff --git a/src/parser.rs b/src/parser.rs
index 07d5433c..21f04e1f 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -828,16 +828,40 @@ pub struct TokenIterator<'a> {
     can_be_unary: bool,
     /// Current position.
     pos: Position,
-    /// The input characters stream.
-    stream: Peekable<Chars<'a>>,
+    /// The input character streams.
+    streams: Vec<Peekable<Chars<'a>>>,
 }
 
 impl<'a> TokenIterator<'a> {
     /// Consume the next character.
     fn eat_next(&mut self) {
-        self.stream.next();
+        self.get_next();
         self.advance();
     }
+    /// Get the next character
+    fn get_next(&mut self) -> Option<char> {
+        loop {
+            if self.streams.is_empty() {
+                return None;
+            } else if let Some(ch) = self.streams[0].next() {
+                return Some(ch);
+            } else {
+                let _ = self.streams.remove(0);
+            }
+        }
+    }
+    /// Peek the next character
+    fn peek_next(&mut self) -> Option<char> {
+        loop {
+            if self.streams.is_empty() {
+                return None;
+            } else if let Some(ch) = self.streams[0].peek() {
+                return Some(*ch);
+            } else {
+                let _ = self.streams.remove(0);
+            }
+        }
+    }
     /// Move the current position one character ahead.
     fn advance(&mut self) {
         self.pos.advance();
@@ -864,7 +888,7 @@ impl<'a> TokenIterator<'a> {
         let mut escape = String::with_capacity(12);
 
         loop {
-            let next_char = self.stream.next();
+            let next_char = self.get_next();
             self.advance();
 
             match next_char.ok_or((LERR::UnterminatedString, Position::eof()))? {
@@ -907,7 +931,7 @@
                     };
 
                     for _ in 0..len {
-                        let c = self.stream.next().ok_or_else(|| {
+                        let c = self.get_next().ok_or_else(|| {
                             (LERR::MalformedEscapeSequence(seq.to_string()), self.pos)
                         })?;
 
@@ -958,12 +982,12 @@
     fn inner_next(&mut self) -> Option<(Token, Position)> {
         let mut negated = false;
 
-        while let Some(c) = self.stream.next() {
+        while let Some(c) = self.get_next() {
            self.advance();
 
            let pos = self.pos;
 
-            match (c, self.stream.peek().copied().unwrap_or('\0')) {
+            match (c, self.peek_next().unwrap_or('\0')) {
                 // \n
                 ('\n', _) => self.new_line(),
 
@@ -973,7 +997,7 @@
                     let mut radix_base: Option<u32> = None;
                     result.push(c);
 
-                    while let Some(&next_char) = self.stream.peek() {
+                    while let Some(next_char) = self.peek_next() {
                         match next_char {
                             '0'..='9' | '_' => {
                                 result.push(next_char);
                                 self.eat_next();
                             }
                             '.' => {
                                 result.push(next_char);
                                 self.eat_next();
-                                while let Some(&next_char_in_float) = self.stream.peek() {
+                                while let Some(next_char_in_float) = self.peek_next() {
                                     match next_char_in_float {
                                         '0'..='9' | '_' => {
                                             result.push(next_char_in_float);
                                             self.eat_next();
                                         }
                                         _ => break,
                                     }
                                 }
                             }
@@ -1023,7 +1047,7 @@
                                 _ => panic!("unexpected character {}", ch),
                             });
 
-                    while let Some(&next_char_in_hex) = self.stream.peek() {
+                    while let Some(next_char_in_hex) = self.peek_next() {
                         if !valid.contains(&next_char_in_hex) {
                             break;
                         }
@@ -1079,7 +1103,7 @@
                 let mut result = Vec::new();
                 result.push(c);
 
-                while let Some(&next_char) = self.stream.peek() {
+                while let Some(next_char) = self.peek_next() {
                     match next_char {
                         x if x.is_ascii_alphanumeric() || x == '_' => {
                             result.push(x);
@@ -1207,7 +1231,7 @@
             ('/', '/') => {
                 self.eat_next();
 
-                while let Some(c) = self.stream.next() {
+                while let Some(c) = self.get_next() {
                     if c == '\n' {
                         self.new_line();
                         break;
@@ -1221,18 +1245,18 @@
 
                 self.eat_next();
 
-                while let Some(c) = self.stream.next() {
+                while let Some(c) = self.get_next() {
                     self.advance();
 
                     match c {
                         '/' => {
-                            if self.stream.next() == Some('*') {
+                            if self.get_next() == Some('*') {
                                 level += 1;
                             }
                             self.advance();
                         }
                         '*' => {
-                            if self.stream.next() == Some('/') {
+                            if self.get_next() == Some('/') {
                                 level -= 1;
                             }
                             self.advance();
@@ -1272,7 +1296,7 @@
                 self.eat_next();
 
                 return Some((
-                    if self.stream.peek() == Some(&'=') {
+                    if self.peek_next() == Some('=') {
                         self.eat_next();
                         Token::LeftShiftAssign
                     } else {
@@ -1291,7 +1315,7 @@
                 self.eat_next();
 
                 return Some((
-                    if self.stream.peek() == Some(&'=') {
+                    if self.peek_next() == Some('=') {
                         self.eat_next();
                         Token::RightShiftAssign
                     } else {
@@ -1368,11 +1392,11 @@
 }
 
 /// Tokenize an input text stream.
-pub fn lex(input: &str) -> TokenIterator<'_> {
+pub fn lex<'a>(input: &'a [&'a str]) -> TokenIterator<'a> {
     TokenIterator {
         can_be_unary: true,
         pos: Position::new(1, 0),
-        stream: input.chars().peekable(),
+        streams: input.iter().map(|s| s.chars().peekable()).collect(),
     }
 }
 
@@ -2696,6 +2720,7 @@ pub fn parse_global_expr<'a, 'e>(
     input: &mut Peekable<TokenIterator<'a>>,
     engine: &Engine<'e>,
     scope: &Scope,
+    optimization_level: OptimizationLevel,
 ) -> Result<AST, ParseError> {
     let expr = parse_expr(input, false)?;
 
@@ -2711,7 +2736,7 @@
             scope,
             vec![Stmt::Expr(Box::new(expr))],
             vec![],
-            engine.optimization_level,
+            optimization_level,
         ),
     )
 }
diff --git a/tests/maps.rs b/tests/maps.rs
index f6bc45db..2002d420 100644
--- a/tests/maps.rs
+++ b/tests/maps.rs
@@ -163,13 +163,9 @@ fn test_map_for() -> Result<(), EvalAltResult> {
 fn test_map_json() -> Result<(), EvalAltResult> {
     let engine = Engine::new();
 
-    let mut scope = Scope::new();
-    scope.push_constant("null", ());
-    scope.push_constant("undefined", ());
-
     let json = r#"{"a":1, "b":true, "c":42, "$d e f!":"hello", "z":null}"#;
 
-    let map = engine.eval_expression_with_scope::<Map>(&mut scope, &("#".to_string() + json))?;
+    let map = engine.parse_json(json, true)?;
 
     assert!(!map.contains_key("x"));
 
@@ -211,7 +207,7 @@
 
     #[cfg(not(feature = "no_index"))]
     {
-        scope.clear();
+        let mut scope = Scope::new();
         scope.push_constant("map", map);
 
         assert_eq!(
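
To make the lexer change easier to follow, here is a minimal, self-contained sketch of the chained-stream technique that `TokenIterator` adopts above: several `&str` fragments are consumed as one continuous character stream, which is what lets `parse_json` lex `"#"` followed by the JSON text without first building a combined `String`. The `MultiStream` type and the `main` driver are illustrative only and not part of Rhai; the `get_next`/`peek_next` logic mirrors the methods added in the diff.

```rust
use std::iter::Peekable;
use std::str::Chars;

/// Illustrative stand-in for the `streams` field added to `TokenIterator`.
struct MultiStream<'a> {
    streams: Vec<Peekable<Chars<'a>>>,
}

impl<'a> MultiStream<'a> {
    fn new(fragments: &'a [&'a str]) -> Self {
        Self {
            streams: fragments.iter().map(|s| s.chars().peekable()).collect(),
        }
    }

    /// Get the next character, moving on to the next fragment when the current one runs out.
    fn get_next(&mut self) -> Option<char> {
        loop {
            if self.streams.is_empty() {
                return None;
            } else if let Some(ch) = self.streams[0].next() {
                return Some(ch);
            } else {
                // Current fragment is exhausted - drop it and retry with the next one.
                let _ = self.streams.remove(0);
            }
        }
    }

    /// Peek the next character without consuming it.
    fn peek_next(&mut self) -> Option<char> {
        loop {
            if self.streams.is_empty() {
                return None;
            } else if let Some(ch) = self.streams[0].peek() {
                return Some(*ch);
            } else {
                let _ = self.streams.remove(0);
            }
        }
    }
}

fn main() {
    // Same idea as `parse_json`: prepend "#" to the trimmed JSON text so the
    // lexer sees a single `#{...}` object-map literal.
    let json = r#"{"a":1, "b":true}"#;
    let fragments = ["#", json.trim()];
    let mut stream = MultiStream::new(&fragments);

    assert_eq!(stream.peek_next(), Some('#'));

    let combined: String = std::iter::from_fn(|| stream.get_next()).collect();
    assert_eq!(combined, format!("#{}", json));
}
```

Dropping exhausted fragments from the front of the `Vec` means both methods only loop when a fragment boundary is crossed, which matches the behaviour of the `get_next`/`peek_next` implementations in the diff.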