Add module documentation.

This commit is contained in:
Stephen Chung 2022-07-25 13:40:23 +08:00
parent 45acb65f4f
commit 5d799fd325
8 changed files with 126 additions and 37 deletions

View File

@ -16,6 +16,7 @@ New features
------------
* A new feature, `no_custom_syntax`, is added to remove custom syntax support from Rhai for applications that do not require it (which should be most applications).
* Comment lines beginning with `//!` (requires the `metadata` feature) are now collected as the script file's _module documentation_.
Enhancements
------------

View File

@ -2,6 +2,7 @@
use crate::parser::{ParseResult, ParseState};
use crate::{Engine, OptimizationLevel, Scope, AST};
use std::mem;
#[cfg(feature = "no_std")]
use std::prelude::v1::*;
@ -222,7 +223,12 @@ impl Engine {
self.token_mapper.as_ref().map(<_>::as_ref),
);
let mut state = ParseState::new(self, scope, tokenizer_control);
self.parse(&mut stream.peekable(), &mut state, optimization_level)
let mut ast = self.parse(&mut stream.peekable(), &mut state, optimization_level)?;
#[cfg(feature = "metadata")]
ast.set_doc(mem::take(
&mut state.tokenizer_control.borrow_mut().global_comments,
));
Ok(ast)
}
/// Compile a string containing an expression into an [`AST`],
/// which can be used later for evaluation.

View File

@ -2,6 +2,7 @@
#![cfg(not(feature = "no_optimize"))]
use crate::{Engine, OptimizationLevel, Scope, AST};
use std::mem;
impl Engine {
/// Control whether and how the [`Engine`] will optimize an [`AST`] after compilation.
@ -59,13 +60,18 @@ impl Engine {
.map(|f| f.func.get_script_fn_def().unwrap().clone())
.collect();
crate::optimizer::optimize_into_ast(
let mut new_ast = crate::optimizer::optimize_into_ast(
self,
scope,
ast.take_statements(),
#[cfg(not(feature = "no_function"))]
lib,
optimization_level,
)
);
#[cfg(feature = "metadata")]
new_ast.set_doc(mem::take(ast.doc_mut()));
new_ast
}
}

View File

@ -25,9 +25,7 @@ impl Engine {
let (stream, tokenizer_control) =
self.lex_raw(&scripts, self.token_mapper.as_ref().map(<_>::as_ref));
let mut state = ParseState::new(self, scope, tokenizer_control);
let ast = self.parse(&mut stream.peekable(), &mut state, self.optimization_level)?;
self.run_ast_with_scope(scope, &ast)
}
/// Evaluate an [`AST`], returning any error (if any).

View File

@ -1,7 +1,7 @@
//! Module defining the AST (abstract syntax tree).
use super::{ASTFlags, Expr, FnAccess, Stmt, StmtBlock, StmtBlockContainer};
use crate::{Dynamic, FnNamespace, Identifier, Position};
use crate::{Dynamic, FnNamespace, Identifier, Position, SmartString};
#[cfg(feature = "no_std")]
use std::prelude::v1::*;
use std::{
@ -21,6 +21,9 @@ pub struct AST {
/// Source of the [`AST`].
/// No source if string is empty.
source: Identifier,
/// [`AST`] documentation.
#[cfg(feature = "metadata")]
doc: Vec<SmartString>,
/// Global statements.
body: StmtBlock,
/// Script-defined functions.
@ -42,13 +45,11 @@ impl fmt::Debug for AST {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut fp = f.debug_struct("AST");
if !self.source.is_empty() {
fp.field("source: ", &self.source);
}
fp.field("source", &self.source);
#[cfg(feature = "metadata")]
fp.field("doc", &self.doc());
#[cfg(not(feature = "no_module"))]
if let Some(ref resolver) = self.resolver {
fp.field("resolver: ", resolver);
}
fp.field("resolver", &self.resolver);
fp.field("body", &self.body.as_slice());
@ -92,6 +93,8 @@ impl AST {
) -> Self {
Self {
source: Identifier::new_const(),
#[cfg(feature = "metadata")]
doc: Vec::new(),
body: StmtBlock::new(statements, Position::NONE, Position::NONE),
#[cfg(not(feature = "no_function"))]
lib: functions.into(),
@ -140,6 +143,8 @@ impl AST {
pub fn empty() -> Self {
Self {
source: Identifier::new_const(),
#[cfg(feature = "metadata")]
doc: Vec::new(),
body: StmtBlock::NONE,
#[cfg(not(feature = "no_function"))]
lib: crate::Module::new().into(),
@ -180,6 +185,39 @@ impl AST {
self.source.clear();
self
}
/// Get the documentation.
///
/// Returns all collected `//!` module-documentation lines joined with `'\n'`.
///
/// Only available under `metadata`.
#[cfg(feature = "metadata")]
#[inline(always)]
pub fn doc(&self) -> String {
// Build the newline-separated text manually; equivalent to `self.doc.join("\n")`.
let mut text = String::new();
for (i, line) in self.doc.iter().enumerate() {
if i > 0 {
text.push('\n');
}
text.push_str(line);
}
text
}
/// Clear the documentation.
///
/// Only available under `metadata`.
#[cfg(feature = "metadata")]
#[inline(always)]
pub fn clear_doc(&mut self) -> &mut Self {
// Drop every collected documentation line (allocation is retained,
// same as `Vec::clear`), then return `self` for call chaining.
self.doc.truncate(0);
self
}
/// Get a mutable reference to the documentation.
///
/// Only available under `metadata`.
///
/// Crate-internal: used (e.g. by the optimizer) to move documentation out of
/// an [`AST`] without cloning, via `mem::take`.
#[cfg(feature = "metadata")]
#[inline(always)]
pub(crate) fn doc_mut(&mut self) -> &mut Vec<SmartString> {
&mut self.doc
}
/// Set the documentation.
///
/// Only available under `metadata`.
///
/// Replaces any existing documentation wholesale; `doc` is a collection of
/// `//!` comment lines gathered by the tokenizer.
#[cfg(feature = "metadata")]
#[inline(always)]
pub(crate) fn set_doc(&mut self, doc: Vec<SmartString>) {
self.doc = doc;
}
/// Get the statements.
#[cfg(not(feature = "internals"))]
#[inline(always)]
@ -292,6 +330,8 @@ impl AST {
lib.merge_filtered(&self.lib, &filter);
Self {
source: self.source.clone(),
#[cfg(feature = "metadata")]
doc: self.doc.clone(),
body: StmtBlock::NONE,
lib: lib.into(),
#[cfg(not(feature = "no_module"))]
@ -305,6 +345,8 @@ impl AST {
pub fn clone_statements_only(&self) -> Self {
Self {
source: self.source.clone(),
#[cfg(feature = "metadata")]
doc: self.doc.clone(),
body: self.body.clone(),
#[cfg(not(feature = "no_function"))]
lib: crate::Module::new().into(),
@ -543,6 +585,9 @@ impl AST {
}
}
#[cfg(feature = "metadata")]
_ast.doc.extend(other.doc.iter().cloned());
_ast
}
/// Combine one [`AST`] with another. The second [`AST`] is consumed.
@ -636,6 +681,9 @@ impl AST {
crate::func::shared_make_mut(&mut self.lib).merge_filtered(&other.lib, &_filter);
}
#[cfg(feature = "metadata")]
self.doc.extend(other.doc.into_iter());
self
}
/// Filter out the functions, retaining only some based on a filter predicate.

View File

@ -1404,9 +1404,7 @@ impl Engine {
}
// Make sure to parse the following as text
let mut control = state.tokenizer_control.get();
control.is_within_text = true;
state.tokenizer_control.set(control);
state.tokenizer_control.borrow_mut().is_within_text = true;
match input.next().expect(NEVER_ENDS) {
(Token::StringConstant(s), pos) => {

View File

@ -10,7 +10,7 @@ use crate::{Engine, Identifier, LexError, SmartString, StaticVec, INT, UNSIGNED_
use std::prelude::v1::*;
use std::{
borrow::Cow,
cell::Cell,
cell::RefCell,
char, fmt,
iter::{FusedIterator, Peekable},
num::NonZeroUsize,
@ -20,11 +20,14 @@ use std::{
};
/// _(internals)_ A type containing commands to control the tokenizer.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Copy)]
// NOTE: `Copy` was dropped from the derives because `global_comments` (a `Vec`)
// is not `Copy`; the block is now shared behind a `RefCell` instead of a `Cell`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct TokenizerControlBlock {
/// Is the current tokenizer position within an interpolated text string?
/// This flag allows switching the tokenizer back to _text_ parsing after an interpolation stream.
pub is_within_text: bool,
/// Collection of global comments (`//!` lines forming the module documentation).
#[cfg(feature = "metadata")]
pub global_comments: Vec<SmartString>,
}
impl TokenizerControlBlock {
@ -34,12 +37,14 @@ impl TokenizerControlBlock {
/// Create a new `TokenizerControlBlock` with all fields at their defaults:
/// not within an interpolated text string and no global comments collected.
pub const fn new() -> Self {
Self {
is_within_text: false,
// `Vec::new` is `const`, so no allocation happens here.
#[cfg(feature = "metadata")]
global_comments: Vec::new(),
}
}
}
/// _(internals)_ A shared object that allows control of the tokenizer from outside.
pub type TokenizerControl = Rc<Cell<TokenizerControlBlock>>;
pub type TokenizerControl = Rc<RefCell<TokenizerControlBlock>>;
type LERR = LexError;
@ -1098,12 +1103,14 @@ impl From<Token> for String {
/// _(internals)_ State of the tokenizer.
/// Exported under the `internals` feature only.
#[derive(Debug, Clone, Eq, PartialEq, Default)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TokenizeState {
/// Maximum length of a string.
pub max_string_size: Option<NonZeroUsize>,
/// Can the next token be a unary operator?
pub next_token_cannot_be_unary: bool,
/// Shared object to allow controlling the tokenizer externally.
pub tokenizer_control: TokenizerControl,
/// Is the tokenizer currently inside a block comment?
pub comment_level: usize,
/// Include comments?
@ -1866,6 +1873,11 @@ fn get_next_token_inner(
_ => Some("///".into()),
}
}
#[cfg(feature = "metadata")]
Some('!') => {
eat_next(stream, pos);
Some("//!".into())
}
_ if state.include_comments => Some("//".into()),
_ => None,
};
@ -1890,7 +1902,15 @@ fn get_next_token_inner(
}
if let Some(comment) = comment {
return Some((Token::Comment(comment), start_pos));
match comment {
#[cfg(feature = "metadata")]
_ if comment.starts_with("//!") => state
.tokenizer_control
.borrow_mut()
.global_comments
.push(comment),
_ => return Some((Token::Comment(comment), start_pos)),
}
}
}
('/', '*') => {
@ -2300,8 +2320,6 @@ pub struct TokenIterator<'a> {
pub state: TokenizeState,
/// Current position.
pub pos: Position,
/// Shared object to allow controlling the tokenizer externally.
pub tokenizer_control: TokenizerControl,
/// Input character stream.
pub stream: MultiInputsStream<'a>,
/// A processor function that maps a token to another.
@ -2312,14 +2330,15 @@ impl<'a> Iterator for TokenIterator<'a> {
type Item = (Token, Position);
fn next(&mut self) -> Option<Self::Item> {
let mut control = self.tokenizer_control.get();
{
let control = &mut *self.state.tokenizer_control.borrow_mut();
if control.is_within_text {
// Switch to text mode terminated by back-tick
self.state.is_within_text_terminated_by = Some('`');
// Reset it
control.is_within_text = false;
self.tokenizer_control.set(control);
if control.is_within_text {
// Switch to text mode terminated by back-tick
self.state.is_within_text_terminated_by = Some('`');
// Reset it
control.is_within_text = false;
}
}
let (token, pos) = match get_next_token(&mut self.stream, &mut self.state, &mut self.pos) {
@ -2450,7 +2469,7 @@ impl Engine {
input: impl IntoIterator<Item = &'a (impl AsRef<str> + 'a)>,
token_mapper: Option<&'a OnParseTokenCallback>,
) -> (TokenIterator<'a>, TokenizerControl) {
let buffer: TokenizerControl = Cell::new(TokenizerControlBlock::new()).into();
let buffer: TokenizerControl = RefCell::new(TokenizerControlBlock::new()).into();
let buffer2 = buffer.clone();
(
@ -2462,12 +2481,12 @@ impl Engine {
#[cfg(feature = "unchecked")]
max_string_size: None,
next_token_cannot_be_unary: false,
tokenizer_control: buffer,
comment_level: 0,
include_comments: false,
is_within_text_terminated_by: None,
},
pos: Position::new(1, 0),
tokenizer_control: buffer,
stream: MultiInputsStream {
buf: None,
streams: input

View File

@ -68,6 +68,7 @@ fn test_optimizer_run() -> Result<(), Box<EvalAltResult>> {
Ok(())
}
#[cfg(feature = "metadata")]
#[cfg(not(feature = "no_module"))]
#[cfg(not(feature = "no_function"))]
#[cfg(not(feature = "no_position"))]
@ -79,30 +80,39 @@ fn test_optimizer_parse() -> Result<(), Box<EvalAltResult>> {
let ast = engine.compile("{ const DECISION = false; if DECISION { 42 } else { 123 } }")?;
assert_eq!(format!("{:?}", ast), "AST { body: [Expr(123 @ 1:53)] }");
assert_eq!(
format!("{:?}", ast),
r#"AST { source: "", doc: "", resolver: None, body: [Expr(123 @ 1:53)] }"#
);
let ast = engine.compile("const DECISION = false; if DECISION { 42 } else { 123 }")?;
assert_eq!(
format!("{:?}", ast),
r#"AST { body: [Var(("DECISION" @ 1:7, false @ 1:18, None), CONSTANT, 1:1), Expr(123 @ 1:51)] }"#
r#"AST { source: "", doc: "", resolver: None, body: [Var(("DECISION" @ 1:7, false @ 1:18, None), CONSTANT, 1:1), Expr(123 @ 1:51)] }"#
);
let ast = engine.compile("if 1 == 2 { 42 }")?;
assert_eq!(format!("{:?}", ast), "AST { body: [] }");
assert_eq!(
format!("{:?}", ast),
r#"AST { source: "", doc: "", resolver: None, body: [] }"#
);
engine.set_optimization_level(OptimizationLevel::Full);
let ast = engine.compile("abs(-42)")?;
assert_eq!(format!("{:?}", ast), "AST { body: [Expr(42 @ 1:1)] }");
assert_eq!(
format!("{:?}", ast),
r#"AST { source: "", doc: "", resolver: None, body: [Expr(42 @ 1:1)] }"#
);
let ast = engine.compile("NUMBER")?;
assert_eq!(
format!("{:?}", ast),
"AST { body: [Expr(Variable(NUMBER) @ 1:1)] }"
r#"AST { source: "", doc: "", resolver: None, body: [Expr(Variable(NUMBER) @ 1:1)] }"#
);
let mut module = Module::new();
@ -112,7 +122,10 @@ fn test_optimizer_parse() -> Result<(), Box<EvalAltResult>> {
let ast = engine.compile("NUMBER")?;
assert_eq!(format!("{:?}", ast), "AST { body: [Expr(42 @ 1:1)] }");
assert_eq!(
format!("{:?}", ast),
r#"AST { source: "", doc: "", resolver: None, body: [Expr(42 @ 1:1)] }"#
);
Ok(())
}