Update docs.
This commit is contained in:
parent
091e16124c
commit
76ab1e290d
12
CHANGELOG.md
12
CHANGELOG.md
@ -4,16 +4,22 @@ Rhai Release Notes
|
||||
Version 1.13.0
|
||||
==============
|
||||
|
||||
This version attempts a number of optimizations that may yield small speed improvements:
|
||||
|
||||
* Simple operators (e.g. integer arithmetic) are inlined to avoid the overhead of a function call.
|
||||
* The tokenizer uses pre-calculated tables (generated by GNU `gperf`) for keyword recognition.
|
||||
* A black-arts trick (see `Engine::black_box`) is used to prevent LLVM from optimizing hand-tuned AST node matches back into a lookup table, which messes up branch prediction on modern CPUs.
|
||||
|
||||
Bug fixes
|
||||
---------
|
||||
|
||||
* Complex indexing/dotting chains now parse correctly, for example: `a[b][c[d]].e`
|
||||
* `map` and `filter` for arrays are marked `pure`. Warnings are added to the documentation of pure array methods that take `this` closures.
|
||||
* Syntax such as `foo.bar::baz` no longer panics, but returns a proper parse error.
|
||||
* Some op-assignment statements, such as `x += y` where `x` and `y` are `char`, now work correctly instead of failing silently.
|
||||
* Expressions such as `!inside` now parse correctly instead of as `!in` followed by `side`.
|
||||
* Custom syntax starting with symbols now works correctly and no longer raises a parse error.
|
||||
* Comparing different custom types now works correctly when the appropriate comparison operators are registered.
|
||||
* Some op-assignments, such as `x += y` where `x` and `y` are `char`, now work correctly instead of failing silently.
|
||||
* Op-assignments to bit flags or bit ranges now work correctly.
|
||||
|
||||
Potentially breaking changes
|
||||
@ -30,8 +36,8 @@ Enhancements
|
||||
* The functions `min` and `max` are added for numbers.
|
||||
* Range cases in `switch` statements now also match floating-point and decimal values. In order to support this, however, small numeric range cases are no longer unrolled.
|
||||
* Loading a module via `import` now gives the module access to the current scope, including variables and constants defined inside.
|
||||
* Some very simple operator calls (e.g. integer add) are short-circuited to avoid the overhead of a function call, resulting in a small speed improvement.
|
||||
* The tokenizer now uses table-driven keyword recognizers generated by GNU gperf. At least _theoretically_ it should be faster...
|
||||
* Some very simple operator calls (e.g. integer add) are inlined to avoid the overhead of a function call, resulting in a small speed improvement.
|
||||
* The tokenizer now uses table-driven keyword recognizers generated by GNU `gperf`. At least _theoretically_ it should be faster...
|
||||
* The field `isAnonymous` is added to JSON functions metadata.
|
||||
|
||||
|
||||
|
@ -305,7 +305,7 @@ impl fmt::Display for Token {
|
||||
}
|
||||
}
|
||||
|
||||
// Table-driven keyword recognizer generated by GNU gperf on the file `tools/keywords.txt`.
|
||||
// Table-driven keyword recognizer generated by GNU `gperf` on the file `tools/keywords.txt`.
|
||||
//
|
||||
// When adding new keywords, make sure to update `tools/keywords.txt` and re-generate this.
|
||||
|
||||
@ -507,7 +507,7 @@ static KEYWORDS_LIST: [(&str, Token); 153] = [
|
||||
("#{", Token::MapStart),
|
||||
];
|
||||
|
||||
// Table-driven reserved symbol recognizer generated by GNU gperf on the file `tools/reserved.txt`.
|
||||
// Table-driven reserved symbol recognizer generated by GNU `gperf` on the file `tools/reserved.txt`.
|
||||
//
|
||||
// When adding new reserved symbols, make sure to update `tools/reserved.txt` and re-generate this.
|
||||
|
||||
@ -872,7 +872,7 @@ impl Token {
|
||||
#[must_use]
|
||||
pub fn lookup_symbol_from_syntax(syntax: &str) -> Option<Self> {
|
||||
// This implementation is based upon a pre-calculated table generated
|
||||
// by GNU gperf on the list of keywords.
|
||||
// by GNU `gperf` on the list of keywords.
|
||||
let utf8 = syntax.as_bytes();
|
||||
let len = utf8.len();
|
||||
let mut hash_val = len;
|
||||
@ -893,8 +893,8 @@ impl Token {
|
||||
|
||||
match KEYWORDS_LIST[hash_val] {
|
||||
(_, Token::EOF) => None,
|
||||
// Fail early to avoid calling memcmp()
|
||||
// Since we are already working with bytes, mind as well check the first one
|
||||
// Fail early to avoid calling memcmp().
|
||||
// Since we are already working with bytes, might as well check the first one.
|
||||
(s, ref t) if s.len() == len && s.as_bytes()[0] == utf8[0] && s == syntax => {
|
||||
Some(t.clone())
|
||||
}
|
||||
@ -2312,7 +2312,7 @@ pub fn is_id_continue(x: char) -> bool {
|
||||
#[must_use]
|
||||
pub fn is_reserved_keyword_or_symbol(syntax: &str) -> (bool, bool, bool) {
|
||||
// This implementation is based upon a pre-calculated table generated
|
||||
// by GNU gperf on the list of keywords.
|
||||
// by GNU `gperf` on the list of keywords.
|
||||
let utf8 = syntax.as_bytes();
|
||||
let len = utf8.len();
|
||||
let rounds = len.min(3);
|
||||
@ -2333,8 +2333,8 @@ pub fn is_reserved_keyword_or_symbol(syntax: &str) -> (bool, bool, bool) {
|
||||
match RESERVED_LIST[hash_val] {
|
||||
("", ..) => (false, false, false),
|
||||
(s, true, a, b) => (
|
||||
// Fail early to avoid calling memcmp()
|
||||
// Since we are already working with bytes, mind as well check the first one
|
||||
// Fail early to avoid calling memcmp().
|
||||
// Since we are already working with bytes, might as well check the first one.
|
||||
s.len() == len && s.as_bytes()[0] == utf8[0] && s == syntax,
|
||||
a,
|
||||
b,
|
||||
|
Loading…
Reference in New Issue
Block a user