Use stacked functions resolution caches to further improve performance.

Stephen Chung 2021-02-07 15:41:40 +08:00
parent 7b87f81850
commit aafff4fb93
3 changed files with 87 additions and 39 deletions
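The commit message describes the change at a high level: instead of a single function-resolution cache, the evaluation state carries a stack of caches that is pushed and popped as the resolution context changes. The following is a minimal, self-contained sketch of that idea in plain Rust; the stand-in names (`FnCacheStack`, the `u64` hash and `String` payload) are illustrative only and are not the engine's actual types.

use std::collections::HashMap;

#[derive(Default)]
struct FnCacheStack {
    // Each entry is one cache level; the last one is the active cache.
    caches: Vec<HashMap<u64, Option<String>>>,
}

impl FnCacheStack {
    /// Get the active cache, creating the first level on demand.
    fn active(&mut self) -> &mut HashMap<u64, Option<String>> {
        if self.caches.is_empty() {
            self.caches.push(Default::default());
        }
        self.caches.last_mut().unwrap()
    }
    /// Push a fresh level when the resolution context changes (e.g. on `import`).
    fn push_level(&mut self) {
        self.caches.push(Default::default());
    }
    /// Pop that level when the context ends, restoring the previous cache as-is.
    fn pop_level(&mut self) {
        self.caches.pop();
    }
}

fn main() {
    let mut stack = FnCacheStack::default();
    stack.active().insert(42, Some("print".to_string())); // cached resolution
    stack.push_level();                                    // e.g. a block that imports a module
    assert!(stack.active().get(&42).is_none());            // the new level starts empty
    stack.pop_level();                                     // leaving the block
    assert!(stack.active().get(&42).is_some());            // the old entries are back
}

The point of the stack is that invalidation becomes local: only the top level is cleared or discarded, while the caller's cache survives.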

View File

@@ -9,6 +9,11 @@ Breaking changes

 * `Dynamic::into_shared` is no longer available under `no_closure`. It used to panic.

+Enhancements
+------------
+
+* Functions resolution cache is used in more cases, making repeated function calls faster.
+
 Version 0.19.11
 ===============

View File

@@ -520,10 +520,12 @@ pub struct State {
     #[cfg(not(feature = "no_module"))]
     pub resolver: Option<Shared<crate::module::resolvers::StaticModuleResolver>>,
     /// Cached lookup values for function hashes.
-    pub functions_cache: HashMap<
-        NonZeroU64,
-        Option<(CallableFunction, Option<ImmutableString>)>,
-        StraightHasherBuilder,
-    >,
+    pub functions_caches: StaticVec<
+        HashMap<
+            NonZeroU64,
+            Option<(CallableFunction, Option<ImmutableString>)>,
+            StraightHasherBuilder,
+        >,
+    >,
 }
@@ -1856,6 +1858,7 @@ impl Engine {
         statements: impl IntoIterator<Item = &'a Stmt>,
         level: usize,
     ) -> Result<Dynamic, Box<EvalAltResult>> {
+        let mut has_imports = false;
         let prev_always_search = state.always_search;
         let prev_scope_len = scope.len();
         let prev_mods_len = mods.len();
@@ -1864,13 +1867,26 @@ impl Engine {
         let result = statements
             .into_iter()
             .try_fold(Default::default(), |_, stmt| {
+                match stmt {
+                    Stmt::Import(_, _, _) => {
+                        // When imports list is modified, clear the functions lookup cache
+                        if has_imports {
+                            state.functions_caches.last_mut().map(|c| c.clear());
+                        } else {
+                            state.functions_caches.push(Default::default());
+                        }
+                        has_imports = true;
+                    }
+                    _ => (),
+                }
+
                 self.eval_stmt(scope, mods, state, lib, this_ptr, stmt, level)
             });

         scope.rewind(prev_scope_len);
-        if mods.len() != prev_mods_len {
-            // If imports list is modified, clear the functions lookup cache
-            state.functions_cache.clear();
+        if has_imports {
+            // If imports list is modified, pop the functions lookup cache
+            state.functions_caches.pop();
         }
         mods.truncate(prev_mods_len);
         state.scope_level -= 1;
@@ -2365,8 +2381,6 @@ impl Engine {
                 } else {
                     mods.push(name_def.name.clone(), module);
                 }
-                // When imports list is modified, clear the functions lookup cache
-                state.functions_cache.clear();
             }

             state.modules += 1;
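The hunks above drive the cache stack from block evaluation: the first `import` in a block pushes a fresh cache level, later imports only clear that level, and leaving the block pops it, so the outer cache is restored instead of being wiped (which is what the removed `state.functions_cache.clear()` call used to do). A rough sketch of that lifecycle with simplified stand-in types; `Stmt`, the cache map and the evaluation itself are placeholders, not the real engine code.

use std::collections::HashMap;

enum Stmt {
    Import,
    Other,
}

fn eval_block(stmts: &[Stmt], caches: &mut Vec<HashMap<u64, Option<String>>>) {
    let mut has_imports = false;

    for stmt in stmts {
        if matches!(stmt, Stmt::Import) {
            if has_imports {
                // Later imports only invalidate the active (top) cache level.
                if let Some(cache) = caches.last_mut() {
                    cache.clear();
                }
            } else {
                // First import in this block: shadow the outer cache with a fresh level.
                caches.push(Default::default());
            }
            has_imports = true;
        }
        // ... the real code evaluates the statement here ...
    }

    if has_imports {
        // Leaving the block: drop the shadow level; the outer cache is intact again.
        caches.pop();
    }
}

fn main() {
    let mut caches = vec![HashMap::from([(1_u64, Some("outer".to_string()))])];
    eval_block(&[Stmt::Other, Stmt::Import, Stmt::Import], &mut caches);
    assert_eq!(caches.len(), 1);      // the outer cache level survived untouched
    assert!(caches[0].contains_key(&1));
}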

View File

@@ -176,28 +176,37 @@ impl Engine {
         self.inc_operations(state, pos)?;

         // Check if function access already in the cache
-        let func = &*state.functions_cache.entry(hash_fn).or_insert_with(|| {
-            // Search for the native function
-            // First search registered functions (can override packages)
-            // Then search packages
-            // Finally search modules
-            //lib.get_fn(hash_fn, pub_only)
-            self.global_namespace
-                .get_fn(hash_fn, pub_only)
-                .cloned()
-                .map(|f| (f, None))
-                .or_else(|| {
-                    self.global_modules.iter().find_map(|m| {
-                        m.get_fn(hash_fn, false)
-                            .map(|f| (f.clone(), m.id_raw().cloned()))
-                    })
-                })
-                .or_else(|| {
-                    mods.get_fn(hash_fn)
-                        .map(|(f, source)| (f.clone(), source.cloned()))
-                })
-        });
+        if state.functions_caches.is_empty() {
+            state.functions_caches.push(Default::default());
+        }
+
+        let func = &*state
+            .functions_caches
+            .last_mut()
+            .unwrap()
+            .entry(hash_fn)
+            .or_insert_with(|| {
+                // Search for the native function
+                // First search registered functions (can override packages)
+                // Then search packages
+                // Finally search modules
+                //lib.get_fn(hash_fn, pub_only)
+                self.global_namespace
+                    .get_fn(hash_fn, pub_only)
+                    .cloned()
+                    .map(|f| (f, None))
+                    .or_else(|| {
+                        self.global_modules.iter().find_map(|m| {
+                            m.get_fn(hash_fn, false)
+                                .map(|f| (f.clone(), m.id_raw().cloned()))
+                        })
+                    })
+                    .or_else(|| {
+                        mods.get_fn(hash_fn)
+                            .map(|(f, source)| (f.clone(), source.cloned()))
+                    })
+            });

         if let Some((func, source)) = func {
             assert!(func.is_native());
@@ -392,11 +401,11 @@ impl Engine {
         // Merge in encapsulated environment, if any
         let mut lib_merged: StaticVec<_>;
-        let mut old_cache = None;
+        let mut unified = false;

         let unified_lib = if let Some(ref env_lib) = fn_def.lib {
-            old_cache = Some(mem::take(&mut state.functions_cache));
+            unified = true;
+            state.functions_caches.push(Default::default());
             lib_merged = Default::default();
             lib_merged.push(env_lib.as_ref());
             lib_merged.extend(lib.iter().cloned());
@@ -467,8 +476,8 @@ impl Engine {
         mods.truncate(prev_mods_len);
         state.scope_level = orig_scope_level;

-        if let Some(cache) = old_cache {
-            state.functions_cache = cache;
+        if unified {
+            state.functions_caches.pop();
         }

         result
@@ -506,13 +515,13 @@ impl Engine {
         // Check if it is already in the cache
         if let Some(state) = state.as_mut() {
             if let Some(hash) = hash_script {
-                match state.functions_cache.get(&hash) {
+                match state.functions_caches.last().map_or(None, |c| c.get(&hash)) {
                     Some(v) => return v.is_some(),
                     None => (),
                 }
             }
             if let Some(hash) = hash_fn {
-                match state.functions_cache.get(&hash) {
+                match state.functions_caches.last().map_or(None, |c| c.get(&hash)) {
                     Some(v) => return v.is_some(),
                     None => (),
                 }
@@ -536,10 +545,24 @@ impl Engine {
         if !r {
             if let Some(state) = state.as_mut() {
                 if let Some(hash) = hash_script {
-                    state.functions_cache.insert(hash, None);
+                    if state.functions_caches.is_empty() {
+                        state.functions_caches.push(Default::default());
+                    }
+                    state
+                        .functions_caches
+                        .last_mut()
+                        .unwrap()
+                        .insert(hash, None);
                 }
                 if let Some(hash) = hash_fn {
-                    state.functions_cache.insert(hash, None);
+                    if state.functions_caches.is_empty() {
+                        state.functions_caches.push(Default::default());
+                    }
+                    state
+                        .functions_caches
+                        .last_mut()
+                        .unwrap()
+                        .insert(hash, None);
                 }
             }
         }
@@ -630,8 +653,14 @@ impl Engine {
         let hash_script = hash_script.unwrap();

         // Check if function access already in the cache
+        if state.functions_caches.is_empty() {
+            state.functions_caches.push(Default::default());
+        }
+
         let (func, source) = state
-            .functions_cache
+            .functions_caches
+            .last_mut()
+            .unwrap()
             .entry(hash_script)
             .or_insert_with(|| {
                 lib.iter()
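The hunks in this file repeat one lookup pattern: lazily create the first cache level if none exists yet, then memoize the resolution result (including failures, stored as `None`) in the top-most cache under the function hash. The sketch below shows that pattern in isolation with stand-in types; the real engine caches `(CallableFunction, Option<ImmutableString>)` keyed by `NonZeroU64` with a `StraightHasherBuilder`, not the simplified `u64`/`String` used here.

use std::collections::HashMap;

type FnCaches = Vec<HashMap<u64, Option<String>>>;

fn resolve_cached(
    caches: &mut FnCaches,
    hash: u64,
    resolve: impl FnOnce() -> Option<String>,
) -> Option<String> {
    // Lazily create the first cache level on demand.
    if caches.is_empty() {
        caches.push(Default::default());
    }
    caches
        .last_mut()
        .unwrap()
        .entry(hash)
        // Cache misses (`None`) too, so a failed lookup is never repeated.
        .or_insert_with(resolve)
        .clone()
}

fn main() {
    let mut caches: FnCaches = Vec::new();
    let hit = resolve_cached(&mut caches, 7, || Some("add".to_string()));
    assert_eq!(hit.as_deref(), Some("add"));
    // The second call returns the cached value without running the resolver.
    let again = resolve_cached(&mut caches, 7, || unreachable!());
    assert_eq!(again.as_deref(), Some("add"));
}

Caching negative results as well means a name that fails to resolve is only searched once per cache level, which is what the `insert(hash, None)` calls in the diff achieve.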