From 8d91e7eb633bc4433c1217282b2b23bb0cf83a0a Mon Sep 17 00:00:00 2001
From: quake
Date: Wed, 20 Jul 2022 21:28:17 +0900
Subject: [PATCH] chore: clippy fix useless_conversion

---
 src/api/custom_syntax.rs | 3 +--
 src/ast/stmt.rs          | 4 ++--
 src/eval/expr.rs         | 2 +-
 src/module/mod.rs        | 4 ++--
 src/tokenizer.rs         | 2 +-
 5 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/src/api/custom_syntax.rs b/src/api/custom_syntax.rs
index 4f2fb543..4769e96e 100644
--- a/src/api/custom_syntax.rs
+++ b/src/api/custom_syntax.rs
@@ -365,8 +365,7 @@ impl Engine {
                 parse: Box::new(parse),
                 func: Box::new(func),
                 scope_may_be_changed,
-            }
-            .into(),
+            },
         );
         self
     }
diff --git a/src/ast/stmt.rs b/src/ast/stmt.rs
index 153ee2ad..e654ebf3 100644
--- a/src/ast/stmt.rs
+++ b/src/ast/stmt.rs
@@ -194,8 +194,8 @@ impl IntoIterator for RangeCase {
     #[inline(always)]
     fn into_iter(self) -> Self::IntoIter {
         match self {
-            Self::ExclusiveInt(r, ..) => Box::new(r.into_iter()),
-            Self::InclusiveInt(r, ..) => Box::new(r.into_iter()),
+            Self::ExclusiveInt(r, ..) => Box::new(r),
+            Self::InclusiveInt(r, ..) => Box::new(r),
         }
     }
 }
diff --git a/src/eval/expr.rs b/src/eval/expr.rs
index c4a0011f..f4929f19 100644
--- a/src/eval/expr.rs
+++ b/src/eval/expr.rs
@@ -99,7 +99,7 @@ impl Engine {
                     let mut target: Target = value.clone().into();
                     // Module variables are constant
                     target.set_access_mode(AccessMode::ReadOnly);
-                    return Ok((target.into(), *_var_pos));
+                    return Ok((target, *_var_pos));
                 }
             }
 
diff --git a/src/module/mod.rs b/src/module/mod.rs
index c2ebf1b5..18d996c1 100644
--- a/src/module/mod.rs
+++ b/src/module/mod.rs
@@ -693,7 +693,7 @@ impl Module {
                     #[cfg(feature = "metadata")]
                     comments: Box::default(),
                 },
-                func: Into::<CallableFunction>::into(fn_def).into(),
+                func: fn_def.into(),
                 param_types: StaticVec::new_const(),
             }
             .into(),
@@ -1029,7 +1029,7 @@ impl Module {
                     #[cfg(feature = "metadata")]
                     comments: Box::default(),
                 },
-                func: func.into(),
+                func,
                 param_types,
             }
             .into(),
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index fa1fcad9..eda7720c 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1485,7 +1485,7 @@ fn get_next_token_inner(
                 let return_comment =
                     return_comment || is_doc_comment(comment.as_ref().expect("`Some`"));
                 if return_comment {
-                    return Some((Token::Comment(comment.expect("`Some`").into()), start_pos));
+                    return Some((Token::Comment(comment.expect("`Some`")), start_pos));
                 }
                 if state.comment_level > 0 {
                     // Reached EOF without ending comment block
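
Note (not part of the patch itself): clippy's `useless_conversion` lint fires when a value is converted into a type it already has, e.g. calling `.into()` on a value that is already the target type or `.into_iter()` on something that is already an iterator, so every hunk above simply drops a redundant conversion call. A minimal stand-alone sketch of the pattern, using a made-up `Wrapper` type rather than anything from the Rhai sources:

    // Illustration of the lint being fixed above; `Wrapper` is hypothetical.
    #[derive(Debug)]
    struct Wrapper(i64);

    fn main() {
        let w = Wrapper(1);
        // `w` already has type `Wrapper`; the blanket `impl<T> From<T> for T`
        // lets this compile, but clippy reports `useless_conversion` because
        // the call changes nothing. The fix is to delete the `.into()`, which
        // is the kind of change made throughout the diff above.
        let same: Wrapper = w.into();
        println!("{:?}", same);
    }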