From d79d63d8aa39198109ccfa8d6cc0d77c9d090eea Mon Sep 17 00:00:00 2001 From: zjb0807 Date: Fri, 1 Aug 2025 10:32:41 +0800 Subject: [PATCH 1/4] update ethereum-types 0.15.1 --- ethabi/Cargo.toml | 4 ++-- ethabi/src/decoder.rs | 10 +++++----- ethabi/src/encoder.rs | 12 ++++++------ ethabi/src/event.rs | 16 +++++++++++++--- ethabi/src/function.rs | 4 ++-- ethabi/src/tests.rs | 16 ++++++++-------- ethabi/src/token/lenient.rs | 8 ++++---- ethabi/src/token/mod.rs | 6 +++--- ethabi/src/token/strict.rs | 10 +++++----- 9 files changed, 48 insertions(+), 38 deletions(-) diff --git a/ethabi/Cargo.toml b/ethabi/Cargo.toml index e52c3a5d7..33b11898f 100644 --- a/ethabi/Cargo.toml +++ b/ethabi/Cargo.toml @@ -17,9 +17,9 @@ hex = { version = "0.4", default-features = false, features = ["alloc"] } serde = { version = "1.0", optional = true, default-features = false, features = ["derive"] } serde_json = { version = "1.0", optional = true } sha3 = { version = "0.10", default-features = false } -ethereum-types = { version = "0.14.0", default-features = false } +ethereum-types = { version = "0.15.1", default-features = false } thiserror = { version = "1", optional = true } -uint = { version = "0.9.0", default-features = false, optional = true } +uint = { version = "0.10.0", default-features = false, optional = true } regex = { version = "1.5.4", optional = true } once_cell = { version = "1.9.0", optional = true } diff --git a/ethabi/src/decoder.rs b/ethabi/src/decoder.rs index cc4414708..8f546d328 100644 --- a/ethabi/src/decoder.rs +++ b/ethabi/src/decoder.rs @@ -10,7 +10,7 @@ #[cfg(not(feature = "std"))] use crate::no_std_prelude::*; -use crate::{Error, ParamType, Token, Word}; +use crate::{Error, Int, ParamType, Token, Uint, Word}; #[derive(Debug)] struct DecodeResult { @@ -131,12 +131,12 @@ fn decode_param(param: &ParamType, data: &[u8], offset: usize, validate: bool) - } ParamType::Int(_) => { let slice = peek_32_bytes(data, offset)?; - let result = DecodeResult { token: 
Token::Int(slice.into()), new_offset: offset + 32 }; + let result = DecodeResult { token: Token::Int(Int::from_big_endian(&slice)), new_offset: offset + 32 }; Ok(result) } ParamType::Uint(_) => { let slice = peek_32_bytes(data, offset)?; - let result = DecodeResult { token: Token::Uint(slice.into()), new_offset: offset + 32 }; + let result = DecodeResult { token: Token::Uint(Uint::from_big_endian(&slice)), new_offset: offset + 32 }; Ok(result) } ParamType::Bool => { @@ -297,7 +297,7 @@ mod tests { ); let address1 = Token::Address([0x11u8; 20].into()); let address2 = Token::Address([0x22u8; 20].into()); - let uint = Token::Uint([0x11u8; 32].into()); + let uint = Token::Uint(Uint::from_big_endian(&[0x11u8; 32])); let tuple = Token::Tuple(vec![address1, address2, uint]); let expected = vec![tuple]; let decoded = @@ -396,7 +396,7 @@ mod tests { 6761766f66796f726b0000000000000000000000000000000000000000000000 " ); - let uint = Token::Uint([0x11u8; 32].into()); + let uint = Token::Uint(Uint::from_big_endian(&[0x11u8; 32])); let string = Token::String("gavofyork".to_owned()); let address1 = Token::Address([0x11u8; 20].into()); let address2 = Token::Address([0x22u8; 20].into()); diff --git a/ethabi/src/encoder.rs b/ethabi/src/encoder.rs index f6223cd26..b1839ec41 100644 --- a/ethabi/src/encoder.rs +++ b/ethabi/src/encoder.rs @@ -164,8 +164,8 @@ fn encode_token_append(data: &mut Vec<u8>, token: &Token) { Token::Bytes(ref bytes) => pad_bytes_append(data, bytes), Token::String(ref s) => pad_bytes_append(data, s.as_bytes()), Token::FixedBytes(ref bytes) => fixed_bytes_append(data, bytes), - Token::Int(int) => data.push(int.into()), - Token::Uint(uint) => data.push(uint.into()), + Token::Int(int) => data.push(int.to_big_endian()), + Token::Uint(uint) => data.push(uint.to_big_endian()), Token::Bool(b) => { let mut value = [0u8; 32]; if b { @@ -183,7 +183,7 @@ mod tests { #[cfg(not(feature = "std"))] use crate::no_std_prelude::*; - use crate::{encode, util::pad_u32, Token}; + use 
crate::{encode, util::pad_u32, Int, Token, Uint}; #[test] fn encode_address() { @@ -529,7 +529,7 @@ mod tests { fn encode_uint() { let mut uint = [0u8; 32]; uint[31] = 4; - let encoded = encode(&[Token::Uint(uint.into())]); + let encoded = encode(&[Token::Uint(Uint::from_big_endian(&uint))]); let expected = hex!("0000000000000000000000000000000000000000000000000000000000000004"); assert_eq!(encoded, expected); } @@ -538,7 +538,7 @@ mod tests { fn encode_int() { let mut int = [0u8; 32]; int[31] = 4; - let encoded = encode(&[Token::Int(int.into())]); + let encoded = encode(&[Token::Int(Int::from_big_endian(&int))]); let expected = hex!("0000000000000000000000000000000000000000000000000000000000000004"); assert_eq!(encoded, expected); } @@ -730,7 +730,7 @@ mod tests { #[test] fn encode_complex_tuple() { - let uint = Token::Uint([0x11u8; 32].into()); + let uint = Token::Uint(Uint::from_big_endian(&[0x11u8; 32])); let string = Token::String("gavofyork".to_owned()); let address1 = Token::Address([0x11u8; 20].into()); let address2 = Token::Address([0x22u8; 20].into()); diff --git a/ethabi/src/event.rs b/ethabi/src/event.rs index 3df342bf1..61653542d 100644 --- a/ethabi/src/event.rs +++ b/ethabi/src/event.rs @@ -198,7 +198,7 @@ mod tests { log::{Log, RawLog}, signature::long_signature, token::Token, - Event, EventParam, LogParam, ParamType, + Event, EventParam, Int, LogParam, ParamType, }; #[test] @@ -259,8 +259,18 @@ mod tests { result, Log { params: [ - ("a", Token::Int(hex!("0000000000000000000000000000000000000000000000000000000000000003").into()),), - ("b", Token::Int(hex!("0000000000000000000000000000000000000000000000000000000000000002").into()),), + ( + "a", + Token::Int(Int::from_big_endian(&hex!( + "0000000000000000000000000000000000000000000000000000000000000003" + ))), + ), + ( + "b", + Token::Int(Int::from_big_endian(&hex!( + "0000000000000000000000000000000000000000000000000000000000000002" + ))), + ), ("c", 
Token::Address(hex!("2222222222222222222222222222222222222222").into())), ("d", Token::Address(hex!("1111111111111111111111111111111111111111").into())), ( diff --git a/ethabi/src/function.rs b/ethabi/src/function.rs index 1c6a40ad1..a951420c9 100644 --- a/ethabi/src/function.rs +++ b/ethabi/src/function.rs @@ -103,7 +103,7 @@ mod tests { #[cfg(not(feature = "std"))] use crate::no_std_prelude::*; - use crate::{Function, Param, ParamType, StateMutability, Token}; + use crate::{Function, Param, ParamType, StateMutability, Token, Uint}; #[test] fn test_function_encode_call() { @@ -121,7 +121,7 @@ mod tests { let mut uint = [0u8; 32]; uint[31] = 69; - let encoded = func.encode_input(&[Token::Uint(uint.into()), Token::Bool(true)]).unwrap(); + let encoded = func.encode_input(&[Token::Uint(Uint::from_big_endian(&uint)), Token::Bool(true)]).unwrap(); let expected = hex!("cdcd77c000000000000000000000000000000000000000000000000000000000000000450000000000000000000000000000000000000000000000000000000000000001").to_vec(); assert_eq!(encoded, expected); diff --git a/ethabi/src/tests.rs b/ethabi/src/tests.rs index 73726e548..2638bd7d7 100644 --- a/ethabi/src/tests.rs +++ b/ethabi/src/tests.rs @@ -17,7 +17,7 @@ use serde_json::Value; #[cfg(not(feature = "std"))] use crate::no_std_prelude::*; -use crate::{decode, encode, ParamType, Token}; +use crate::{decode, encode, Int, ParamType, Token, Uint}; #[cfg(feature = "serde")] pub(crate) fn assert_json_eq(left: &str, right: &str) { @@ -133,7 +133,7 @@ test_encode_decode! { test_encode_decode! { name: int, types: [ParamType::Int(32)], - tokens: [Token::Int([0x11u8; 32].into())], + tokens: [Token::Int(Int::from_big_endian(&[0x11u8; 32]))], data: "1111111111111111111111111111111111111111111111111111111111111111" } test_encode_decode! { @@ -142,7 +142,7 @@ test_encode_decode! 
{ tokens: { let mut int = [0u8; 32]; int[31] = 4; - [Token::Int(int.into())] + [Token::Int(Int::from_big_endian(&int))] }, data: "0000000000000000000000000000000000000000000000000000000000000004" } @@ -151,7 +151,7 @@ test_encode_decode! { test_encode_decode! { name: uint, types: [ParamType::Uint(32)], - tokens: [Token::Uint([0x11u8; 32].into())], + tokens: [Token::Uint(Uint::from_big_endian(&[0x11u8; 32]))], data: "1111111111111111111111111111111111111111111111111111111111111111" } test_encode_decode! { @@ -160,7 +160,7 @@ test_encode_decode! { tokens: { let mut uint = [0u8; 32]; uint[31] = 4; - [Token::Uint(uint.into())] + [Token::Uint(Uint::from_big_endian(&uint))] }, data: "0000000000000000000000000000000000000000000000000000000000000004" } @@ -578,15 +578,15 @@ test_encode_decode! { Token::Array(vec![ Token::Tuple(vec![ Token::Address([0x11u8; 20].into()), - Token::Uint([0x11u8; 32].into()), + Token::Uint(Uint::from_big_endian(&[0x11u8; 32])), ]), Token::Tuple(vec![ Token::Address([0x22u8; 20].into()), - Token::Uint([0x22u8; 32].into()), + Token::Uint(Uint::from_big_endian(&[0x22u8; 32])), ]), Token::Tuple(vec![ Token::Address([0x33u8; 20].into()), - Token::Uint([0x44u8; 32].into()), + Token::Uint(Uint::from_big_endian(&[0x44u8; 32])), ]) ]) ] diff --git a/ethabi/src/token/lenient.rs b/ethabi/src/token/lenient.rs index 11c2f97f9..5de210a54 100644 --- a/ethabi/src/token/lenient.rs +++ b/ethabi/src/token/lenient.rs @@ -90,7 +90,7 @@ impl Tokenizer for LenientTokenizer { } }; - Ok(uint.into()) + Ok(uint.to_big_endian()) } // We don't have a proper signed int 256-bit long type, so here we're cheating. 
We build a U256 @@ -106,7 +106,7 @@ impl Tokenizer for LenientTokenizer { let max = Uint::max_value() / 2; let int = if value.starts_with('-') { if abs.is_zero() { - return Ok(abs.into()); + return Ok(abs.to_big_endian()); } else if abs > max + 1 { return Err(Error::Other(Cow::Borrowed("int256 parse error: Underflow"))); } @@ -117,7 +117,7 @@ impl Tokenizer for LenientTokenizer { } abs }; - Ok(int.into()) + Ok(int.to_big_endian()) } } @@ -139,7 +139,7 @@ mod tests { "1111111111111111111111111111111111111111111111111111111111111111" ) .unwrap(), - Token::Uint([0x11u8; 32].into()) + Token::Uint(Uint::from_big_endian(&[0x11u8; 32])) ); } diff --git a/ethabi/src/token/mod.rs b/ethabi/src/token/mod.rs index 3df155fa4..bcf44cf48 100644 --- a/ethabi/src/token/mod.rs +++ b/ethabi/src/token/mod.rs @@ -27,7 +27,7 @@ use crate::no_std_prelude::*; use core::cmp::Ordering::{Equal, Less}; #[cfg(feature = "serde")] -use crate::{Error, ParamType}; +use crate::{Error, Int, ParamType, Uint}; /// This trait should be used to parse string values as tokens. 
#[cfg(feature = "serde")] @@ -44,8 +44,8 @@ pub trait Tokenizer { ParamType::FixedBytes(len) => { Self::tokenize_fixed_bytes(value.strip_prefix("0x").unwrap_or(value), len).map(Token::FixedBytes) } - ParamType::Uint(_) => Self::tokenize_uint(value).map(Into::into).map(Token::Uint), - ParamType::Int(_) => Self::tokenize_int(value).map(Into::into).map(Token::Int), + ParamType::Uint(_) => Self::tokenize_uint(value).map(|v| Uint::from_big_endian(&v)).map(Token::Uint), + ParamType::Int(_) => Self::tokenize_int(value).map(|v| Int::from_big_endian(&v)).map(Token::Int), ParamType::Array(ref p) => Self::tokenize_array(value, p).map(Token::Array), ParamType::FixedArray(ref p, len) => Self::tokenize_fixed_array(value, p, len).map(Token::FixedArray), ParamType::Tuple(ref p) => Self::tokenize_struct(value, p).map(Token::Tuple), diff --git a/ethabi/src/token/strict.rs b/ethabi/src/token/strict.rs index 6dfd7a4bd..56ad32456 100644 --- a/ethabi/src/token/strict.rs +++ b/ethabi/src/token/strict.rs @@ -71,7 +71,7 @@ impl Tokenizer for StrictTokenizer { mod tests { use crate::{ token::{StrictTokenizer, Token, Tokenizer}, - ParamType, + Int, ParamType, Uint, }; #[test] @@ -132,7 +132,7 @@ mod tests { "1111111111111111111111111111111111111111111111111111111111111111" ) .unwrap(), - Token::Uint([0x11u8; 32].into()) + Token::Uint(Uint::from_big_endian(&[0x11u8; 32])) ); assert_eq!( @@ -141,7 +141,7 @@ mod tests { "2222222222222222222222222222222222222222222222222222222222222222" ) .unwrap(), - Token::Uint([0x22u8; 32].into()) + Token::Uint(Uint::from_big_endian(&[0x22u8; 32])) ); } @@ -153,7 +153,7 @@ mod tests { "1111111111111111111111111111111111111111111111111111111111111111" ) .unwrap(), - Token::Int([0x11u8; 32].into()) + Token::Int(Int::from_big_endian(&[0x11u8; 32])) ); assert_eq!( @@ -162,7 +162,7 @@ mod tests { "2222222222222222222222222222222222222222222222222222222222222222" ) .unwrap(), - Token::Int([0x22u8; 32].into()) + Token::Int(Int::from_big_endian(&[0x22u8; 32])) ); } 
From 02a38c209428eee7e0e1c5b40dfc922c6148908f Mon Sep 17 00:00:00 2001 From: zjb0807 Date: Fri, 1 Aug 2025 10:47:54 +0800 Subject: [PATCH 2/4] fix clippy --- derive/src/lib.rs | 2 +- ethabi/src/decoder.rs | 2 +- ethabi/src/error.rs | 2 +- ethabi/src/event.rs | 10 +++++----- ethabi/src/function.rs | 2 +- ethabi/src/param_type/deserialize.rs | 2 +- tests/src/lib.rs | 4 ++-- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/derive/src/lib.rs b/derive/src/lib.rs index 3f4d59469..6f9359925 100644 --- a/derive/src/lib.rs +++ b/derive/src/lib.rs @@ -106,7 +106,7 @@ fn to_syntax_string(param_type: &ethabi::ParamType) -> proc_macro2::TokenStream } } -fn to_ethabi_param_vec<'a, P: 'a>(params: P) -> proc_macro2::TokenStream +fn to_ethabi_param_vec<'a, P>(params: P) -> proc_macro2::TokenStream where P: IntoIterator<Item = &'a Param>, { diff --git a/ethabi/src/decoder.rs b/ethabi/src/decoder.rs index 8f546d328..7e95d2c4d 100644 --- a/ethabi/src/decoder.rs +++ b/ethabi/src/decoder.rs @@ -93,7 +93,7 @@ fn peek_32_bytes(data: &[u8], offset: usize) -> Result<Word, Error> { } fn round_up_nearest_multiple(value: usize, padding: usize) -> usize { - (value + padding - 1) / padding * padding + value.div_ceil(padding) * padding } fn take_bytes(data: &[u8], offset: usize, len: usize, validate: bool) -> Result<Vec<u8>, Error> { diff --git a/ethabi/src/error.rs b/ethabi/src/error.rs index fa1b2f56f..bffd79534 100644 --- a/ethabi/src/error.rs +++ b/ethabi/src/error.rs @@ -51,7 +51,7 @@ impl Error { let signed = short_signature(&self.name, &params).to_vec(); let encoded = encode(tokens); - Ok(signed.into_iter().chain(encoded.into_iter()).collect()) + Ok(signed.into_iter().chain(encoded).collect()) } /// Parses the ABI function input to a list of tokens. 
diff --git a/ethabi/src/event.rs b/ethabi/src/event.rs index 61653542d..f9961b6f8 100644 --- a/ethabi/src/event.rs +++ b/ethabi/src/event.rs @@ -92,7 +92,7 @@ impl Event { let kinds: Vec<_> = self.indexed_params(true).into_iter().map(|param| param.kind).collect(); let result = if self.anonymous { TopicFilter { - topic0: convert_topic(raw.topic0, kinds.get(0))?, + topic0: convert_topic(raw.topic0, kinds.first())?, topic1: convert_topic(raw.topic1, kinds.get(1))?, topic2: convert_topic(raw.topic2, kinds.get(2))?, topic3: Topic::Any, } } else { TopicFilter { topic0: Topic::This(self.signature()), - topic1: convert_topic(raw.topic0, kinds.get(0))?, + topic1: convert_topic(raw.topic0, kinds.first())?, topic2: convert_topic(raw.topic1, kinds.get(1))?, topic3: convert_topic(raw.topic2, kinds.get(2))?, } @@ -136,7 +136,7 @@ impl Event { 0 } else { // verify - let event_signature = topics.get(0).ok_or(Error::InvalidData)?; + let event_signature = topics.first().ok_or(Error::InvalidData)?; if event_signature != &self.signature() { return Err(Error::InvalidData); } @@ -155,13 +155,13 @@ impl Event { return Err(Error::InvalidData); } - let topics_named_tokens = topic_params.into_iter().map(|p| p.name).zip(topic_tokens.into_iter()); + let topics_named_tokens = topic_params.into_iter().map(|p| p.name).zip(topic_tokens); let data_types = data_params.iter().map(|p| p.kind.clone()).collect::<Vec<ParamType>>(); let data_tokens = decode(&data_types, &data)?; - let data_named_tokens = data_params.into_iter().map(|p| p.name).zip(data_tokens.into_iter()); + let data_named_tokens = data_params.into_iter().map(|p| p.name).zip(data_tokens); let named_tokens = topics_named_tokens.chain(data_named_tokens).collect::<HashMap<String, Token>>(); diff --git a/ethabi/src/function.rs b/ethabi/src/function.rs index a951420c9..0214fe2ef 100644 --- a/ethabi/src/function.rs +++ b/ethabi/src/function.rs @@ -59,7 +59,7 @@ impl Function { let signed = short_signature(&self.name, &params).to_vec(); let encoded = 
encode(tokens); - Ok(signed.into_iter().chain(encoded.into_iter()).collect()) + Ok(signed.into_iter().chain(encoded).collect()) } /// Return the 4 byte short signature of this function. diff --git a/ethabi/src/param_type/deserialize.rs b/ethabi/src/param_type/deserialize.rs index 9e6fd3e03..1bf1ebed5 100644 --- a/ethabi/src/param_type/deserialize.rs +++ b/ethabi/src/param_type/deserialize.rs @@ -25,7 +25,7 @@ impl<'a> Deserialize<'a> for ParamType { struct ParamTypeVisitor; -impl<'a> Visitor<'a> for ParamTypeVisitor { +impl Visitor<'_> for ParamTypeVisitor { type Value = ParamType; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { diff --git a/tests/src/lib.rs b/tests/src/lib.rs index 34170bcef..7bd4802e8 100644 --- a/tests/src/lib.rs +++ b/tests/src/lib.rs @@ -35,7 +35,7 @@ mod tests { let second = [0x22u8; 20]; let encoded_from_vec = functions::set_validators::encode_input(vec![first, second]); - let encoded_from_vec_iter = functions::set_validators::encode_input(vec![first, second].into_iter()); + let encoded_from_vec_iter = functions::set_validators::encode_input(vec![first, second]); let encoded_from_vec_wrapped = functions::set_validators::encode_input(vec![Wrapper(first), Wrapper(second)]); let expected = "9300c9260000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000011111111111111111111111111111111111111110000000000000000000000002222222222222222222222222222222222222222".to_owned(); @@ -70,7 +70,7 @@ mod tests { let second = [0x22u8; 20]; let encoded_from_vec = constructor(code.clone(), vec![first, second]); - let encoded_from_vec_iter = constructor(code.clone(), vec![first, second].into_iter()); + let encoded_from_vec_iter = constructor(code.clone(), vec![first, second]); let encoded_from_vec_wrapped = constructor(code, vec![Wrapper(first), Wrapper(second)]); let expected = 
"0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000011111111111111111111111111111111111111110000000000000000000000002222222222222222222222222222222222222222".to_owned(); From 2312b66a525e228183fd0649af28269bf0b11d51 Mon Sep 17 00:00:00 2001 From: zjb0807 Date: Fri, 1 Aug 2025 10:58:40 +0800 Subject: [PATCH 3/4] fix clippy --- ethabi/src/encoder.rs | 8 ++++---- ethabi/src/token/mod.rs | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/ethabi/src/encoder.rs b/ethabi/src/encoder.rs index b1839ec41..cbd1b8a22 100644 --- a/ethabi/src/encoder.rs +++ b/ethabi/src/encoder.rs @@ -14,7 +14,7 @@ use crate::{util::pad_u32, Bytes, Token, Word}; fn pad_bytes_len(bytes: &[u8]) -> u32 { // "+ 1" because len is also appended - ((bytes.len() + 31) / 32) as u32 + 1 + bytes.len().div_ceil(32) as u32 + 1 } fn pad_bytes_append(data: &mut Vec, bytes: &[u8]) { @@ -23,11 +23,11 @@ fn pad_bytes_append(data: &mut Vec, bytes: &[u8]) { } fn fixed_bytes_len(bytes: &[u8]) -> u32 { - ((bytes.len() + 31) / 32) as u32 + bytes.len().div_ceil(32) as u32 } fn fixed_bytes_append(result: &mut Vec, bytes: &[u8]) { - let len = (bytes.len() + 31) / 32; + let len = bytes.len().div_ceil(32); for i in 0..len { let mut padded = [0u8; 32]; @@ -173,7 +173,7 @@ fn encode_token_append(data: &mut Vec, token: &Token) { } data.push(value); } - _ => panic!("Unhandled nested token: {:?}", token), + _ => panic!("Unhandled nested token: {token:?}"), }; } diff --git a/ethabi/src/token/mod.rs b/ethabi/src/token/mod.rs index bcf44cf48..d4df12a19 100644 --- a/ethabi/src/token/mod.rs +++ b/ethabi/src/token/mod.rs @@ -81,7 +81,7 @@ pub trait Tokenizer { let mut last_is_array = false; let mut params = param.iter(); - for (pos, ch) in value.chars().enumerate() { + for (pos, ch) in value.char_indices() { match ch { '[' if !ignore => { if array_nested == 0 { @@ -179,7 +179,7 @@ pub trait Tokenizer { let mut 
tuple_nested = 0isize; let mut tuple_item_start = 1; let mut last_is_tuple = false; - for (i, ch) in value.chars().enumerate() { + for (i, ch) in value.char_indices() { match ch { '(' if !ignore => { if tuple_nested == 0 { From c6b79ef2ae9892aa49dc5fff461fef7aba1a5153 Mon Sep 17 00:00:00 2001 From: zjb0807 Date: Fri, 1 Aug 2025 11:02:53 +0800 Subject: [PATCH 4/4] fix clippy --- ethabi/src/operation.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/ethabi/src/operation.rs b/ethabi/src/operation.rs index 4e8a9d4ac..a970af3dd 100644 --- a/ethabi/src/operation.rs +++ b/ethabi/src/operation.rs @@ -143,10 +143,9 @@ mod tests { "name":"a", "type":"address" }}], - "name":"{}", + "name":"{name}", "outputs": [] - }}"#, - name + }}"# ); let deserialized: Operation = serde_json::from_str(&s).unwrap(); @@ -177,11 +176,10 @@ mod tests { "type":"address", "indexed":true }}], - "name":"{}", + "name":"{name}", "outputs": [], "anonymous": false - }}"#, - name + }}"# ); let deserialized: Operation = serde_json::from_str(&s).unwrap();