2 changes: 1 addition & 1 deletion derive/src/lib.rs
@@ -106,7 +106,7 @@ fn to_syntax_string(param_type: &ethabi::ParamType) -> proc_macro2::TokenStream
 	}
 }
 
-fn to_ethabi_param_vec<'a, P: 'a>(params: P) -> proc_macro2::TokenStream
+fn to_ethabi_param_vec<'a, P>(params: P) -> proc_macro2::TokenStream
 where
 	P: IntoIterator<Item = &'a Param>,
 {
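Note: the dropped `P: 'a` bound is redundant, since `P: IntoIterator<Item = &'a Param>` already ties the iterator's items to `'a`. A minimal standalone sketch (using a hypothetical `Param` stand-in, not the derive crate's type) compiles fine without the extra bound:

```rust
// Hypothetical stand-in for ethabi's Param, for illustration only.
struct Param {
    name: String,
}

// No `P: 'a` bound needed: the IntoIterator bound already pins the
// borrowed items to 'a, which is all the function body relies on.
fn param_names<'a, P>(params: P) -> Vec<&'a str>
where
    P: IntoIterator<Item = &'a Param>,
{
    params.into_iter().map(|p| p.name.as_str()).collect()
}

fn main() {
    let params = vec![Param { name: "from".into() }, Param { name: "to".into() }];
    assert_eq!(param_names(&params), ["from", "to"]);
}
```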
4 changes: 2 additions & 2 deletions ethabi/Cargo.toml
@@ -17,9 +17,9 @@ hex = { version = "0.4", default-features = false, features = ["alloc"] }
 serde = { version = "1.0", optional = true, default-features = false, features = ["derive"] }
 serde_json = { version = "1.0", optional = true }
 sha3 = { version = "0.10", default-features = false }
-ethereum-types = { version = "0.14.0", default-features = false }
+ethereum-types = { version = "0.15.1", default-features = false }
 thiserror = { version = "1", optional = true }
-uint = { version = "0.9.0", default-features = false, optional = true }
+uint = { version = "0.10.0", default-features = false, optional = true }
 regex = { version = "1.5.4", optional = true }
 once_cell = { version = "1.9.0", optional = true }
 
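Note: these two bumps appear to drive most of the source changes below. uint 0.10 (which ethereum-types 0.15 builds on) reworked the byte-conversion API, so `From`/`Into` between 32-byte words and `U256` gives way to explicit `from_big_endian`/`to_big_endian`. A sketch of the new round-trip, assuming uint 0.10's return-by-value `to_big_endian` and ethabi's `Uint` alias for `ethereum_types::U256`:

```rust
use ethabi::Uint; // alias for ethereum_types::U256

fn main() {
    let word = [0x11u8; 32];
    let uint = Uint::from_big_endian(&word); // previously: word.into()
    let back: [u8; 32] = uint.to_big_endian(); // now returns the buffer
    assert_eq!(back, word);
}
```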
12 changes: 6 additions & 6 deletions ethabi/src/decoder.rs
@@ -10,7 +10,7 @@
 
 #[cfg(not(feature = "std"))]
 use crate::no_std_prelude::*;
-use crate::{Error, ParamType, Token, Word};
+use crate::{Error, Int, ParamType, Token, Uint, Word};
 
 #[derive(Debug)]
 struct DecodeResult {
@@ -93,7 +93,7 @@ fn peek_32_bytes(data: &[u8], offset: usize) -> Result<Word, Error> {
 }
 
 fn round_up_nearest_multiple(value: usize, padding: usize) -> usize {
-	(value + padding - 1) / padding * padding
+	value.div_ceil(padding) * padding
 }
 
 fn take_bytes(data: &[u8], offset: usize, len: usize, validate: bool) -> Result<Vec<u8>, Error> {
@@ -131,12 +131,12 @@ fn decode_param(param: &ParamType, data: &[u8], offset: usize, validate: bool) -
 		}
 		ParamType::Int(_) => {
 			let slice = peek_32_bytes(data, offset)?;
-			let result = DecodeResult { token: Token::Int(slice.into()), new_offset: offset + 32 };
+			let result = DecodeResult { token: Token::Int(Int::from_big_endian(&slice)), new_offset: offset + 32 };
 			Ok(result)
 		}
 		ParamType::Uint(_) => {
 			let slice = peek_32_bytes(data, offset)?;
-			let result = DecodeResult { token: Token::Uint(slice.into()), new_offset: offset + 32 };
+			let result = DecodeResult { token: Token::Uint(Uint::from_big_endian(&slice)), new_offset: offset + 32 };
 			Ok(result)
 		}
 		ParamType::Bool => {
@@ -297,7 +297,7 @@ mod tests {
 		);
 		let address1 = Token::Address([0x11u8; 20].into());
 		let address2 = Token::Address([0x22u8; 20].into());
-		let uint = Token::Uint([0x11u8; 32].into());
+		let uint = Token::Uint(Uint::from_big_endian(&[0x11u8; 32]));
 		let tuple = Token::Tuple(vec![address1, address2, uint]);
 		let expected = vec![tuple];
 		let decoded =
@@ -396,7 +396,7 @@ mod tests {
 			6761766f66796f726b0000000000000000000000000000000000000000000000
 		"
 		);
-		let uint = Token::Uint([0x11u8; 32].into());
+		let uint = Token::Uint(Uint::from_big_endian(&[0x11u8; 32]));
 		let string = Token::String("gavofyork".to_owned());
 		let address1 = Token::Address([0x11u8; 20].into());
 		let address2 = Token::Address([0x22u8; 20].into());
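Note: `usize::div_ceil` (stable since Rust 1.73) computes the same rounded-up multiple as the old `(value + padding - 1) / padding` idiom, minus the intermediate overflow hazard. A quick standalone check:

```rust
fn old_round_up(value: usize, padding: usize) -> usize {
    (value + padding - 1) / padding * padding
}

fn new_round_up(value: usize, padding: usize) -> usize {
    value.div_ceil(padding) * padding
}

fn main() {
    // Identical results for the decoder's 32-byte padding.
    for value in 0..=1024 {
        assert_eq!(old_round_up(value, 32), new_round_up(value, 32));
    }
    // And no overflow near usize::MAX, where `value + padding - 1`
    // would panic in debug builds.
    assert_eq!(usize::MAX.div_ceil(32), usize::MAX / 32 + 1);
}
```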
20 changes: 10 additions & 10 deletions ethabi/src/encoder.rs
@@ -14,7 +14,7 @@ use crate::{util::pad_u32, Bytes, Token, Word};
 
 fn pad_bytes_len(bytes: &[u8]) -> u32 {
 	// "+ 1" because len is also appended
-	((bytes.len() + 31) / 32) as u32 + 1
+	bytes.len().div_ceil(32) as u32 + 1
 }
 
 fn pad_bytes_append(data: &mut Vec<Word>, bytes: &[u8]) {
@@ -23,11 +23,11 @@ fn pad_bytes_append(data: &mut Vec<Word>, bytes: &[u8]) {
 }
 
 fn fixed_bytes_len(bytes: &[u8]) -> u32 {
-	((bytes.len() + 31) / 32) as u32
+	bytes.len().div_ceil(32) as u32
 }
 
 fn fixed_bytes_append(result: &mut Vec<Word>, bytes: &[u8]) {
-	let len = (bytes.len() + 31) / 32;
+	let len = bytes.len().div_ceil(32);
 	for i in 0..len {
 		let mut padded = [0u8; 32];
 
@@ -164,16 +164,16 @@ fn encode_token_append(data: &mut Vec<Word>, token: &Token) {
 		Token::Bytes(ref bytes) => pad_bytes_append(data, bytes),
 		Token::String(ref s) => pad_bytes_append(data, s.as_bytes()),
 		Token::FixedBytes(ref bytes) => fixed_bytes_append(data, bytes),
-		Token::Int(int) => data.push(int.into()),
-		Token::Uint(uint) => data.push(uint.into()),
+		Token::Int(int) => data.push(int.to_big_endian()),
+		Token::Uint(uint) => data.push(uint.to_big_endian()),
 		Token::Bool(b) => {
 			let mut value = [0u8; 32];
 			if b {
 				value[31] = 1;
 			}
 			data.push(value);
 		}
-		_ => panic!("Unhandled nested token: {:?}", token),
+		_ => panic!("Unhandled nested token: {token:?}"),
 	};
 }
 
@@ -183,7 +183,7 @@ mod tests {
 
 	#[cfg(not(feature = "std"))]
 	use crate::no_std_prelude::*;
-	use crate::{encode, util::pad_u32, Token};
+	use crate::{encode, util::pad_u32, Int, Token, Uint};
 
 	#[test]
 	fn encode_address() {
@@ -529,7 +529,7 @@ mod tests {
 	fn encode_uint() {
 		let mut uint = [0u8; 32];
 		uint[31] = 4;
-		let encoded = encode(&[Token::Uint(uint.into())]);
+		let encoded = encode(&[Token::Uint(Uint::from_big_endian(&uint))]);
 		let expected = hex!("0000000000000000000000000000000000000000000000000000000000000004");
 		assert_eq!(encoded, expected);
 	}
@@ -538,7 +538,7 @@
 	fn encode_int() {
 		let mut int = [0u8; 32];
 		int[31] = 4;
-		let encoded = encode(&[Token::Int(int.into())]);
+		let encoded = encode(&[Token::Int(Int::from_big_endian(&int))]);
 		let expected = hex!("0000000000000000000000000000000000000000000000000000000000000004");
 		assert_eq!(encoded, expected);
 	}
@@ -730,7 +730,7 @@ mod tests {
 
 	#[test]
 	fn encode_complex_tuple() {
-		let uint = Token::Uint([0x11u8; 32].into());
+		let uint = Token::Uint(Uint::from_big_endian(&[0x11u8; 32]));
 		let string = Token::String("gavofyork".to_owned());
 		let address1 = Token::Address([0x11u8; 20].into());
 		let address2 = Token::Address([0x22u8; 20].into());
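Note: under the new API, `to_big_endian()` hands back a `[u8; 32]`, which is exactly ethabi's `Word` alias, so the encoder can push the result straight into its word buffer. A sketch under that assumption:

```rust
use ethabi::Uint;

type Word = [u8; 32]; // mirrors ethabi::Word

fn main() {
    let mut data: Vec<Word> = Vec::new();
    let uint = Uint::from(4u8);
    data.push(uint.to_big_endian()); // previously: data.push(uint.into())
    assert_eq!(data[0][31], 4);
    assert!(data[0][..31].iter().all(|&b| b == 0));
}
```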
2 changes: 1 addition & 1 deletion ethabi/src/error.rs
@@ -51,7 +51,7 @@ impl Error {
 
 		let signed = short_signature(&self.name, &params).to_vec();
 		let encoded = encode(tokens);
-		Ok(signed.into_iter().chain(encoded.into_iter()).collect())
+		Ok(signed.into_iter().chain(encoded).collect())
 	}
 
 	/// Parses the ABI function input to a list of tokens.
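Note: `Iterator::chain` accepts any `IntoIterator`, so the explicit `.into_iter()` on the second operand was redundant (clippy's `useless_conversion` lint). For example:

```rust
fn main() {
    let selector = vec![0xcdu8, 0xcd, 0x77, 0xc0]; // 4-byte selector
    let encoded = vec![0u8; 32]; // ABI-encoded arguments
    let call: Vec<u8> = selector.into_iter().chain(encoded).collect();
    assert_eq!(call.len(), 36);
}
```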
26 changes: 18 additions & 8 deletions ethabi/src/event.rs
@@ -92,15 +92,15 @@ impl Event {
 		let kinds: Vec<_> = self.indexed_params(true).into_iter().map(|param| param.kind).collect();
 		let result = if self.anonymous {
 			TopicFilter {
-				topic0: convert_topic(raw.topic0, kinds.get(0))?,
+				topic0: convert_topic(raw.topic0, kinds.first())?,
 				topic1: convert_topic(raw.topic1, kinds.get(1))?,
 				topic2: convert_topic(raw.topic2, kinds.get(2))?,
 				topic3: Topic::Any,
 			}
 		} else {
 			TopicFilter {
 				topic0: Topic::This(self.signature()),
-				topic1: convert_topic(raw.topic0, kinds.get(0))?,
+				topic1: convert_topic(raw.topic0, kinds.first())?,
 				topic2: convert_topic(raw.topic1, kinds.get(1))?,
 				topic3: convert_topic(raw.topic2, kinds.get(2))?,
 			}
@@ -136,7 +136,7 @@ impl Event {
 			0
 		} else {
 			// verify
-			let event_signature = topics.get(0).ok_or(Error::InvalidData)?;
+			let event_signature = topics.first().ok_or(Error::InvalidData)?;
 			if event_signature != &self.signature() {
 				return Err(Error::InvalidData);
 			}
@@ -155,13 +155,13 @@
 			return Err(Error::InvalidData);
 		}
 
-		let topics_named_tokens = topic_params.into_iter().map(|p| p.name).zip(topic_tokens.into_iter());
+		let topics_named_tokens = topic_params.into_iter().map(|p| p.name).zip(topic_tokens);
 
 		let data_types = data_params.iter().map(|p| p.kind.clone()).collect::<Vec<ParamType>>();
 
 		let data_tokens = decode(&data_types, &data)?;
 
-		let data_named_tokens = data_params.into_iter().map(|p| p.name).zip(data_tokens.into_iter());
+		let data_named_tokens = data_params.into_iter().map(|p| p.name).zip(data_tokens);
 
 		let named_tokens = topics_named_tokens.chain(data_named_tokens).collect::<BTreeMap<String, Token>>();
 
@@ -198,7 +198,7 @@ mod tests {
 		log::{Log, RawLog},
 		signature::long_signature,
 		token::Token,
-		Event, EventParam, LogParam, ParamType,
+		Event, EventParam, Int, LogParam, ParamType,
 	};
 
 	#[test]
@@ -259,8 +259,18 @@
 			result,
 			Log {
 				params: [
-					("a", Token::Int(hex!("0000000000000000000000000000000000000000000000000000000000000003").into()),),
-					("b", Token::Int(hex!("0000000000000000000000000000000000000000000000000000000000000002").into()),),
+					(
+						"a",
+						Token::Int(Int::from_big_endian(&hex!(
+							"0000000000000000000000000000000000000000000000000000000000000003"
+						))),
+					),
+					(
+						"b",
+						Token::Int(Int::from_big_endian(&hex!(
+							"0000000000000000000000000000000000000000000000000000000000000002"
+						))),
+					),
 					("c", Token::Address(hex!("2222222222222222222222222222222222222222").into())),
 					("d", Token::Address(hex!("1111111111111111111111111111111111111111").into())),
 					(
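Note: two small lint cleanups in this file. `slice::first()` is the preferred spelling of `.get(0)` (clippy's `get_first`), and `Iterator::zip` takes any `IntoIterator`, so `.zip(v.into_iter())` reduces to `.zip(v)`. Both in one sketch:

```rust
fn main() {
    let kinds = vec!["address", "uint256"];
    assert_eq!(kinds.first(), kinds.get(0));

    let names = vec!["a", "b"];
    let tokens = vec![1, 2];
    let pairs: Vec<_> = names.into_iter().zip(tokens).collect();
    assert_eq!(pairs, [("a", 1), ("b", 2)]);
}
```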
6 changes: 3 additions & 3 deletions ethabi/src/function.rs
@@ -59,7 +59,7 @@ impl Function {
 
 		let signed = short_signature(&self.name, &params).to_vec();
 		let encoded = encode(tokens);
-		Ok(signed.into_iter().chain(encoded.into_iter()).collect())
+		Ok(signed.into_iter().chain(encoded).collect())
 	}
 
 	/// Return the 4 byte short signature of this function.
@@ -103,7 +103,7 @@ mod tests {
 
 	#[cfg(not(feature = "std"))]
 	use crate::no_std_prelude::*;
-	use crate::{Function, Param, ParamType, StateMutability, Token};
+	use crate::{Function, Param, ParamType, StateMutability, Token, Uint};
 
 	#[test]
 	fn test_function_encode_call() {
@@ -121,7 +121,7 @@
 
 		let mut uint = [0u8; 32];
 		uint[31] = 69;
-		let encoded = func.encode_input(&[Token::Uint(uint.into()), Token::Bool(true)]).unwrap();
+		let encoded = func.encode_input(&[Token::Uint(Uint::from_big_endian(&uint)), Token::Bool(true)]).unwrap();
 		let expected = hex!("cdcd77c000000000000000000000000000000000000000000000000000000000000000450000000000000000000000000000000000000000000000000000000000000001").to_vec();
 		assert_eq!(encoded, expected);
 
10 changes: 4 additions & 6 deletions ethabi/src/operation.rs
@@ -143,10 +143,9 @@ mod tests {
 				"name":"a",
 				"type":"address"
 			}}],
-			"name":"{}",
+			"name":"{name}",
 			"outputs": []
-		}}"#,
-			name
+		}}"#
 		);
 
 		let deserialized: Operation = serde_json::from_str(&s).unwrap();
@@ -177,11 +176,10 @@
 				"type":"address",
 				"indexed":true
 			}}],
-			"name":"{}",
+			"name":"{name}",
 			"outputs": [],
 			"anonymous": false
-		}}"#,
-			name
+		}}"#
 		);
 
 		let deserialized: Operation = serde_json::from_str(&s).unwrap();
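Note: the `{name}` form uses Rust 1.58's captured identifiers in format strings; it is behavior-identical to the positional `{}` with a trailing `name` argument (clippy's `uninlined_format_args`). For instance:

```rust
fn main() {
    let name = "foo";
    let positional = format!(r#""name":"{}""#, name);
    let captured = format!(r#""name":"{name}""#);
    assert_eq!(positional, captured);
}
```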
2 changes: 1 addition & 1 deletion ethabi/src/param_type/deserialize.rs
@@ -25,7 +25,7 @@ impl<'a> Deserialize<'a> for ParamType {
 
 struct ParamTypeVisitor;
 
-impl<'a> Visitor<'a> for ParamTypeVisitor {
+impl Visitor<'_> for ParamTypeVisitor {
 	type Value = ParamType;
 
 	fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
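Note: when an impl never names its lifetime parameter, the anonymous lifetime `'_` avoids declaring an unused `<'a>` (the cleanup rustc and clippy steer toward with `needless_lifetimes`-style lints). A toy trait standing in for serde's `Visitor<'de>`, for illustration only:

```rust
trait Visitor<'de> {
    type Value;
    fn expecting(&self) -> &'static str;
}

struct ParamTypeVisitor;

// `'_` instead of `impl<'a> Visitor<'a> for ...`: the lifetime is never
// referred to by name inside the impl.
impl Visitor<'_> for ParamTypeVisitor {
    type Value = ();
    fn expecting(&self) -> &'static str {
        "a correct name of abi-encodable parameter type"
    }
}

fn main() {
    assert!(ParamTypeVisitor.expecting().starts_with("a correct"));
}
```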
16 changes: 8 additions & 8 deletions ethabi/src/tests.rs
@@ -17,7 +17,7 @@ use serde_json::Value;
 
 #[cfg(not(feature = "std"))]
 use crate::no_std_prelude::*;
-use crate::{decode, encode, ParamType, Token};
+use crate::{decode, encode, Int, ParamType, Token, Uint};
 
 #[cfg(feature = "serde")]
 pub(crate) fn assert_json_eq(left: &str, right: &str) {
@@ -133,7 +133,7 @@ test_encode_decode! {
 test_encode_decode! {
 	name: int,
 	types: [ParamType::Int(32)],
-	tokens: [Token::Int([0x11u8; 32].into())],
+	tokens: [Token::Int(Int::from_big_endian(&[0x11u8; 32]))],
 	data: "1111111111111111111111111111111111111111111111111111111111111111"
 }
 test_encode_decode! {
@@ -142,7 +142,7 @@
 	tokens: {
 		let mut int = [0u8; 32];
 		int[31] = 4;
-		[Token::Int(int.into())]
+		[Token::Int(Int::from_big_endian(&int))]
 	},
 	data: "0000000000000000000000000000000000000000000000000000000000000004"
 }
@@ -151,7 +151,7 @@
 test_encode_decode! {
 	name: uint,
 	types: [ParamType::Uint(32)],
-	tokens: [Token::Uint([0x11u8; 32].into())],
+	tokens: [Token::Uint(Uint::from_big_endian(&[0x11u8; 32]))],
 	data: "1111111111111111111111111111111111111111111111111111111111111111"
 }
 test_encode_decode! {
@@ -160,7 +160,7 @@
 	tokens: {
 		let mut uint = [0u8; 32];
 		uint[31] = 4;
-		[Token::Uint(uint.into())]
+		[Token::Uint(Uint::from_big_endian(&uint))]
 	},
 	data: "0000000000000000000000000000000000000000000000000000000000000004"
 }
@@ -578,15 +578,15 @@ test_encode_decode! {
 		Token::Array(vec![
 			Token::Tuple(vec![
 				Token::Address([0x11u8; 20].into()),
-				Token::Uint([0x11u8; 32].into()),
+				Token::Uint(Uint::from_big_endian(&[0x11u8; 32])),
 			]),
 			Token::Tuple(vec![
 				Token::Address([0x22u8; 20].into()),
-				Token::Uint([0x22u8; 32].into()),
+				Token::Uint(Uint::from_big_endian(&[0x22u8; 32])),
 			]),
 			Token::Tuple(vec![
 				Token::Address([0x33u8; 20].into()),
-				Token::Uint([0x44u8; 32].into()),
+				Token::Uint(Uint::from_big_endian(&[0x44u8; 32])),
 			])
 		])
 	]
8 changes: 4 additions & 4 deletions ethabi/src/token/lenient.rs
@@ -90,7 +90,7 @@ impl Tokenizer for LenientTokenizer {
 			}
 		};
 
-		Ok(uint.into())
+		Ok(uint.to_big_endian())
 	}
 
 	// We don't have a proper signed int 256-bit long type, so here we're cheating. We build a U256
@@ -106,7 +106,7 @@
 		let max = Uint::max_value() / 2;
 		let int = if value.starts_with('-') {
 			if abs.is_zero() {
-				return Ok(abs.into());
+				return Ok(abs.to_big_endian());
 			} else if abs > max + 1 {
 				return Err(Error::Other(Cow::Borrowed("int256 parse error: Underflow")));
 			}
@@ -117,7 +117,7 @@
 			}
 			abs
 		};
-		Ok(int.into())
+		Ok(int.to_big_endian())
 	}
 }
 
@@ -139,7 +139,7 @@
 				"1111111111111111111111111111111111111111111111111111111111111111"
 			)
 			.unwrap(),
-			Token::Uint([0x11u8; 32].into())
+			Token::Uint(Uint::from_big_endian(&[0x11u8; 32]))
 		);
 	}
 
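Note: judging from the comment in the hunk above, the tokenizer fakes int256 by storing a negative value as the two's complement of its absolute value inside a `U256`. A sketch of that trick with the new return-by-value API (assuming `ethabi::Uint` is the `U256` alias):

```rust
use ethabi::Uint;

fn main() {
    let abs = Uint::from(3u8);
    let neg = !abs + Uint::from(1u8); // two's complement: -3
    let word = neg.to_big_endian();
    assert_eq!(word[0], 0xff); // sign bits fill the high bytes
    assert_eq!(word[31], 0xfd); // 0xfd is -3 as an i8
}
```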