Bump parity-common (#11)
* Test parity-common master

* Bump parity-common crates to upcoming version
pgherveou authored Sep 11, 2024
1 parent 5c01c8a commit a237569
Showing 4 changed files with 24 additions and 24 deletions.
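
The substance of the bump is the move from ethereum-types 0.14.1 / uint 0.9.5 to 0.15.0 / 0.10.0, where U256 is no longer built from or turned into 32-byte words via implicit `.into()` conversions. The sketch below is not part of the commit; it illustrates the explicit conversion pattern the diff adopts, assuming the ethereum-types 0.15 API in which `from_big_endian` takes a byte slice and `to_big_endian` returns the 32-byte array directly:

use ethereum_types::U256;

fn main() {
    // A 32-byte ABI word, as the decoder reads from its input slices.
    let word = [0x11u8; 32];

    // Previously `word.into()`; now an explicit big-endian constructor.
    let value = U256::from_big_endian(&word);

    // Previously `value.into()` back into a word; in uint 0.10 the method
    // returns the [u8; 32] directly instead of filling a caller buffer.
    let bytes: [u8; 32] = value.to_big_endian();

    assert_eq!(bytes, word);
}
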
6 changes: 3 additions & 3 deletions Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi-decode"
-version = "1.0.0"
+version = "1.1.0"
 authors = ["Vincent Geddes <vincent@snowfork.com"]
 edition = "2021"
 license = "Apache-2.0"
@@ -11,12 +11,12 @@ repository = "https://github.com/Snowfork/ethabi-decode.git"
 
 [dependencies]
 tiny-keccak = { version = "2.0.2", features = ["keccak"] }
-ethereum-types = { version = "0.14.1", default-features = false }
+ethereum-types = { version = "0.15.0", default-features = false }
 
 [dev-dependencies]
 hex = { version = "2.0", package = "rustc-hex" }
 hex-literal = "0.4.0"
-uint = { version = "0.9.5", default-features = false }
+uint = { version = "0.10.0", default-features = false }
 paste = "1.0.12"
 
 [features]
12 changes: 6 additions & 6 deletions src/decoder.rs
@@ -9,7 +9,7 @@
 
 //! ABI decoder.
 
-use crate::{util::slice_data, Error, ParamKind, Token, Word};
+use crate::{util::slice_data, Error, ParamKind, Token, Word, U256};
 
 use crate::std::Vec;
 
@@ -93,14 +93,14 @@ fn decode_param(param: &ParamKind, slices: &[Word], offset: usize) -> Result<Dec
 ParamKind::Int(_) => {
 let slice = peek(slices, offset)?;
 
-let result = DecodeResult { token: Token::Int(slice.clone().into()), new_offset: offset + 1 };
+let result = DecodeResult { token: Token::Int(U256::from_big_endian(slice)), new_offset: offset + 1 };
 
 Ok(result)
 }
 ParamKind::Uint(_) => {
 let slice = peek(slices, offset)?;
 
-let result = DecodeResult { token: Token::Uint(slice.clone().into()), new_offset: offset + 1 };
+let result = DecodeResult { token: Token::Uint(U256::from_big_endian(slice)), new_offset: offset + 1 };
 
 Ok(result)
 }
@@ -221,7 +221,7 @@ fn decode_param(param: &ParamKind, slices: &[Word], offset: usize) -> Result<Dec
 #[cfg(test)]
 mod tests {
 
-use crate::{decode, ParamKind, Token};
+use crate::{decode, ParamKind, Token, U256};
 use hex_literal::hex;
 
 #[test]
@@ -255,7 +255,7 @@ mod tests {
 );
 let address1 = Token::Address([0x11u8; 20].into());
 let address2 = Token::Address([0x22u8; 20].into());
-let uint = Token::Uint([0x11u8; 32].into());
+let uint = Token::Uint(U256::from_big_endian(&[0x11u8; 32]));
 let tuple = Token::Tuple(vec![address1, address2, uint]);
 let expected = vec![tuple];
 let decoded = decode(
@@ -362,7 +362,7 @@ mod tests {
 6761766f66796f726b0000000000000000000000000000000000000000000000
 "
 );
-let uint = Token::Uint([0x11u8; 32].into());
+let uint = Token::Uint(U256::from_big_endian(&[0x11u8; 32]));
 let string = Token::String("gavofyork".as_bytes().to_vec());
 let address1 = Token::Address([0x11u8; 20].into());
 let address2 = Token::Address([0x22u8; 20].into());
20 changes: 10 additions & 10 deletions src/encoder.rs
@@ -8,9 +8,9 @@
 
 //! ABI encoder.
 
+use crate::std::{vec, Vec};
 use crate::{util::pad_u32, Token, Word};
 use tiny_keccak::{Hasher, Keccak};
-use crate::std::{vec, Vec};
 
 fn pad_bytes(bytes: &[u8]) -> Vec<Word> {
 let mut result = vec![pad_u32(bytes.len() as u32)];
@@ -138,7 +138,6 @@ pub fn encode_function(signature: &str, inputs: &[Token]) -> Vec<u8> {
 signed.to_vec().into_iter().chain(encoded.into_iter()).collect()
 }
 
-
 fn encode_token(token: &Token) -> Mediate {
 match *token {
 Token::Address(ref address) => {
@@ -149,8 +148,8 @@ fn encode_token(token: &Token) -> Mediate {
 Token::Bytes(ref bytes) => Mediate::Prefixed(pad_bytes(bytes)),
 Token::String(ref s) => Mediate::Prefixed(pad_bytes(s.as_ref())),
 Token::FixedBytes(ref bytes) => Mediate::Raw(pad_fixed_bytes(bytes)),
-Token::Int(int) => Mediate::Raw(vec![int.into()]),
-Token::Uint(uint) => Mediate::Raw(vec![uint.into()]),
+Token::Int(int) => Mediate::Raw(vec![int.to_big_endian()]),
+Token::Uint(uint) => Mediate::Raw(vec![uint.to_big_endian()]),
 Token::Bool(b) => {
 let mut value = [0u8; 32];
 if b {
@@ -187,7 +186,7 @@
 
 #[cfg(test)]
 mod tests {
-use crate::{encode, encode_function, util::pad_u32, Token};
+use crate::{encode, encode_function, util::pad_u32, Token, U256};
 use hex_literal::hex;
 
 #[test]
@@ -505,7 +504,7 @@ mod tests {
 fn encode_uint() {
 let mut uint = [0u8; 32];
 uint[31] = 4;
-let encoded = encode(&vec![Token::Uint(uint.into())]);
+let encoded = encode(&vec![Token::Uint(U256::from_big_endian(&uint))]);
 let expected = hex!("0000000000000000000000000000000000000000000000000000000000000004");
 assert_eq!(encoded, expected);
 }
@@ -514,7 +513,7 @@
 fn encode_int() {
 let mut int = [0u8; 32];
 int[31] = 4;
-let encoded = encode(&vec![Token::Int(int.into())]);
+let encoded = encode(&vec![Token::Int(U256::from_big_endian(&int))]);
 let expected = hex!("0000000000000000000000000000000000000000000000000000000000000004");
 assert_eq!(encoded, expected);
 }
@@ -706,7 +705,7 @@ mod tests {
 
 #[test]
 fn encode_complex_tuple() {
-let uint = Token::Uint([0x11u8; 32].into());
+let uint = Token::Uint(U256::from_big_endian(&[0x11u8; 32]));
 let string = Token::String("gavofyork".into());
 let address1 = Token::Address([0x11u8; 20].into());
 let address2 = Token::Address([0x22u8; 20].into());
@@ -832,14 +831,15 @@ mod tests {
 let mut uint = [0u8; 32];
 uint[31] = 69;
 
-let encoded = encode_function(signature, &[Token::Uint(uint.into()), Token::Bool(true)]);
+let encoded = encode_function(signature, &[Token::Uint(U256::from_big_endian(&uint)), Token::Bool(true)]);
 let expected = hex!(
 "
 cdcd77c000000000000000000000000000000000000000000000000000000000
 0000004500000000000000000000000000000000000000000000000000000000
 00000001
 "
-).to_vec();
+)
+.to_vec();
 assert_eq!(encoded, expected);
 }
 }
10 changes: 5 additions & 5 deletions tests/decode_test.rs
@@ -6,7 +6,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ethabi_decode::{decode, ParamKind, Token};
+use ethabi_decode::{decode, ParamKind, Token, U256};
 use hex_literal::hex;
 use paste;
 
@@ -100,7 +100,7 @@ test_encode_decode! {
 test_encode_decode! {
 name: int,
 types: [ParamKind::Int(32)],
-tokens: [Token::Int([0x11u8; 32].into())],
+tokens: [Token::Int(U256::from_big_endian(&[0x11u8; 32]))],
 data: "1111111111111111111111111111111111111111111111111111111111111111"
 }
 test_encode_decode! {
@@ -109,7 +109,7 @@
 tokens: {
 let mut int = [0u8; 32];
 int[31] = 4;
-[Token::Int(int.into())]
+[Token::Int(U256::from_big_endian(&int))]
 },
 data: "0000000000000000000000000000000000000000000000000000000000000004"
 }
@@ -118,7 +118,7 @@
 test_encode_decode! {
 name: uint,
 types: [ParamKind::Uint(32)],
-tokens: [Token::Uint([0x11u8; 32].into())],
+tokens: [Token::Uint(U256::from_big_endian(&[0x11u8; 32]))],
 data: "1111111111111111111111111111111111111111111111111111111111111111"
 }
 test_encode_decode! {
@@ -127,7 +127,7 @@
 tokens: {
 let mut uint = [0u8; 32];
 uint[31] = 4;
-[Token::Uint(uint.into())]
+[Token::Uint(U256::from_big_endian(&uint))]
 },
 data: "0000000000000000000000000000000000000000000000000000000000000004"
 }
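
For context on how these tokens are exercised end to end, here is a rough usage sketch against the bumped crate, not taken from the commit; the decode signature (expected parameter kinds plus the raw ABI bytes, returning a Result over the token list) is assumed from the calls visible in the decoder tests:

use ethabi_decode::{decode, ParamKind, Token, U256};
use hex_literal::hex;

fn main() {
    // One static 256-bit word, ABI-encoded.
    let data = hex!("1111111111111111111111111111111111111111111111111111111111111111");

    // Assumed signature: decode(&[ParamKind], &[u8]) -> Result<Vec<Token>, Error>.
    let decoded = decode(&[ParamKind::Uint(256)], &data).unwrap();

    // Token construction in the new explicit big-endian style used throughout this commit.
    let expected = vec![Token::Uint(U256::from_big_endian(&[0x11u8; 32]))];
    assert_eq!(decoded, expected);
}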
