Skip to content

Commit

Permalink
feat(ethabi, token): support nested tuples/arrays parsing (#276)
Browse files Browse the repository at this point in the history
  • Loading branch information
shekhirin authored Aug 12, 2022
1 parent 6215662 commit bb3f430
Showing 1 changed file with 120 additions and 9 deletions.
129 changes: 120 additions & 9 deletions ethabi/src/token/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,12 +73,44 @@ pub trait Tokenizer {
let mut nested = 0isize;
let mut ignore = false;
let mut last_item = 1;

let mut array_nested = 0isize;
let mut array_item_start = 1;
let mut last_is_array = false;

let mut params = param.iter();
for (pos, ch) in value.chars().enumerate() {
match ch {
'[' if !ignore => {
if array_nested == 0 {
array_item_start = pos;
}
array_nested += 1;
}
']' if !ignore => {
array_nested -= 1;

match array_nested.cmp(&0) {
Less => {
return Err(Error::InvalidData);
}
Equal => {
let sub = &value[array_item_start..pos + 1];
let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
result.push(token);
last_is_array = !last_is_array;
}
_ => {}
}
}
_ if array_nested != 0 => continue,
'(' if !ignore => {
nested += 1;
}
')' if !ignore && last_is_array => {
nested -= 1;
last_is_array = !last_is_array;
}
')' if !ignore => {
nested -= 1;

Expand All @@ -87,17 +119,24 @@ pub trait Tokenizer {
return Err(Error::InvalidData);
}
Equal => {
let sub = &value[last_item..pos];
let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
result.push(token);
last_item = pos + 1;
if last_is_array {
last_is_array = !last_is_array;
} else {
let sub = &value[last_item..pos];
let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
result.push(token);
last_item = pos + 1;
}
}
_ => {}
}
}
'"' => {
ignore = !ignore;
}
',' if array_nested == 0 && nested == 1 && !ignore && last_is_array => {
last_is_array = !last_is_array;
}
',' if nested == 1 && !ignore => {
let sub = &value[last_item..pos];
let token = Self::tokenize(params.next().ok_or(Error::InvalidData)?, sub)?;
Expand Down Expand Up @@ -129,30 +168,67 @@ pub trait Tokenizer {
let mut nested = 0isize;
let mut ignore = false;
let mut last_item = 1;

let mut tuple_nested = 0isize;
let mut tuple_item_start = 1;
let mut last_is_tuple = false;
for (i, ch) in value.chars().enumerate() {
match ch {
'(' if !ignore => {
if tuple_nested == 0 {
tuple_item_start = i;
}
tuple_nested += 1;
}
')' if !ignore => {
tuple_nested -= 1;
match tuple_nested.cmp(&0) {
Less => {
return Err(Error::InvalidData);
}
Equal => {
let sub = &value[tuple_item_start..i + 1];
let token = Self::tokenize(param, sub)?;
result.push(token);
last_is_tuple = !last_is_tuple;
}
_ => {}
}
}
_ if tuple_nested != 0 => continue,
'[' if !ignore => {
nested += 1;
}
']' if !ignore && last_is_tuple => {
nested -= 1;
last_is_tuple = !last_is_tuple;
}
']' if !ignore => {
nested -= 1;
match nested.cmp(&0) {
Less => {
return Err(Error::InvalidData);
}
Equal => {
let sub = &value[last_item..i];
let token = Self::tokenize(param, sub)?;
result.push(token);
last_item = i + 1;
if last_is_tuple {
last_is_tuple = !last_is_tuple;
} else {
let sub = &value[last_item..i];
let token = Self::tokenize(param, sub)?;
result.push(token);
last_item = i + 1;
}
}
_ => {}
}
}
'"' => {
ignore = !ignore;
}
',' if nested == 1 && !ignore => {
',' if tuple_nested == 0 && nested == 1 && !ignore && last_is_tuple => {
last_is_tuple = !last_is_tuple;
}
',' if tuple_nested == 0 && nested == 1 && !ignore => {
let sub = &value[last_item..i];
let token = Self::tokenize(param, sub)?;
result.push(token);
Expand Down Expand Up @@ -194,6 +270,8 @@ pub trait Tokenizer {
#[cfg(all(test, feature = "full-serde"))]
mod test {
use super::{LenientTokenizer, ParamType, Tokenizer};
use crate::Token;

#[test]
fn single_quoted_in_array_must_error() {
assert!(LenientTokenizer::tokenize_array("[1,\"0,false]", &ParamType::Bool).is_err());
Expand All @@ -202,4 +280,37 @@ mod test {
assert!(LenientTokenizer::tokenize_array("[1,\"0\",false]", &ParamType::Bool).is_err());
assert!(LenientTokenizer::tokenize_array("[1,0]", &ParamType::Bool).is_ok());
}

#[test]
fn tuples_arrays_mixed() {
	// Field layout shared by both assertions below:
	// (bool)[] followed by (bool,bool)[].
	let fields = vec![
		ParamType::Array(Box::new(ParamType::Tuple(vec![ParamType::Bool]))),
		ParamType::Array(Box::new(ParamType::Tuple(vec![ParamType::Bool, ParamType::Bool]))),
	];
	// Tokens expected for the payload "([(true)],[(false,true)])".
	let expected_fields = vec![
		Token::Array(vec![Token::Tuple(vec![Token::Bool(true)])]),
		Token::Array(vec![Token::Tuple(vec![Token::Bool(false), Token::Bool(true)])]),
	];

	// Same payload wrapped once more: an array holding a single tuple.
	assert_eq!(
		LenientTokenizer::tokenize_array(
			"[([(true)],[(false,true)])]",
			&ParamType::Tuple(fields.clone()),
		)
		.unwrap(),
		vec![Token::Tuple(expected_fields.clone())]
	);

	// And the payload parsed directly as a struct.
	assert_eq!(
		LenientTokenizer::tokenize_struct("([(true)],[(false,true)])", &fields).unwrap(),
		expected_fields
	);
}
}

0 comments on commit bb3f430

Please sign in to comment.