From d09ae5afe26fd3daf1dcddd3dd333224cffe247c Mon Sep 17 00:00:00 2001
From: YSawc
Date: Mon, 19 Oct 2020 21:53:50 +0900
Subject: [PATCH] Implement multiple error notification for tokenizer.

---
 README.md                | 10 +++++++++-
 src/main.rs              | 18 +++++++++++++++---
 src/tests/token/tests.rs | 12 +++++++++++-
 src/token/token.rs       | 13 +++++++++++--
 4 files changed, 46 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 65f9f26..4ee6878 100644
--- a/README.md
+++ b/README.md
@@ -25,7 +25,15 @@ A toy compiler implemented by rust.
   - [x] [beta](https://github.com/YSawc/lio/commit/dd6dd3de39019f4c7bec2677140fb22e9f06fcc9)
   - [x] [simplified](https://github.com/YSawc/lio/commit/e2199f937ca5e13c19579430e677ea922cd4cbf5)
 - [ ] unused variable checker
-  - [ ] global
+  - [x] [global](https://github.com/YSawc/lio/commit/a8c70e5e3824b5b61afaf07ff636bdeca46b41b6)
   - [x] [local](https://github.com/YSawc/lio/commit/da07a3dc4c1985c2116da6e4e94554c51d51e30c)
   - [x] [not checkes for under score variable](https://github.com/YSawc/lio/commit/0c95ef3d9c57e8578d584aaef5dc42fca986a3c9)
 - [x] [checker for return type](https://github.com/YSawc/lio/commit/cb7864e64982aeb98adda36f606e96cb451b0784)
+- [ ] multiple error notification
+  - [ ] tokenize
+  - [ ] parser
+  - [ ] whole program
+- [ ] command selector for several machine architectures
+- [ ] lifetime
+- [ ] register allocation
+- [ ] canonical tree
diff --git a/src/main.rs b/src/main.rs
index 1912a10..8361d66 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -56,7 +56,17 @@ fn main() {
             continue;
         }
 
-        let t = Token::tokenize(&s).unwrap();
+        let t = match Token::tokenize(&s) {
+            Ok(t) => t,
+            Err(e) => {
+                for e in e.to_owned() {
+                    // e.show_diagnostic(arg1); // FIXME
+                    show_trace(e);
+                }
+                std::process::exit(1);
+            }
+        };
+
         let t = match map(t) {
             Ok(t) => t,
             Err(e) => {
@@ -91,8 +101,10 @@ fn main() {
     let t = match Token::tokenize(arg1) {
         Ok(n) => n,
         Err(e) => {
-            // e.show_diagnostic(arg1); // FIXME
-            show_trace(e);
+            for e in e.to_owned() {
+                // e.show_diagnostic(arg1); // FIXME
+                show_trace(e);
+            }
             std::process::exit(1);
         }
     };
diff --git a/src/tests/token/tests.rs b/src/tests/token/tests.rs
index bcdcaf6..3a184f2 100644
--- a/src/tests/token/tests.rs
+++ b/src/tests/token/tests.rs
@@ -21,7 +21,7 @@ fn tokenize_test() {
 #[test]
 fn invalid_token_test() {
     let l = match Token::tokenize("1+1\n") {
-        Err(e) => match e.value {
+        Err(e) => match e[0].value {
             TokenErrorKind::InvalidToken('\n') => true,
             _ => false,
         },
@@ -37,3 +37,13 @@ fn pass_comparison_tokenize_test() {
 
     assert!(true);
 }
+
+#[test]
+fn multiple_invalid_tokens_test() {
+    let l = Token::tokenize("\n1+1\n");
+    let e = vec![
+        (TokenError::invalid_token('\n', Loc::new(0, 1))),
+        (TokenError::invalid_token('\n', Loc::new(4, 5))),
+    ];
+    assert_eq!(e, l.unwrap_err());
+}
diff --git a/src/token/token.rs b/src/token/token.rs
index 2caf159..7b0b298 100644
--- a/src/token/token.rs
+++ b/src/token/token.rs
@@ -114,11 +114,13 @@ impl Token {
 }
 
 impl Token {
-    pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenError> {
+    pub fn tokenize(input: &str) -> Result<Vec<Token>, Vec<TokenError>> {
         let mut p_data = Vec::new();
         let l = input.len();
         let mut b = 0;
         let mut i = 0;
+        let mut e: bool = false;
+        let mut ev: Vec<TokenError> = vec![];
 
         fn multiple_symbol_map_map() -> FxHashMap {
             let mut map = FxHashMap::default();
@@ -225,13 +227,20 @@ impl Token {
                     p_data.push(Self::ident(s, Loc::new(t as u8 + b, (i as u8 + 1) + b)));
                     continue;
                 }
-                return Err(TokenError::invalid_token(
+                ev.push(TokenError::invalid_token(
                     input.to_string().chars().nth(i).unwrap(),
                     Loc::new(i as u8 + b, i as u8 + 1 + b),
                 ));
+                if !e {
+                    e = true;
+                }
             }
             i += 1
         }
+        if e {
+            return Err(ev);
+        }
+
         Ok(p_data)
     }
 }
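
With this change, Token::tokenize returns Result<Vec<Token>, Vec<TokenError>>, so a caller iterates over every collected error instead of stopping at the first invalid character. A minimal caller sketch follows, assuming this crate's Token, TokenError, and show_trace are in scope as in src/main.rs; the helper name report_all is illustrative only, not part of the patch.

    // Sketch only: `Token`, `TokenError`, and `show_trace` come from this crate,
    // as used in src/main.rs above; `report_all` is a hypothetical helper.
    fn report_all(src: &str) -> Vec<Token> {
        match Token::tokenize(src) {
            Ok(tokens) => tokens,
            Err(errors) => {
                // Every invalid character collected by the tokenizer is reported
                // before exiting, rather than only the first one.
                for err in errors {
                    show_trace(err);
                }
                std::process::exit(1);
            }
        }
    }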