Skip to content

Commit

Permalink
Implement multiple error notification for tokenizer.
Browse files Browse the repository at this point in the history
  • Loading branch information
YSawc committed Oct 19, 2020
1 parent 2ebfba1 commit d09ae5a
Show file tree
Hide file tree
Showing 4 changed files with 46 additions and 7 deletions.
10 changes: 9 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,15 @@ A toy compiler implemented by rust.
- [x] [beta](https://github.com/YSawc/lio/commit/dd6dd3de39019f4c7bec2677140fb22e9f06fcc9)
- [x] [simplified](https://github.com/YSawc/lio/commit/e2199f937ca5e13c19579430e677ea922cd4cbf5)
- [ ] unused variable checker
- [ ] global
- [x] [global](https://github.com/YSawc/lio/commit/a8c70e5e3824b5b61afaf07ff636bdeca46b41b6)
- [x] [local](https://github.com/YSawc/lio/commit/da07a3dc4c1985c2116da6e4e94554c51d51e30c)
- [x] [no checks for underscore variables](https://github.com/YSawc/lio/commit/0c95ef3d9c57e8578d584aaef5dc42fca986a3c9)
- [x] [checker for return type](https://github.com/YSawc/lio/commit/cb7864e64982aeb98adda36f606e96cb451b0784)
- [ ] multiple error notification
- [ ] tokenize
- [ ] parser
- [ ] whole of program
- [ ] command selector for several machine architectures
- [ ] lifetime
- [ ] allocation register
- [ ] canonical tree
18 changes: 15 additions & 3 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,17 @@ fn main() {
continue;
}

let t = Token::tokenize(&s).unwrap();
// Tokenize the REPL input; on failure, report every collected error
// before exiting so the user sees all problems in one pass.
let t = match Token::tokenize(&s) {
    Ok(t) => t,
    Err(errors) => {
        // `errors` (Vec<TokenError>) is already owned here — iterating it
        // directly avoids the needless clone that `.to_owned()` performed.
        for e in errors {
            // e.show_diagnostic(arg1); // FIXME
            show_trace(e);
        }
        std::process::exit(1);
    }
};

let t = match map(t) {
Ok(t) => t,
Err(e) => {
Expand Down Expand Up @@ -91,8 +101,10 @@ fn main() {
// Tokenize the command-line source; print every tokenizer error and
// exit non-zero only after all of them have been reported.
let t = match Token::tokenize(arg1) {
    Ok(n) => n,
    Err(errors) => {
        // The Err payload is an owned Vec<TokenError>; consume it directly
        // instead of cloning it with `.to_owned()`.
        for e in errors {
            // e.show_diagnostic(arg1); // FIXME
            show_trace(e);
        }
        std::process::exit(1);
    }
};
Expand Down
12 changes: 11 additions & 1 deletion src/tests/token/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ fn tokenize_test() {
#[test]
fn invalid_token_test() {
let l = match Token::tokenize("1+1\n") {
Err(e) => match e.value {
Err(e) => match e[0].value {
TokenErrorKind::InvalidToken('\n') => true,
_ => false,
},
Expand All @@ -37,3 +37,13 @@ fn pass_comparison_tokenize_test() {

assert!(true);
}

#[test]
fn multiple_invalid_tokens_test() {
    // Input containing two '\n' characters must yield one InvalidToken
    // error per occurrence, at the correct source locations.
    let expected = vec![
        TokenError::invalid_token('\n', Loc::new(0, 1)),
        TokenError::invalid_token('\n', Loc::new(4, 5)),
    ];
    let result = Token::tokenize("\n1+1\n");
    assert_eq!(expected, result.unwrap_err());
}
13 changes: 11 additions & 2 deletions src/token/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,11 +114,13 @@ impl Token {
}

impl Token {
pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenError> {
pub fn tokenize(input: &str) -> Result<Vec<Token>, Vec<TokenError>> {
let mut p_data = Vec::new();
let l = input.len();
let mut b = 0;
let mut i = 0;
let mut e: bool = false;
let mut ev: Vec<TokenError> = vec![];

fn multiple_symbol_map_map() -> FxHashMap<String, TokenKind> {
let mut map = FxHashMap::default();
Expand Down Expand Up @@ -225,13 +227,20 @@ impl Token {
p_data.push(Self::ident(s, Loc::new(t as u8 + b, (i as u8 + 1) + b)));
continue;
}
return Err(TokenError::invalid_token(
ev.push(TokenError::invalid_token(
input.to_string().chars().nth(i).unwrap(),
Loc::new(i as u8 + b, i as u8 + 1 + b),
));
if !e {
e = true;
}
}
i += 1
}
if e {
return Err(ev);
}

Ok(p_data)
}
}
Expand Down

0 comments on commit d09ae5a

Please sign in to comment.