Skip to content

Commit

Permalink
Merge #39
Browse files Browse the repository at this point in the history
39: Tools r=matklad a=matklad

closes #34 

bors r+
  • Loading branch information
bors[bot] committed Feb 3, 2018
2 parents 3c70ae2 + a5a6973 commit 31bcfdd
Show file tree
Hide file tree
Showing 15 changed files with 184 additions and 36 deletions.
1 change: 1 addition & 0 deletions .cargo/config
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
[alias]
parse = "run --package tools --bin parse"
gen = "run --package tools --bin gen"
collect-tests = "run --package tools --bin collect-tests --"
1 change: 1 addition & 0 deletions appveyor.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ install:
build: false

test_script:
- cargo collect-tests --verify
- cargo test

branches:
Expand Down
16 changes: 15 additions & 1 deletion docs/TESTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,26 @@ files to have the same name except for the leading number. In general,
the test suite should be append-only: old tests should not be modified;
new tests should be created instead.


Note that only `ok` tests are normative: `err` tests test error
recovery and it is totally ok for a parser to not implement any error
recovery at all. However, for libsyntax2.0 we do care about error
recovery, and we do care about precise and useful error messages.

There are also so-called "inline tests". They appear as the comments
with a `test` header in the source code, like this:

```rust
// test fn_basic
// fn foo() {}
fn fn_item(p: &mut Parser) {
// ...
}
```

You can run the `cargo collect-tests` command to collect all inline tests
into the `tests/data/parser/inline` directory. The main advantage of inline
tests is that they help illustrate what the relevant code is doing.


Contribution opportunity: design and implement testing infrastructure
for validators.
4 changes: 4 additions & 0 deletions src/parser/event_parser/grammar/items/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,11 +52,15 @@ fn item(p: &mut Parser) {
STATIC_ITEM
}
CONST_KW => match p.nth(1) {
// test const_fn
// const fn foo() {}
FN_KW => {
p.bump();
fn_item(p);
FN_ITEM
}
// test const_unsafe_fn
// const unsafe fn foo() {}
UNSAFE_KW if p.nth(2) == FN_KW => {
p.bump();
p.bump();
Expand Down
1 change: 1 addition & 0 deletions tests/data/parser/inline/0001_const_unsafe_fn.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
const unsafe fn foo() {}
15 changes: 15 additions & 0 deletions tests/data/parser/inline/0001_const_unsafe_fn.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
FILE@[0; 25)
FN_ITEM@[0; 25)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
UNSAFE_KW@[6; 12)
WHITESPACE@[12; 13)
FN_KW@[13; 15)
WHITESPACE@[15; 16)
IDENT@[16; 19) "foo"
L_PAREN@[19; 20)
R_PAREN@[20; 21)
WHITESPACE@[21; 22)
L_CURLY@[22; 23)
R_CURLY@[23; 24)
WHITESPACE@[24; 25)
1 change: 1 addition & 0 deletions tests/data/parser/inline/0002_const_fn.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
const fn foo() {}
13 changes: 13 additions & 0 deletions tests/data/parser/inline/0002_const_fn.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
FILE@[0; 18)
FN_ITEM@[0; 18)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
FN_KW@[6; 8)
WHITESPACE@[8; 9)
IDENT@[9; 12) "foo"
L_PAREN@[12; 13)
R_PAREN@[13; 14)
WHITESPACE@[14; 15)
L_CURLY@[15; 16)
R_CURLY@[16; 17)
WHITESPACE@[17; 18)
5 changes: 0 additions & 5 deletions tests/data/parser/ok/0024_const_fn.rs

This file was deleted.

29 changes: 0 additions & 29 deletions tests/data/parser/ok/0024_const_fn.txt

This file was deleted.

File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion tests/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use testutils::dir_tests;

#[test]
fn parser_tests() {
dir_tests(&["parser/ok", "parser/err"], |text| {
dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
let tokens = tokenize(text);
let file = parse(text.to_string(), &tokens);
dump_tree(&file)
Expand Down
2 changes: 2 additions & 0 deletions tools/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,6 @@ serde = "1.0.26"
serde_derive = "1.0.26"
file = "1.1.1"
ron = "0.1.5"
walkdir = "2"
itertools = "0.7"
libsyntax2 = { path = "../" }
130 changes: 130 additions & 0 deletions tools/src/bin/collect-tests.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
extern crate file;
extern crate itertools;
extern crate walkdir;

use walkdir::WalkDir;
use itertools::Itertools;

use std::path::{Path, PathBuf};
use std::collections::HashSet;
use std::fs;

fn main() {
    // With `--verify` the tool acts as a CI check: it writes nothing and
    // fails if the recorded tests are out of date.
    let verify = ::std::env::args().any(|arg| arg == "--verify");

    let grammar = grammar_dir();
    let collected = tests_from_dir(&grammar);
    let recorded = existing_tests();

    // A recorded test with no matching inline comment means an inline test
    // was deleted or renamed; refuse to continue rather than guess.
    for stale in recorded.difference(&collected) {
        panic!("Test is deleted: {}\n{}", stale.name, stale.text);
    }

    for (idx, test) in collected.difference(&recorded).enumerate() {
        if verify {
            panic!("Inline test is not recorded: {}", test.name);
        }

        // Continue the `NNNN_` numbering after the already-recorded tests.
        let file_name = format!("{:04}_{}.rs", recorded.len() + idx + 1, test.name);
        println!("Creating {}", file_name);
        let path = inline_tests_dir().join(file_name);
        file::put_text(&path, &test.text).unwrap();
    }
}

// A single inline test: `name` comes from the `// test <name>` header line
// and `text` is the Rust source that follows it. Identity (the PartialEq
// and Hash impls below) is by `name` only, so a `HashSet<Test>` keeps at
// most one test per name regardless of its text.
#[derive(Debug, Eq)]
struct Test {
    name: String,
    text: String,
}

impl PartialEq for Test {
    // Equality looks at the name only; `text` is intentionally ignored so
    // set operations compare tests by name. This agrees with the Hash impl.
    fn eq(&self, other: &Test) -> bool {
        self.name == other.name
    }
}

// Hash must agree with PartialEq: only `name` feeds the hasher, so two
// tests with the same name land in the same HashSet bucket.
impl ::std::hash::Hash for Test {
    fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
        self.name.hash(state)
    }
}

// Walks `dir` recursively and collects every inline test found in any
// `.rs` file. Panics if two inline tests share a name.
fn tests_from_dir(dir: &Path) -> HashSet<Test> {
    let mut found = HashSet::new();
    let rs_files = WalkDir::new(dir)
        .into_iter()
        .map(|entry| entry.unwrap())
        .filter(|entry| entry.file_type().is_file())
        .filter(|entry| entry.path().extension().unwrap_or_default() == "rs");
    for entry in rs_files {
        let source = file::get_text(entry.path()).unwrap();
        for test in collect_tests(&source) {
            // `replace` hands back the previously stored equal element,
            // which is exactly the duplicate we want to report.
            if let Some(previous) = found.replace(test) {
                panic!("Duplicate test: {}", previous.name)
            }
        }
    }
    found
}

fn collect_tests(s: &str) -> Vec<Test> {
let mut res = vec![];
let prefix = "// ";
let comment_blocks = s.lines()
.map(str::trim_left)
.group_by(|line| line.starts_with(prefix));

for (is_comment, block) in comment_blocks.into_iter() {
if !is_comment {
continue;
}
let mut block = block.map(|line| &line[prefix.len()..]);
let first = block.next().unwrap();
if !first.starts_with("test ") {
continue;
}
let name = first["test ".len()..].to_string();
let text: String = itertools::join(block.chain(::std::iter::once("")), "\n");
assert!(!text.trim().is_empty() && text.ends_with("\n"));
res.push(Test { name, text })
}
res
}

// Reads back every recorded inline test from the inline tests directory.
// File names look like `0042_some_name.rs`; the leading `NNNN_` ordinal
// and the extension are stripped to recover the test name.
//
// Fix: use `Path::file_stem` to drop the extension instead of the fragile
// manual byte slice `name[.. name.len() - 3]`.
fn existing_tests() -> HashSet<Test> {
    let mut res = HashSet::new();
    for entry in fs::read_dir(&inline_tests_dir()).unwrap() {
        let path = entry.unwrap().path();
        if path.extension().unwrap_or_default() != "rs" {
            continue;
        }
        // `file_stem` drops `.rs`; slicing off the "0000_" prefix leaves
        // the test name.
        let stem = path.file_stem().unwrap().to_str().unwrap();
        let name = stem["0000_".len()..].to_string();
        let text = file::get_text(&path).unwrap();
        res.insert(Test { name, text });
    }
    res
}

// Directory where collected inline tests are materialised; created on
// demand so the tool works on a fresh checkout.
fn inline_tests_dir() -> PathBuf {
    let dir = base_dir().join("tests/data/parser/inline");
    if !dir.is_dir() {
        fs::create_dir_all(&dir).unwrap();
    }
    dir
}

// The directory scanned for `// test ...` comments: the parser grammar.
fn grammar_dir() -> PathBuf {
    base_dir().join("src/parser/event_parser/grammar")
}

fn base_dir() -> PathBuf {
let dir = env!("CARGO_MANIFEST_DIR");
PathBuf::from(dir).parent().unwrap().to_owned()
}

0 comments on commit 31bcfdd

Please sign in to comment.