fail -> panic
steveklabnik committed Nov 4, 2014
1 parent 1240fe1 commit 8721022
Showing 6 changed files with 42 additions and 42 deletions.
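
The change is a mechanical rename: every `fail!(...)` invocation becomes `panic!(...)` with the same arguments and the same abort-the-task behavior, following Rust's rename of the macro around that time. As a minimal sketch of the pattern (hypothetical code written against current Rust, not taken from this crate):

```rust
use std::collections::HashMap;

// The macro's arguments and semantics are unchanged; only the name differs.
fn lookup(map: &HashMap<String, i32>, key: &str) -> i32 {
    match map.get(key) {
        Some(v) => *v,
        // Before this commit the arm would have read:
        //     None => fail!("missing key: {}", key),
        None => panic!("missing key: {}", key),
    }
}

fn main() {
    let mut map = HashMap::new();
    map.insert("count".to_string(), 3);
    assert_eq!(lookup(&map, "count"), 3);
}
```
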
8 changes: 4 additions & 4 deletions src/builder.rs
@@ -349,10 +349,10 @@ mod tests {
assert_eq!((*f)("count: ".to_string()), "count: 2".to_string());
assert_eq!((*f)("count: ".to_string()), "count: 3".to_string());
}
_ => fail!(),
_ => panic!(),
}
}
_ => fail!(),
_ => panic!(),
}
}

@@ -378,10 +378,10 @@ mod tests {
assert_eq!((*f)("count: ".to_string()), "count: 2".to_string());
assert_eq!((*f)("count: ".to_string()), "count: 3".to_string());
}
_ => fail!(),
_ => panic!(),
}
}
_ => fail!(),
_ => panic!(),
}
}
}
4 changes: 2 additions & 2 deletions src/compiler.rs
@@ -62,7 +62,7 @@ impl<T: Iterator<char>> Compiler<T> {
Ok(contents) => {
let string = match str::from_utf8(contents.as_slice()) {
Some(string) => string.to_string(),
None => { fail!("Failed to parse file as UTF-8"); }
None => { panic!("Failed to parse file as UTF-8"); }
};

let compiler = Compiler {
@@ -82,7 +82,7 @@ impl<T: Iterator<char>> Compiler<T> {
if e.kind == FileNotFound {
debug!("failed to read file {}", path.display());
} else {
fail!("error reading file: {}", e);
panic!("error reading file: {}", e);
}
}
}
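
The compiler hunks preserve the existing error-handling strategy around partials: a file that is missing is only logged, while any other read error or invalid UTF-8 aborts. A rough equivalent in current Rust (a sketch under assumed names; `load_partial` is not the crate's API):

```rust
use std::fs;
use std::io::ErrorKind;
use std::path::Path;

// Sketch of the same strategy: missing files are tolerated, everything
// else panics, and invalid UTF-8 panics as well.
fn load_partial(path: &Path) -> Option<String> {
    match fs::read(path) {
        Ok(bytes) => Some(
            String::from_utf8(bytes).expect("Failed to parse file as UTF-8"),
        ),
        Err(e) if e.kind() == ErrorKind::NotFound => {
            eprintln!("failed to read file {}", path.display());
            None
        }
        Err(e) => panic!("error reading file: {}", e),
    }
}

fn main() {
    // A missing partial simply yields None instead of aborting.
    assert!(load_partial(Path::new("does-not-exist.mustache")).is_none());
}
```
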
6 changes: 3 additions & 3 deletions src/encoder.rs
@@ -201,12 +201,12 @@ impl<'a> serialize::Encoder<Error> for Encoder<'a> {
};
let mut m = match self.data.pop() {
Some(::Map(m)) => m,
_ => fail!("Expected a map"),
_ => panic!("Expected a map"),
};
try!(f(self));
let popped = match self.data.pop() {
Some(p) => p,
None => fail!("Error: Nothing to pop!"),
None => panic!("Error: Nothing to pop!"),
};
m.insert(k, popped);
self.data.push(::Map(m));
@@ -220,6 +220,6 @@ pub fn encode<'a, T: serialize::Encodable<Encoder<'a>, Error>>(data: &T) -> Resu
assert_eq!(encoder.data.len(), 1);
match encoder.data.pop() {
Some(data) => Ok(data),
None => fail!("Error: Nothing to pop!"),
None => panic!("Error: Nothing to pop!"),
}
}
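
The encoder hunks guard a stack invariant in two places: a map must be on top of the value stack before a key/value pair is inserted, and encoding a value must actually have pushed something before it can be popped. A reduced sketch of that discipline (hypothetical `Node` and `insert_entry`, not the crate's encoder types):

```rust
use std::collections::HashMap;

// A reduced model of the encoder's value stack: maps are popped, filled,
// and pushed back, and an unexpected stack shape is a bug.
enum Node {
    Str(String),
    Map(HashMap<String, Node>),
}

fn insert_entry(stack: &mut Vec<Node>, key: &str, value: Node) {
    let mut map = match stack.pop() {
        Some(Node::Map(m)) => m,
        _ => panic!("Expected a map"),
    };
    map.insert(key.to_string(), value);
    stack.push(Node::Map(map));
}

fn main() {
    let mut stack = vec![Node::Map(HashMap::new())];
    insert_entry(&mut stack, "name", Node::Str("mustache".to_string()));

    // The invariant holds: the map is back on the stack with the new entry.
    match stack.pop() {
        Some(Node::Map(m)) => match m.get("name") {
            Some(Node::Str(s)) => assert_eq!(s, "mustache"),
            _ => panic!("Expected a string entry"),
        },
        _ => panic!("Expected a map"),
    }
}
```
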
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -45,7 +45,7 @@ impl<'a> PartialEq for Data<'a> {
(&Bool(ref v0), &Bool(ref v1)) => v0 == v1,
(&Vect(ref v0), &Vect(ref v1)) => v0 == v1,
(&Map(ref v0), &Map(ref v1)) => v0 == v1,
(&Fun(_), &Fun(_)) => fail!("cannot compare closures"),
(&Fun(_), &Fun(_)) => panic!("cannot compare closures"),
(_, _) => false,
}
}
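
The lib.rs hunk is the one case `PartialEq` cannot answer: two `Fun` variants wrap closures, which have no meaningful equality, so comparing them is treated as a programmer error rather than returning an arbitrary answer. A standalone sketch of the same idea (hypothetical `Value` type, not the crate's `Data` enum):

```rust
use std::cell::RefCell;

// Hypothetical, simplified stand-in for the crate's Data enum: a string
// variant and a closure variant.
enum Value {
    Str(String),
    Fun(RefCell<Box<dyn FnMut(String) -> String>>),
}

impl PartialEq for Value {
    fn eq(&self, other: &Value) -> bool {
        match (self, other) {
            (Value::Str(a), Value::Str(b)) => a == b,
            // Closures have no meaningful equality, so comparing two of
            // them is a bug in the caller, not a `false` result.
            (Value::Fun(_), Value::Fun(_)) => panic!("cannot compare closures"),
            _ => false,
        }
    }
}

fn main() {
    let f: Box<dyn FnMut(String) -> String> = Box::new(|s| s);
    assert!(Value::Str("a".into()) == Value::Str("a".into()));
    assert!(Value::Str("a".into()) != Value::Fun(RefCell::new(f)));
}
```
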
22 changes: 11 additions & 11 deletions src/parser.rs
@@ -176,7 +176,7 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {
self.bump();
}
} else {
fail!("character {} is not part of CTAG: {}",
panic!("character {} is not part of CTAG: {}",
ch,
self.ctag_chars[self.tag_position]);
}
@@ -187,15 +187,15 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {
match self.state {
TEXT => { self.add_text(); }
OTAG => { self.not_otag(); self.add_text(); }
TAG => { fail!("unclosed tag"); }
TAG => { panic!("unclosed tag"); }
CTAG => { self.not_ctag(); self.add_text(); }
}

// Check that we don't have any incomplete sections.
for token in self.tokens.iter() {
match *token {
IncompleteSection(ref path, _, _, _) => {
fail!("Unclosed mustache section {}", path.connect("."));
panic!("Unclosed mustache section {}", path.connect("."));
}
_ => {}
}
@@ -321,7 +321,7 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {
.map(|x| x.to_string())
.collect();
self.tokens.push(UTag(name, tag));
} else { fail!("unbalanced \"{\" in tag"); }
} else { panic!("unbalanced \"{\" in tag"); }
}
'#' => {
let newlined = self.eat_whitespace();
@@ -352,7 +352,7 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {

loop {
if self.tokens.len() == 0 {
fail!("closing unopened section");
panic!("closing unopened section");
}

let last = self.tokens.pop();
@@ -376,7 +376,7 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {
srcs.push(src.clone());
srcs.push(csection.clone());
}
_ => fail!(),
_ => panic!(),
}
}

@@ -400,7 +400,7 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {
self.ctag.to_string()));
break;
} else {
fail!("Unclosed section");
panic!("Unclosed section");
}
}
_ => { match last {
@@ -420,7 +420,7 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {

let pos = s.as_slice().find(char::is_whitespace);
let pos = match pos {
None => { fail!("invalid change delimiter tag content"); }
None => { panic!("invalid change delimiter tag content"); }
Some(pos) => { pos }
};

@@ -430,14 +430,14 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {
let s2 = s.as_slice().slice_from(pos);
let pos = s2.find(|c| !char::is_whitespace(c));
let pos = match pos {
None => { fail!("invalid change delimiter tag content"); }
None => { panic!("invalid change delimiter tag content"); }
Some(pos) => { pos }
};

self.ctag = s2.slice_from(pos).to_string();
self.ctag_chars = self.ctag.as_slice().chars().collect();
} else {
fail!("invalid change delimiter tag content");
panic!("invalid change delimiter tag content");
}
}
_ => {
@@ -510,7 +510,7 @@ impl<'a, T: Iterator<char>> Parser<'a, T> {
fn check_content(&self, content: &str) -> String {
let trimmed = content.trim();
if trimmed.len() == 0 {
fail!("empty tag");
panic!("empty tag");
}
trimmed.to_string()
}
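
Most of the parser hunks panic on malformed templates (unclosed tags and sections, unbalanced "{", closing an unopened section, empty tags). The delimiter-change hunks in particular split the tag body (for example `<% %>` from `{{=<% %>=}}`) on the first run of whitespace and panic if either delimiter is missing. A standalone sketch of that parsing step (hypothetical helper, not the parser's actual method):

```rust
// Splits the body of a {{=<% %>=}} delimiter-change tag into the new
// opening and closing delimiters, panicking on malformed content.
fn parse_delimiters(content: &str) -> (String, String) {
    let s = content.trim();
    let pos = s
        .find(char::is_whitespace)
        .unwrap_or_else(|| panic!("invalid change delimiter tag content"));
    let otag = s[..pos].to_string();
    let rest = &s[pos..];
    let pos = rest
        .find(|c: char| !c.is_whitespace())
        .unwrap_or_else(|| panic!("invalid change delimiter tag content"));
    let ctag = rest[pos..].to_string();
    (otag, ctag)
}

fn main() {
    assert_eq!(
        parse_delimiters("<% %>"),
        ("<%".to_string(), "%>".to_string())
    );
}
```
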
42 changes: 21 additions & 21 deletions src/template.rs
@@ -110,7 +110,7 @@ impl<'a> RenderContext<'a> {
Partial(ref name, ref indent, _) => {
self.render_partial(wr, stack, name.as_slice(), indent.as_slice());
}
_ => { fail!() }
_ => { panic!() }
}
}

@@ -197,7 +197,7 @@ impl<'a> RenderContext<'a> {
self.render(wr, stack, tokens.as_slice());
}

ref value => { fail!("unexpected value {}", value); }
ref value => { panic!("unexpected value {}", value); }
}
}
};
@@ -254,7 +254,7 @@ impl<'a> RenderContext<'a> {
let tokens = self.render_fun(src, otag, ctag, f);
self.render(wr, stack, tokens.as_slice())
}
_ => { fail!("unexpected value {}", value) }
_ => { panic!("unexpected value {}", value) }
}
}
}
@@ -323,7 +323,7 @@ impl<'a> RenderContext<'a> {
None => { }
}
}
_ => { fail!("expect map: {}", path) }
_ => { panic!("expect map: {}", path) }
}
}

@@ -510,26 +510,26 @@ mod tests {

let file_contents = match File::open(&path).read_to_end() {
Ok(reader) => reader,
Err(e) => fail!("Could not read file {}", e),
Err(e) => panic!("Could not read file {}", e),
};

let s = match str::from_utf8(file_contents.as_slice()){
Some(str) => str.to_string(),
None => {fail!("File was not UTF8 encoded");}
None => {panic!("File was not UTF8 encoded");}
};

match json::from_str(s.as_slice()) {
Err(e) => fail!(e.to_string()),
Err(e) => panic!(e.to_string()),
Ok(json) => {
match json {
json::Object(d) => {
let mut d = d;
match d.pop(&"tests".to_string()) {
Some(json::List(tests)) => tests.into_iter().collect(),
_ => fail!("{}: tests key not a list", src),
_ => panic!("{}: tests key not a list", src),
}
}
_ => fail!("{}: JSON value not a map", src),
_ => panic!("{}: JSON value not a map", src),
}
}
}
@@ -545,30 +545,30 @@
path.push(*key + ".mustache");
File::create(&path).write(s.as_bytes()).unwrap();
}
_ => fail!(),
_ => panic!(),
}
}
},
_ => fail!(),
_ => panic!(),
}
}

fn run_test(test: json::JsonObject, data: Data) {
let template = match test.find(&"template".to_string()) {
Some(&json::String(ref s)) => s.clone(),
_ => fail!(),
_ => panic!(),
};

let expected = match test.find(&"expected".to_string()) {
Some(&json::String(ref s)) => s.clone(),
_ => fail!(),
_ => panic!(),
};

// Make a temporary dir where we'll store our partials. This is to
// avoid a race on filenames.
let tmpdir = match TempDir::new("") {
Ok(tmpdir) => tmpdir,
Err(_) => fail!(),
Err(_) => panic!(),
};

match test.find(&"partials".to_string()) {
@@ -596,12 +596,12 @@ mod tests {
for json in parse_spec_tests(spec).into_iter() {
let test = match json {
json::Object(m) => m,
_ => fail!(),
_ => panic!(),
};

let data = match test.find(&"data".to_string()) {
Some(data) => data.clone(),
None => fail!(),
None => panic!(),
};

let mut encoder = Encoder::new();
@@ -647,26 +647,26 @@ mod tests {
for json in parse_spec_tests("spec/specs/~lambdas.json").into_iter() {
let mut test = match json {
json::Object(m) => m,
value => { fail!("{}", value) }
value => { panic!("{}", value) }
};

let s = match test.pop(&"name".to_string()) {
Some(json::String(s)) => s,
value => { fail!("{}", value) }
value => { panic!("{}", value) }
};

// Replace the lambda with rust code.
let data = match test.pop(&"data".to_string()) {
Some(data) => data,
None => fail!(),
None => panic!(),
};

let mut encoder = Encoder::new();
data.encode(&mut encoder).unwrap();

let mut ctx = match encoder.data.pop().unwrap() {
::Map(ctx) => ctx,
_ => fail!(),
_ => panic!(),
};

// needed for the closure test.
Expand Down Expand Up @@ -713,7 +713,7 @@ mod tests {
|_text| { "".to_string() }
}

value => { fail!("{}", value) }
value => { panic!("{}", value) }
};

ctx.insert("lambda".to_string(), ::Fun(RefCell::new(f)));