diff --git a/mk/crates.mk b/mk/crates.mk
index 2b168b8f0e448..0bd0c70bd0519 100644
--- a/mk/crates.mk
+++ b/mk/crates.mk
@@ -57,10 +57,10 @@ TARGET_CRATES := libc std term \
                  panic_abort panic_unwind unwind
 RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_driver \
                 rustc_trans rustc_back rustc_llvm rustc_privacy rustc_lint \
-                rustc_data_structures rustc_platform_intrinsics \
+                rustc_data_structures rustc_platform_intrinsics rustc_errors \
                 rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \
                 rustc_const_eval rustc_const_math rustc_incremental
-HOST_CRATES := syntax syntax_ext $(RUSTC_CRATES) rustdoc fmt_macros \
+HOST_CRATES := syntax syntax_ext syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \
                flate arena graphviz rbml log serialize

 TOOLS := compiletest rustdoc rustc rustbook error_index_generator
@@ -98,43 +98,45 @@ DEPS_serialize := std log
 DEPS_term := std
 DEPS_test := std getopts term native:rust_test_helpers
-DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode
-DEPS_syntax_ext := syntax fmt_macros
+DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos
+DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros
+DEPS_syntax_pos := serialize
 DEPS_rustc_const_math := std syntax log serialize
 DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \
-                         rustc_back graphviz
+                         rustc_back graphviz syntax_pos
 DEPS_rustc := syntax fmt_macros flate arena serialize getopts rbml \
               log graphviz rustc_llvm rustc_back rustc_data_structures\
-              rustc_const_math
+              rustc_const_math syntax_pos rustc_errors
 DEPS_rustc_back := std syntax flate log libc
-DEPS_rustc_borrowck := rustc log graphviz syntax rustc_mir
+DEPS_rustc_borrowck := rustc log graphviz syntax syntax_pos rustc_errors rustc_mir
 DEPS_rustc_data_structures := std log serialize
 DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \
                      rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \
                      rustc_trans rustc_privacy rustc_lint rustc_plugin \
                      rustc_metadata syntax_ext rustc_passes rustc_save_analysis rustc_const_eval \
-                     rustc_incremental
-DEPS_rustc_lint := rustc log syntax rustc_const_eval
+                     rustc_incremental syntax_pos rustc_errors
+DEPS_rustc_errors := log libc serialize syntax_pos
+DEPS_rustc_lint := rustc log syntax syntax_pos rustc_const_eval
 DEPS_rustc_llvm := native:rustllvm libc std rustc_bitflags
-DEPS_rustc_metadata := rustc syntax rbml rustc_const_math
-DEPS_rustc_passes := syntax rustc core rustc_const_eval
-DEPS_rustc_mir := rustc syntax rustc_const_math rustc_const_eval rustc_bitflags
-DEPS_rustc_resolve := arena rustc log syntax
+DEPS_rustc_metadata := rustc syntax syntax_pos rustc_errors rbml rustc_const_math
+DEPS_rustc_passes := syntax syntax_pos rustc core rustc_const_eval rustc_errors
+DEPS_rustc_mir := rustc syntax syntax_pos rustc_const_math rustc_const_eval rustc_bitflags
+DEPS_rustc_resolve := arena rustc log syntax syntax_pos rustc_errors
 DEPS_rustc_platform_intrinsics := std
-DEPS_rustc_plugin := rustc rustc_metadata syntax
-DEPS_rustc_privacy := rustc log syntax
+DEPS_rustc_plugin := rustc rustc_metadata syntax syntax_pos rustc_errors
+DEPS_rustc_privacy := rustc log syntax syntax_pos
 DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \
                     log syntax serialize rustc_llvm rustc_platform_intrinsics \
-                    rustc_const_math rustc_const_eval rustc_incremental
-DEPS_rustc_incremental := rbml rustc serialize rustc_data_structures
-DEPS_rustc_save_analysis := rustc log syntax serialize -DEPS_rustc_typeck := rustc syntax rustc_platform_intrinsics rustc_const_math \ - rustc_const_eval + rustc_const_math rustc_const_eval rustc_incremental rustc_errors syntax_pos +DEPS_rustc_incremental := rbml rustc syntax_pos serialize rustc_data_structures +DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize +DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \ + rustc_const_eval rustc_errors DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts \ - test rustc_lint rustc_const_eval + test rustc_lint rustc_const_eval syntax_pos TOOL_DEPS_compiletest := test getopts log serialize diff --git a/src/doc/book/compiler-plugins.md b/src/doc/book/compiler-plugins.md index 5b75ad6cfa858..8426d5a626549 100644 --- a/src/doc/book/compiler-plugins.md +++ b/src/doc/book/compiler-plugins.md @@ -45,11 +45,11 @@ extern crate syntax; extern crate rustc; extern crate rustc_plugin; -use syntax::codemap::Span; use syntax::parse::token; use syntax::ast::TokenTree; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager}; use syntax::ext::build::AstBuilder; // trait for expr_usize +use syntax_pos::Span; use rustc_plugin::Registry; fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) diff --git a/src/grammar/verify.rs b/src/grammar/verify.rs index a07fdec32801d..884db34189240 100644 --- a/src/grammar/verify.rs +++ b/src/grammar/verify.rs @@ -32,9 +32,9 @@ use std::rc::Rc; use syntax::ast; use syntax::ast::Name; use syntax::codemap; -use syntax::codemap::Pos; use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token}; use syntax::parse::lexer::TokenAndSpan; +use syntax_pos::Pos; fn parse_token_list(file: &str) -> HashMap { fn id() -> token::Token { @@ -233,10 +233,10 @@ fn parse_antlr_token(s: &str, tokens: &HashMap, surrogate_ lo -= surrogate_pairs_pos.binary_search(&(lo as usize)).unwrap_or_else(|x| x) as u32; hi -= surrogate_pairs_pos.binary_search(&(hi as usize)).unwrap_or_else(|x| x) as u32; - let sp = codemap::Span { - lo: codemap::BytePos(lo), - hi: codemap::BytePos(hi), - expn_id: codemap::NO_EXPANSION + let sp = syntax_pos::Span { + lo: syntax_pos::BytePos(lo), + hi: syntax_pos::BytePos(hi), + expn_id: syntax_pos::NO_EXPANSION }; TokenAndSpan { diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index 9291227a734f7..aaef8e8423cbe 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -19,6 +19,8 @@ rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_errors = { path = "../librustc_errors" } rustc_llvm = { path = "../librustc_llvm" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index 085acc198d16a..a1c04dfcab5e6 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -95,7 +95,7 @@ impl<'a> CheckAttrVisitor<'a> { } } -impl<'a, 'v> Visitor<'v> for CheckAttrVisitor<'a> { +impl<'a> Visitor for CheckAttrVisitor<'a> { fn visit_item(&mut self, item: &ast::Item) { let target = Target::from_item(item); for attr in &item.attrs { diff --git a/src/librustc/hir/fold.rs b/src/librustc/hir/fold.rs index 1e80bc3c54dd0..78fd2bbbe0d25 100644 --- a/src/librustc/hir/fold.rs +++ b/src/librustc/hir/fold.rs @@ 
-14,9 +14,9 @@ use hir::*; use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, Attribute, Attribute_, MetaItem}; use syntax::ast::MetaItemKind; -use syntax::attr::ThinAttributesExt; use hir; -use syntax::codemap::{respan, Span, Spanned}; +use syntax_pos::Span; +use syntax::codemap::{respan, Spanned}; use syntax::ptr::P; use syntax::parse::token::keywords; use syntax::util::move_map::MoveMap; @@ -292,8 +292,11 @@ pub fn noop_fold_view_path(view_path: P, fld: &mut T) -> P< }) } -pub fn fold_attrs(attrs: HirVec, fld: &mut T) -> HirVec { - attrs.move_flat_map(|x| fld.fold_attribute(x)) +pub fn fold_attrs(attrs: T, fld: &mut F) -> T + where T: Into> + From>, + F: Folder, +{ + attrs.into().move_flat_map(|x| fld.fold_attribute(x)).into() } pub fn noop_fold_arm(Arm { attrs, pats, guard, body }: Arm, fld: &mut T) -> Arm { @@ -461,7 +464,7 @@ pub fn noop_fold_local(l: P, fld: &mut T) -> P { pat: fld.fold_pat(pat), init: init.map(|e| fld.fold_expr(e)), span: fld.new_span(span), - attrs: attrs.map_thin_attrs(|attrs| fold_attrs(attrs.into(), fld).into()), + attrs: fold_attrs(attrs, fld), } }) } @@ -1078,7 +1081,7 @@ pub fn noop_fold_expr(Expr { id, node, span, attrs }: Expr, folder: & } }, span: folder.new_span(span), - attrs: attrs.map_thin_attrs(|attrs| fold_attrs(attrs.into(), folder).into()), + attrs: fold_attrs(attrs, folder), } } diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index d47de676e7960..2d5c4ebf8d898 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -27,8 +27,8 @@ use syntax::abi::Abi; use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute}; -use syntax::attr::ThinAttributesExt; -use syntax::codemap::{Span, Spanned}; +use syntax::codemap::Spanned; +use syntax_pos::Span; use hir::*; use std::cmp; @@ -756,7 +756,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { walk_list!(visitor, visit_arm, arms); } ExprClosure(_, ref function_declaration, ref body, _fn_decl_span) => { - visitor.visit_fn(FnKind::Closure(expression.attrs.as_attr_slice()), + visitor.visit_fn(FnKind::Closure(&expression.attrs), function_declaration, body, expression.span, diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 393045bf93efb..5b655522f342f 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -50,12 +50,12 @@ use session::Session; use std::collections::BTreeMap; use std::iter; use syntax::ast::*; -use syntax::attr::{ThinAttributes, ThinAttributesExt}; use syntax::ptr::P; -use syntax::codemap::{respan, Spanned, Span}; +use syntax::codemap::{respan, Spanned}; use syntax::parse::token; use syntax::std_inject; use syntax::visit::{self, Visitor}; +use syntax_pos::Span; pub struct LoweringContext<'a> { crate_root: Option<&'static str>, @@ -137,8 +137,8 @@ impl<'a> LoweringContext<'a> { lctx: &'lcx mut LoweringContext<'interner>, } - impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> { - fn visit_item(&mut self, item: &'lcx Item) { + impl<'lcx, 'interner> Visitor for ItemLowerer<'lcx, 'interner> { + fn visit_item(&mut self, item: &Item) { self.items.insert(item.id, self.lctx.lower_item(item)); visit::walk_item(self, item); } @@ -237,19 +237,6 @@ impl<'a> LoweringContext<'a> { } } - fn lower_decl(&mut self, d: &Decl) -> P { - match d.node { - DeclKind::Local(ref l) => P(Spanned { - node: hir::DeclLocal(self.lower_local(l)), - span: d.span, - }), - DeclKind::Item(ref it) => P(Spanned { - node: hir::DeclItem(self.lower_item_id(it)), - span: d.span, - }), - } - 
} - fn lower_ty_binding(&mut self, b: &TypeBinding) -> hir::TypeBinding { hir::TypeBinding { id: b.id, @@ -587,10 +574,23 @@ impl<'a> LoweringContext<'a> { } fn lower_block(&mut self, b: &Block) -> P { + let mut stmts = Vec::new(); + let mut expr = None; + + if let Some((last, rest)) = b.stmts.split_last() { + stmts = rest.iter().map(|s| self.lower_stmt(s)).collect::>(); + let last = self.lower_stmt(last); + if let hir::StmtExpr(e, _) = last.node { + expr = Some(e); + } else { + stmts.push(last); + } + } + P(hir::Block { id: b.id, - stmts: b.stmts.iter().map(|s| self.lower_stmt(s)).collect(), - expr: b.expr.as_ref().map(|ref x| self.lower_expr(x)), + stmts: stmts.into(), + expr: expr, rules: self.lower_block_check_mode(&b.rules), span: b.span, }) @@ -683,6 +683,7 @@ impl<'a> LoweringContext<'a> { hir::TypeTraitItem(this.lower_bounds(bounds), default.as_ref().map(|x| this.lower_ty(x))) } + TraitItemKind::Macro(..) => panic!("Shouldn't exist any more"), }, span: i.span, } @@ -866,10 +867,10 @@ impl<'a> LoweringContext<'a> { pats.iter().map(|x| self.lower_pat(x)).collect(), ddpos) } - PatKind::Path(ref pth) => { + PatKind::Path(None, ref pth) => { hir::PatKind::Path(self.lower_path(pth)) } - PatKind::QPath(ref qself, ref pth) => { + PatKind::Path(Some(ref qself), ref pth) => { let qself = hir::QSelf { ty: self.lower_ty(&qself.ty), position: qself.position, @@ -962,16 +963,16 @@ impl<'a> LoweringContext<'a> { let make_call = |this: &mut LoweringContext, p, args| { let path = this.core_path(e.span, p); - let path = this.expr_path(path, None); - this.expr_call(e.span, path, args, None) + let path = this.expr_path(path, ThinVec::new()); + this.expr_call(e.span, path, args) }; let mk_stmt_let = |this: &mut LoweringContext, bind, expr| { - this.stmt_let(e.span, false, bind, expr, None) + this.stmt_let(e.span, false, bind, expr) }; let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| { - this.stmt_let(e.span, true, bind, expr, None) + this.stmt_let(e.span, true, bind, expr) }; // let placer = ; @@ -980,21 +981,21 @@ impl<'a> LoweringContext<'a> { placer_expr, e.span, hir::PopUnstableBlock, - None); + ThinVec::new()); mk_stmt_let(self, placer_ident, placer_expr) }; // let mut place = Placer::make_place(placer); let (s2, place_binding) = { - let placer = self.expr_ident(e.span, placer_ident, None, placer_binding); + let placer = self.expr_ident(e.span, placer_ident, placer_binding); let call = make_call(self, &make_place, hir_vec![placer]); mk_stmt_let_mut(self, place_ident, call) }; // let p_ptr = Place::pointer(&mut place); let (s3, p_ptr_binding) = { - let agent = self.expr_ident(e.span, place_ident, None, place_binding); - let args = hir_vec![self.expr_mut_addr_of(e.span, agent, None)]; + let agent = self.expr_ident(e.span, place_ident, place_binding); + let args = hir_vec![self.expr_mut_addr_of(e.span, agent)]; let call = make_call(self, &place_pointer, args); mk_stmt_let(self, p_ptr_ident, call) }; @@ -1005,11 +1006,12 @@ impl<'a> LoweringContext<'a> { value_expr, e.span, hir::PopUnstableBlock, - None); + ThinVec::new()); self.signal_block_expr(hir_vec![], value_expr, e.span, - hir::PopUnsafeBlock(hir::CompilerGenerated), None) + hir::PopUnsafeBlock(hir::CompilerGenerated), + ThinVec::new()) }; // push_unsafe!({ @@ -1017,19 +1019,20 @@ impl<'a> LoweringContext<'a> { // InPlace::finalize(place) // }) let expr = { - let ptr = self.expr_ident(e.span, p_ptr_ident, None, p_ptr_binding); + let ptr = self.expr_ident(e.span, p_ptr_ident, p_ptr_binding); let call_move_val_init = hir::StmtSemi( 
make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]), self.next_id()); let call_move_val_init = respan(e.span, call_move_val_init); - let place = self.expr_ident(e.span, place_ident, None, place_binding); + let place = self.expr_ident(e.span, place_ident, place_binding); let call = make_call(self, &inplace_finalize, hir_vec![place]); self.signal_block_expr(hir_vec![call_move_val_init], call, e.span, - hir::PushUnsafeBlock(hir::CompilerGenerated), None) + hir::PushUnsafeBlock(hir::CompilerGenerated), + ThinVec::new()) }; return self.signal_block_expr(hir_vec![s1, s2, s3], @@ -1101,7 +1104,7 @@ impl<'a> LoweringContext<'a> { rules: hir::DefaultBlock, span: span, }); - self.expr_block(blk, None) + self.expr_block(blk, ThinVec::new()) } _ => self.lower_expr(els), } @@ -1168,7 +1171,7 @@ impl<'a> LoweringContext<'a> { expr, e.span, hir::PopUnstableBlock, - None); + ThinVec::new()); this.field(token::intern(s), signal_block, ast_expr.span) }).collect(); let attrs = ast_expr.attrs.clone(); @@ -1180,7 +1183,7 @@ impl<'a> LoweringContext<'a> { hir_expr, ast_expr.span, hir::PushUnstableBlock, - None) + ThinVec::new()) } use syntax::ast::RangeLimits::*; @@ -1223,7 +1226,7 @@ impl<'a> LoweringContext<'a> { hir::ExprPath(hir_qself, self.lower_path(path)) } ExprKind::Break(opt_ident) => hir::ExprBreak(self.lower_opt_sp_ident(opt_ident)), - ExprKind::Again(opt_ident) => hir::ExprAgain(self.lower_opt_sp_ident(opt_ident)), + ExprKind::Continue(opt_ident) => hir::ExprAgain(self.lower_opt_sp_ident(opt_ident)), ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| self.lower_expr(x))), ExprKind::InlineAsm(InlineAsm { ref inputs, @@ -1267,9 +1270,9 @@ impl<'a> LoweringContext<'a> { ex.span = e.span; } // merge attributes into the inner expression. - ex.attrs.update(|attrs| { - attrs.prepend(e.attrs.clone()) - }); + let mut attrs = e.attrs.clone(); + attrs.extend::>(ex.attrs.into()); + ex.attrs = attrs; ex }); } @@ -1288,7 +1291,7 @@ impl<'a> LoweringContext<'a> { // ` => ` let pat_arm = { let body = self.lower_block(body); - let body_expr = self.expr_block(body, None); + let body_expr = self.expr_block(body, ThinVec::new()); let pat = self.lower_pat(pat); self.arm(hir_vec![pat], body_expr) }; @@ -1308,7 +1311,7 @@ impl<'a> LoweringContext<'a> { attrs: hir_vec![], pats: hir_vec![pat_under], guard: Some(cond), - body: self.expr_block(then, None), + body: self.expr_block(then, ThinVec::new()), }); else_opt.map(|else_opt| (else_opt, true)) } @@ -1339,7 +1342,7 @@ impl<'a> LoweringContext<'a> { let else_arm = { let pat_under = self.pat_wild(e.span); let else_expr = - else_opt.unwrap_or_else(|| self.expr_tuple(e.span, hir_vec![], None)); + else_opt.unwrap_or_else(|| self.expr_tuple(e.span, hir_vec![])); self.arm(hir_vec![pat_under], else_expr) }; @@ -1374,7 +1377,7 @@ impl<'a> LoweringContext<'a> { // ` => ` let pat_arm = { let body = self.lower_block(body); - let body_expr = self.expr_block(body, None); + let body_expr = self.expr_block(body, ThinVec::new()); let pat = self.lower_pat(pat); self.arm(hir_vec![pat], body_expr) }; @@ -1382,7 +1385,7 @@ impl<'a> LoweringContext<'a> { // `_ => break` let break_arm = { let pat_under = self.pat_wild(e.span); - let break_expr = self.expr_break(e.span, None); + let break_expr = self.expr_break(e.span, ThinVec::new()); self.arm(hir_vec![pat_under], break_expr) }; @@ -1393,7 +1396,7 @@ impl<'a> LoweringContext<'a> { hir::ExprMatch(sub_expr, arms, hir::MatchSource::WhileLetDesugar), - None); + ThinVec::new()); // `[opt_ident]: loop { ... 
}` let loop_block = self.block_expr(match_expr); @@ -1435,7 +1438,7 @@ impl<'a> LoweringContext<'a> { id: self.next_id(), node: hir::ExprBlock(body_block), span: body_span, - attrs: None, + attrs: ThinVec::new(), }); let pat = self.lower_pat(pat); let some_pat = self.pat_some(e.span, pat); @@ -1445,7 +1448,7 @@ impl<'a> LoweringContext<'a> { // `::std::option::Option::None => break` let break_arm = { - let break_expr = self.expr_break(e.span, None); + let break_expr = self.expr_break(e.span, ThinVec::new()); let pat = self.pat_none(e.span); self.arm(hir_vec![pat], break_expr) }; @@ -1461,25 +1464,26 @@ impl<'a> LoweringContext<'a> { self.path_global(e.span, strs) }; - let iter = self.expr_ident(e.span, iter, None, iter_pat.id); - let ref_mut_iter = self.expr_mut_addr_of(e.span, iter, None); - let next_path = self.expr_path(next_path, None); - let next_expr = self.expr_call(e.span, - next_path, - hir_vec![ref_mut_iter], - None); + let iter = self.expr_ident(e.span, iter, iter_pat.id); + let ref_mut_iter = self.expr_mut_addr_of(e.span, iter); + let next_path = self.expr_path(next_path, ThinVec::new()); + let next_expr = self.expr_call(e.span, next_path, hir_vec![ref_mut_iter]); let arms = hir_vec![pat_arm, break_arm]; self.expr(e.span, hir::ExprMatch(next_expr, arms, hir::MatchSource::ForLoopDesugar), - None) + ThinVec::new()) }; // `[opt_ident]: loop { ... }` let loop_block = self.block_expr(match_expr); let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident)); - let loop_expr = - P(hir::Expr { id: e.id, node: loop_expr, span: e.span, attrs: None }); + let loop_expr = P(hir::Expr { + id: e.id, + node: loop_expr, + span: e.span, + attrs: ThinVec::new(), + }); // `mut iter => { ... }` let iter_arm = self.arm(hir_vec![iter_pat], loop_expr); @@ -1492,23 +1496,22 @@ impl<'a> LoweringContext<'a> { self.path_global(e.span, strs) }; - let into_iter = self.expr_path(into_iter_path, None); - self.expr_call(e.span, into_iter, hir_vec![head], None) + let into_iter = self.expr_path(into_iter_path, ThinVec::new()); + self.expr_call(e.span, into_iter, hir_vec![head]) }; let match_expr = self.expr_match(e.span, into_iter_expr, hir_vec![iter_arm], - hir::MatchSource::ForLoopDesugar, - None); + hir::MatchSource::ForLoopDesugar); // `{ let _result = ...; _result }` // underscore prevents an unused_variables lint if the head diverges let result_ident = self.str_to_ident("_result"); let (let_stmt, let_stmt_binding) = - self.stmt_let(e.span, false, result_ident, match_expr, None); + self.stmt_let(e.span, false, result_ident, match_expr); - let result = self.expr_ident(e.span, result_ident, None, let_stmt_binding); + let result = self.expr_ident(e.span, result_ident, let_stmt_binding); let block = self.block_all(e.span, hir_vec![let_stmt], Some(result)); // add the attributes to the outer returned expr node return self.expr_block(block, e.attrs.clone()); @@ -1535,7 +1538,7 @@ impl<'a> LoweringContext<'a> { let ok_arm = { let val_ident = self.str_to_ident("val"); let val_pat = self.pat_ident(e.span, val_ident); - let val_expr = self.expr_ident(e.span, val_ident, None, val_pat.id); + let val_expr = self.expr_ident(e.span, val_ident, val_pat.id); let ok_pat = self.pat_ok(e.span, val_pat); self.arm(hir_vec![ok_pat], val_expr) @@ -1548,26 +1551,26 @@ impl<'a> LoweringContext<'a> { let from_expr = { let path = self.std_path(&["convert", "From", "from"]); let path = self.path_global(e.span, path); - let from = self.expr_path(path, None); - let err_expr = self.expr_ident(e.span, err_ident, None, 
err_local.id); + let from = self.expr_path(path, ThinVec::new()); + let err_expr = self.expr_ident(e.span, err_ident, err_local.id); - self.expr_call(e.span, from, hir_vec![err_expr], None) + self.expr_call(e.span, from, hir_vec![err_expr]) }; let err_expr = { let path = self.std_path(&["result", "Result", "Err"]); let path = self.path_global(e.span, path); - let err_ctor = self.expr_path(path, None); - self.expr_call(e.span, err_ctor, hir_vec![from_expr], None) + let err_ctor = self.expr_path(path, ThinVec::new()); + self.expr_call(e.span, err_ctor, hir_vec![from_expr]) }; let err_pat = self.pat_err(e.span, err_local); let ret_expr = self.expr(e.span, - hir::Expr_::ExprRet(Some(err_expr)), None); - + hir::Expr_::ExprRet(Some(err_expr)), + ThinVec::new()); self.arm(hir_vec![err_pat], ret_expr) }; return self.expr_match(e.span, sub_expr, hir_vec![err_arm, ok_arm], - hir::MatchSource::TryDesugar, None); + hir::MatchSource::TryDesugar); } ExprKind::Mac(_) => panic!("Shouldn't exist here"), @@ -1579,21 +1582,29 @@ impl<'a> LoweringContext<'a> { fn lower_stmt(&mut self, s: &Stmt) -> hir::Stmt { match s.node { - StmtKind::Decl(ref d, id) => { - Spanned { - node: hir::StmtDecl(self.lower_decl(d), id), + StmtKind::Local(ref l) => Spanned { + node: hir::StmtDecl(P(Spanned { + node: hir::DeclLocal(self.lower_local(l)), span: s.span, - } - } - StmtKind::Expr(ref e, id) => { + }), s.id), + span: s.span, + }, + StmtKind::Item(ref it) => Spanned { + node: hir::StmtDecl(P(Spanned { + node: hir::DeclItem(self.lower_item_id(it)), + span: s.span, + }), s.id), + span: s.span, + }, + StmtKind::Expr(ref e) => { Spanned { - node: hir::StmtExpr(self.lower_expr(e), id), + node: hir::StmtExpr(self.lower_expr(e), s.id), span: s.span, } } - StmtKind::Semi(ref e, id) => { + StmtKind::Semi(ref e) => { Spanned { - node: hir::StmtSemi(self.lower_expr(e), id), + node: hir::StmtSemi(self.lower_expr(e), s.id), span: s.span, } } @@ -1682,23 +1693,18 @@ impl<'a> LoweringContext<'a> { } } - fn expr_break(&mut self, span: Span, attrs: ThinAttributes) -> P { + fn expr_break(&mut self, span: Span, attrs: ThinVec) -> P { self.expr(span, hir::ExprBreak(None), attrs) } - fn expr_call(&mut self, - span: Span, - e: P, - args: hir::HirVec>, - attrs: ThinAttributes) + fn expr_call(&mut self, span: Span, e: P, args: hir::HirVec>) -> P { - self.expr(span, hir::ExprCall(e, args), attrs) + self.expr(span, hir::ExprCall(e, args), ThinVec::new()) } - fn expr_ident(&mut self, span: Span, id: Name, attrs: ThinAttributes, binding: NodeId) - -> P { + fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> P { let expr_path = hir::ExprPath(None, self.path_ident(span, id)); - let expr = self.expr(span, expr_path, attrs); + let expr = self.expr(span, expr_path, ThinVec::new()); let def = self.resolver.definitions().map(|defs| { Def::Local(defs.local_def_id(binding), binding) @@ -1708,12 +1714,11 @@ impl<'a> LoweringContext<'a> { expr } - fn expr_mut_addr_of(&mut self, span: Span, e: P, attrs: ThinAttributes) - -> P { - self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), attrs) + fn expr_mut_addr_of(&mut self, span: Span, e: P) -> P { + self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), ThinVec::new()) } - fn expr_path(&mut self, path: hir::Path, attrs: ThinAttributes) -> P { + fn expr_path(&mut self, path: hir::Path, attrs: ThinVec) -> P { let def = self.resolver.resolve_generated_global_path(&path, true); let expr = self.expr(path.span, hir::ExprPath(None, path), attrs); self.resolver.record_resolution(expr.id, def); @@ -1724,19 
+1729,17 @@ impl<'a> LoweringContext<'a> { span: Span, arg: P, arms: hir::HirVec, - source: hir::MatchSource, - attrs: ThinAttributes) + source: hir::MatchSource) -> P { - self.expr(span, hir::ExprMatch(arg, arms, source), attrs) + self.expr(span, hir::ExprMatch(arg, arms, source), ThinVec::new()) } - fn expr_block(&mut self, b: P, attrs: ThinAttributes) -> P { + fn expr_block(&mut self, b: P, attrs: ThinVec) -> P { self.expr(b.span, hir::ExprBlock(b), attrs) } - fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec>, attrs: ThinAttributes) - -> P { - self.expr(sp, hir::ExprTup(exprs), attrs) + fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec>) -> P { + self.expr(sp, hir::ExprTup(exprs), ThinVec::new()) } fn expr_struct(&mut self, @@ -1744,14 +1747,14 @@ impl<'a> LoweringContext<'a> { path: hir::Path, fields: hir::HirVec, e: Option>, - attrs: ThinAttributes) -> P { + attrs: ThinVec) -> P { let def = self.resolver.resolve_generated_global_path(&path, false); let expr = self.expr(sp, hir::ExprStruct(path, fields, e), attrs); self.resolver.record_resolution(expr.id, def); expr } - fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinAttributes) -> P { + fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec) -> P { P(hir::Expr { id: self.next_id(), node: node, @@ -1760,12 +1763,7 @@ impl<'a> LoweringContext<'a> { }) } - fn stmt_let(&mut self, - sp: Span, - mutbl: bool, - ident: Name, - ex: P, - attrs: ThinAttributes) + fn stmt_let(&mut self, sp: Span, mutbl: bool, ident: Name, ex: P) -> (hir::Stmt, NodeId) { let pat = if mutbl { self.pat_ident_binding_mode(sp, ident, hir::BindByValue(hir::MutMutable)) @@ -1779,7 +1777,7 @@ impl<'a> LoweringContext<'a> { init: Some(ex), id: self.next_id(), span: sp, - attrs: attrs, + attrs: ThinVec::new(), }); let decl = respan(sp, hir::DeclLocal(local)); (respan(sp, hir::StmtDecl(P(decl), self.next_id())), pat_id) @@ -1939,7 +1937,7 @@ impl<'a> LoweringContext<'a> { expr: P, span: Span, rule: hir::BlockCheckMode, - attrs: ThinAttributes) + attrs: ThinVec) -> P { let id = self.next_id(); let block = P(hir::Block { diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs index bac96c68e4cf8..50e8c6e7ab842 100644 --- a/src/librustc/hir/map/blocks.rs +++ b/src/librustc/hir/map/blocks.rs @@ -27,9 +27,8 @@ use hir::map::{self, Node}; use syntax::abi; use hir::{Block, FnDecl}; use syntax::ast::{Attribute, Name, NodeId}; -use syntax::attr::ThinAttributesExt; use hir as ast; -use syntax::codemap::Span; +use syntax_pos::Span; use hir::intravisit::FnKind; /// An FnLikeNode is a Node that is like a fn, in that it has a decl @@ -257,11 +256,7 @@ impl<'a> FnLikeNode<'a> { } map::NodeExpr(e) => match e.node { ast::ExprClosure(_, ref decl, ref block, _fn_decl_span) => - closure(ClosureParts::new(&decl, - &block, - e.id, - e.span, - e.attrs.as_attr_slice())), + closure(ClosureParts::new(&decl, &block, e.id, e.span, &e.attrs)), _ => bug!("expr FnLikeNode that is not fn-like"), }, _ => bug!("other FnLikeNode that is not fn-like"), diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index 692f56bde28f5..693d7a2edfca5 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -17,7 +17,7 @@ use hir::def_id::DefId; use middle::cstore::InlinedItem; use std::iter::repeat; use syntax::ast::{NodeId, CRATE_NODE_ID}; -use syntax::codemap::Span; +use syntax_pos::Span; /// A Visitor that walks over the HIR and collects Nodes into a HIR map pub struct NodeCollector<'ast> { diff --git 
a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index e3b6539b8ccab..ccb3e154d9204 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -98,7 +98,7 @@ impl<'ast> DefCollector<'ast> { self.parent_def = parent; } - fn visit_ast_const_integer(&mut self, expr: &'ast Expr) { + fn visit_ast_const_integer(&mut self, expr: &Expr) { // Find the node which will be used after lowering. if let ExprKind::Paren(ref inner) = expr.node { return self.visit_ast_const_integer(inner); @@ -124,8 +124,8 @@ impl<'ast> DefCollector<'ast> { } } -impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { - fn visit_item(&mut self, i: &'ast Item) { +impl<'ast> visit::Visitor for DefCollector<'ast> { + fn visit_item(&mut self, i: &Item) { debug!("visit_item: {:?}", i); // Pick the def data. This need not be unique, but the more @@ -183,7 +183,7 @@ impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { }); } - fn visit_foreign_item(&mut self, foreign_item: &'ast ForeignItem) { + fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) { let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.ident.name)); self.with_parent(def, |this| { @@ -191,7 +191,7 @@ impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { }); } - fn visit_generics(&mut self, generics: &'ast Generics) { + fn visit_generics(&mut self, generics: &Generics) { for ty_param in generics.ty_params.iter() { self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name)); } @@ -199,11 +199,12 @@ impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { visit::walk_generics(self, generics); } - fn visit_trait_item(&mut self, ti: &'ast TraitItem) { + fn visit_trait_item(&mut self, ti: &TraitItem) { let def_data = match ti.node { TraitItemKind::Method(..) | TraitItemKind::Const(..) => DefPathData::ValueNs(ti.ident.name), TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name), + TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name), }; let def = self.create_def(ti.id, def_data); @@ -216,7 +217,7 @@ impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { }); } - fn visit_impl_item(&mut self, ii: &'ast ImplItem) { + fn visit_impl_item(&mut self, ii: &ImplItem) { let def_data = match ii.node { ImplItemKind::Method(..) | ImplItemKind::Const(..) 
=> DefPathData::ValueNs(ii.ident.name), @@ -234,7 +235,7 @@ impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { }); } - fn visit_pat(&mut self, pat: &'ast Pat) { + fn visit_pat(&mut self, pat: &Pat) { let parent_def = self.parent_def; if let PatKind::Ident(_, id, _) = pat.node { @@ -246,7 +247,7 @@ impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { self.parent_def = parent_def; } - fn visit_expr(&mut self, expr: &'ast Expr) { + fn visit_expr(&mut self, expr: &Expr) { let parent_def = self.parent_def; if let ExprKind::Repeat(_, ref count) = expr.node { @@ -262,18 +263,18 @@ impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { self.parent_def = parent_def; } - fn visit_ty(&mut self, ty: &'ast Ty) { + fn visit_ty(&mut self, ty: &Ty) { if let TyKind::FixedLengthVec(_, ref length) = ty.node { self.visit_ast_const_integer(length); } visit::walk_ty(self, ty); } - fn visit_lifetime_def(&mut self, def: &'ast LifetimeDef) { + fn visit_lifetime_def(&mut self, def: &LifetimeDef) { self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name)); } - fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) { + fn visit_macro_def(&mut self, macro_def: &MacroDef) { self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name)); } } diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 41b72e569f475..f9fb8ac66b7ef 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -23,9 +23,9 @@ use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, }; -use syntax::attr::ThinAttributesExt; -use syntax::codemap::{Span, Spanned}; +use syntax::codemap::Spanned; use syntax::visit; +use syntax_pos::Span; use hir::*; use hir::fold::Folder; @@ -577,7 +577,7 @@ impl<'ast> Map<'ast> { Some(NodeTraitItem(ref ti)) => Some(&ti.attrs[..]), Some(NodeImplItem(ref ii)) => Some(&ii.attrs[..]), Some(NodeVariant(ref v)) => Some(&v.node.attrs[..]), - Some(NodeExpr(ref e)) => Some(e.attrs.as_attr_slice()), + Some(NodeExpr(ref e)) => Some(&*e.attrs), Some(NodeStmt(ref s)) => Some(s.node.attrs()), // unit/tuple structs take the attributes straight from // the struct definition. 
diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs
index 8faa1cc1174e8..a139dd152f006 100644
--- a/src/librustc/hir/mod.rs
+++ b/src/librustc/hir/mod.rs
@@ -36,13 +36,15 @@ use hir::def::Def;
 use hir::def_id::DefId;
 use util::nodemap::{NodeMap, FnvHashSet};

-use syntax::codemap::{self, mk_sp, respan, Span, Spanned, ExpnId};
+use syntax_pos::{mk_sp, Span, ExpnId};
+use syntax::codemap::{self, respan, Spanned};
 use syntax::abi::Abi;
-use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, TokenTree, AsmDialect};
+use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
-use syntax::attr::{ThinAttributes, ThinAttributesExt};
 use syntax::parse::token::{keywords, InternedString};
 use syntax::ptr::P;
+use syntax::tokenstream::TokenTree;
+use syntax::util::ThinVec;

 use std::collections::BTreeMap;
 use std::fmt;
@@ -732,7 +734,7 @@ impl Stmt_ {
         match *self {
             StmtDecl(ref d, _) => d.node.attrs(),
             StmtExpr(ref e, _) |
-            StmtSemi(ref e, _) => e.attrs.as_attr_slice(),
+            StmtSemi(ref e, _) => &e.attrs,
         }
     }

@@ -756,7 +758,7 @@ pub struct Local {
     pub init: Option<P<Expr>>,
     pub id: NodeId,
     pub span: Span,
-    pub attrs: ThinAttributes,
+    pub attrs: ThinVec<Attribute>,
 }

 pub type Decl = Spanned<Decl_>;
@@ -772,7 +774,7 @@ pub enum Decl_ {
 impl Decl_ {
     pub fn attrs(&self) -> &[Attribute] {
         match *self {
-            DeclLocal(ref l) => l.attrs.as_attr_slice(),
+            DeclLocal(ref l) => &l.attrs,
             DeclItem(_) => &[]
         }
     }
@@ -817,7 +819,7 @@ pub struct Expr {
     pub id: NodeId,
     pub node: Expr_,
     pub span: Span,
-    pub attrs: ThinAttributes,
+    pub attrs: ThinVec<Attribute>,
 }

 impl fmt::Debug for Expr {
diff --git a/src/librustc/hir/pat_util.rs b/src/librustc/hir/pat_util.rs
index 6405be7455dfe..3bb9b6d260255 100644
--- a/src/librustc/hir/pat_util.rs
+++ b/src/librustc/hir/pat_util.rs
@@ -14,7 +14,8 @@ use hir::{self, PatKind};
 use ty::TyCtxt;
 use util::nodemap::FnvHashMap;
 use syntax::ast;
-use syntax::codemap::{Span, Spanned, DUMMY_SP};
+use syntax::codemap::Spanned;
+use syntax_pos::{Span, DUMMY_SP};

 use std::iter::{Enumerate, ExactSizeIterator};

diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs
index 6340f9e74724a..598a2cfca1320 100644
--- a/src/librustc/hir/print.rs
+++ b/src/librustc/hir/print.rs
@@ -12,8 +12,7 @@ pub use self::AnnNode::*;

 use syntax::abi::Abi;
 use syntax::ast;
-use syntax::codemap::{self, CodeMap, BytePos, Spanned};
-use syntax::errors;
+use syntax::codemap::{CodeMap, Spanned};
 use syntax::parse::token::{self, keywords, BinOpToken};
 use syntax::parse::lexer::comments;
 use syntax::print::pp::{self, break_offset, word, space, hardbreak};
@@ -21,6 +20,8 @@ use syntax::print::pp::{Breaks, eof};
 use syntax::print::pp::Breaks::{Consistent, Inconsistent};
 use syntax::print::pprust::{self as ast_pp, PrintState};
 use syntax::ptr::P;
+use syntax_pos::{self, BytePos};
+use errors;

 use hir;
 use hir::{Crate, PatKind, RegionTyParamBound, SelfKind, TraitTyParamBound, TraitBoundModifier};
@@ -368,11 +369,11 @@ impl<'a> State<'a> {
         self.end() // close the head-box
     }

-    pub fn bclose_(&mut self, span: codemap::Span, indented: usize) -> io::Result<()> {
+    pub fn bclose_(&mut self, span: syntax_pos::Span, indented: usize) -> io::Result<()> {
         self.bclose_maybe_open(span, indented, true)
     }
     pub fn bclose_maybe_open(&mut self,
-                             span: codemap::Span,
+                             span: syntax_pos::Span,
                              indented: usize,
                              close_box: bool)
                              -> io::Result<()> {
@@ -384,7 +385,7 @@
         }
         Ok(())
     }
-    pub fn bclose(&mut self, span: codemap::Span) -> io::Result<()> {
+    pub fn bclose(&mut self, span: syntax_pos::Span) -> io::Result<()> {
         self.bclose_(span, indent_unit)
     }

@@ -432,7 +433,7 @@
                                   mut get_span: G)
                                   -> io::Result<()>
         where F: FnMut(&mut State, &T) -> io::Result<()>,
-              G: FnMut(&T) -> codemap::Span
+              G: FnMut(&T) -> syntax_pos::Span
     {
         self.rbox(0, b)?;
         let len = elts.len();
@@ -859,7 +860,7 @@
                           enum_definition: &hir::EnumDef,
                           generics: &hir::Generics,
                           name: ast::Name,
-                          span: codemap::Span,
+                          span: syntax_pos::Span,
                           visibility: &hir::Visibility)
                           -> io::Result<()> {
         self.head(&visibility_qualified(visibility, "enum"))?;
@@ -872,7 +873,7 @@

     pub fn print_variants(&mut self,
                           variants: &[hir::Variant],
-                          span: codemap::Span)
+                          span: syntax_pos::Span)
                           -> io::Result<()> {
         self.bopen()?;
         for v in variants {
@@ -902,7 +903,7 @@
                             struct_def: &hir::VariantData,
                             generics: &hir::Generics,
                             name: ast::Name,
-                            span: codemap::Span,
+                            span: syntax_pos::Span,
                             print_finalizer: bool)
                             -> io::Result<()> {
         self.print_name(name)?;
@@ -2237,7 +2238,7 @@
     }

     pub fn maybe_print_trailing_comment(&mut self,
-                                        span: codemap::Span,
+                                        span: syntax_pos::Span,
                                         next_pos: Option<BytePos>)
                                         -> io::Result<()> {
         let cm = match self.cm {
diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs
index fa3715b6891a0..c9235d063cba0 100644
--- a/src/librustc/infer/combine.rs
+++ b/src/librustc/infer/combine.rs
@@ -49,7 +49,7 @@ use ty::relate::{RelateResult, TypeRelation};
 use traits::PredicateObligations;

 use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;

 #[derive(Clone)]
 pub struct CombineFields<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
diff --git a/src/librustc/infer/error_reporting.rs b/src/librustc/infer/error_reporting.rs
index 86bc4355b2d3c..894044296cbd6 100644
--- a/src/librustc/infer/error_reporting.rs
+++ b/src/librustc/infer/error_reporting.rs
@@ -91,10 +91,10 @@ use std::cell::{Cell, RefCell};
 use std::char::from_u32;
 use std::fmt;
 use syntax::ast;
-use syntax::errors::{DiagnosticBuilder, check_old_skool};
-use syntax::codemap::{self, Pos, Span};
 use syntax::parse::token;
 use syntax::ptr::P;
+use syntax_pos::{self, Pos, Span};
+use errors::{DiagnosticBuilder, check_old_skool};

 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     pub fn note_and_explain_region(self,
@@ -1933,6 +1933,6 @@ impl LifeGiver {

 fn name_to_dummy_lifetime(name: ast::Name) -> hir::Lifetime {
     hir::Lifetime { id: ast::DUMMY_NODE_ID,
-                    span: codemap::DUMMY_SP,
+                    span: syntax_pos::DUMMY_SP,
                     name: name }
 }
diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs
index 84b72d9be60a1..03a09917c5343 100644
--- a/src/librustc/infer/higher_ranked/mod.rs
+++ b/src/librustc/infer/higher_ranked/mod.rs
@@ -23,7 +23,7 @@ use super::region_inference::{TaintDirections};
 use ty::{self, TyCtxt, Binder, TypeFoldable};
 use ty::error::TypeError;
 use ty::relate::{Relate, RelateResult, TypeRelation};
-use syntax::codemap::Span;
+use syntax_pos::Span;
 use util::nodemap::{FnvHashMap, FnvHashSet};

 pub struct HrMatchResult<U> {
diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs
index 620e6504f413f..2ea2978b2940d 100644
--- a/src/librustc/infer/mod.rs
+++ b/src/librustc/infer/mod.rs
@@ -39,9 +39,8 @@ use rustc_data_structures::unify::{self, UnificationTable};
 use std::cell::{Cell, RefCell, Ref, RefMut};
 use std::fmt;
 use syntax::ast;
-use syntax::codemap;
-use syntax::codemap::{Span, DUMMY_SP};
-use syntax::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
+use syntax_pos::{self, Span, DUMMY_SP};
use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap}; use self::combine::CombineFields; @@ -1036,7 +1035,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { -> UnitResult<'tcx> { self.probe(|_| { - let origin = TypeOrigin::Misc(codemap::DUMMY_SP); + let origin = TypeOrigin::Misc(syntax_pos::DUMMY_SP); let trace = TypeTrace::types(origin, true, a, b); self.sub(true, trace, &a, &b).map(|_| ()) }) @@ -1813,7 +1812,7 @@ impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> { pub fn dummy(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> TypeTrace<'tcx> { TypeTrace { - origin: TypeOrigin::Misc(codemap::DUMMY_SP), + origin: TypeOrigin::Misc(syntax_pos::DUMMY_SP), values: Types(ExpectedFound { expected: tcx.types.err, found: tcx.types.err, @@ -1887,7 +1886,7 @@ impl RegionVariableOrigin { Coercion(a) => a, EarlyBoundRegion(a, _) => a, LateBoundRegion(a, _, _) => a, - BoundRegionInCoherence(_) => codemap::DUMMY_SP, + BoundRegionInCoherence(_) => syntax_pos::DUMMY_SP, UpvarRegion(_, a) => a } } diff --git a/src/librustc/infer/type_variable.rs b/src/librustc/infer/type_variable.rs index badbeccee83cf..09ae16540c4f1 100644 --- a/src/librustc/infer/type_variable.rs +++ b/src/librustc/infer/type_variable.rs @@ -13,7 +13,7 @@ use self::TypeVariableValue::*; use self::UndoEntry::*; use hir::def_id::{DefId}; use ty::{self, Ty}; -use syntax::codemap::Span; +use syntax_pos::Span; use std::cmp::min; use std::marker::PhantomData; diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index fc2d68d7262e6..eb9dd7dfcb36f 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -55,8 +55,10 @@ extern crate rustc_data_structures; extern crate serialize; extern crate collections; extern crate rustc_const_math; +extern crate rustc_errors as errors; #[macro_use] extern crate log; #[macro_use] extern crate syntax; +#[macro_use] extern crate syntax_pos; #[macro_use] #[no_link] extern crate rustc_bitflags; extern crate serialize as rustc_serialize; // used by deriving diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 94f17ea779ac8..3e101e1934f29 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -40,11 +40,10 @@ use std::cmp; use std::default::Default as StdDefault; use std::mem; use syntax::attr::{self, AttrMetaMethods}; -use syntax::codemap::Span; -use syntax::errors::DiagnosticBuilder; use syntax::parse::token::InternedString; use syntax::ast; -use syntax::attr::ThinAttributesExt; +use syntax_pos::Span; +use errors::DiagnosticBuilder; use hir; use hir::intravisit as hir_visit; use hir::intravisit::{IdVisitor, IdVisitingOperation}; @@ -767,7 +766,7 @@ impl<'a, 'tcx, 'v> hir_visit::Visitor<'v> for LateContext<'a, 'tcx> { } fn visit_expr(&mut self, e: &hir::Expr) { - self.with_lint_attrs(e.attrs.as_attr_slice(), |cx| { + self.with_lint_attrs(&e.attrs, |cx| { run_lints!(cx, check_expr, late_passes, e); hir_visit::walk_expr(cx, e); }) @@ -832,7 +831,7 @@ impl<'a, 'tcx, 'v> hir_visit::Visitor<'v> for LateContext<'a, 'tcx> { } fn visit_local(&mut self, l: &hir::Local) { - self.with_lint_attrs(l.attrs.as_attr_slice(), |cx| { + self.with_lint_attrs(&l.attrs, |cx| { run_lints!(cx, check_local, late_passes, l); hir_visit::walk_local(cx, l); }) @@ -905,7 +904,7 @@ impl<'a, 'tcx, 'v> hir_visit::Visitor<'v> for LateContext<'a, 'tcx> { } } -impl<'a, 'v> ast_visit::Visitor<'v> for EarlyContext<'a> { +impl<'a> ast_visit::Visitor for EarlyContext<'a> { fn visit_item(&mut self, it: &ast::Item) { self.with_lint_attrs(&it.attrs, |cx| { run_lints!(cx, check_item, early_passes, it); @@ -928,7 +927,7 @@ 
impl<'a, 'v> ast_visit::Visitor<'v> for EarlyContext<'a> { } fn visit_expr(&mut self, e: &ast::Expr) { - self.with_lint_attrs(e.attrs.as_attr_slice(), |cx| { + self.with_lint_attrs(&e.attrs, |cx| { run_lints!(cx, check_expr, early_passes, e); ast_visit::walk_expr(cx, e); }) @@ -939,8 +938,8 @@ impl<'a, 'v> ast_visit::Visitor<'v> for EarlyContext<'a> { ast_visit::walk_stmt(self, s); } - fn visit_fn(&mut self, fk: ast_visit::FnKind<'v>, decl: &'v ast::FnDecl, - body: &'v ast::Block, span: Span, id: ast::NodeId) { + fn visit_fn(&mut self, fk: ast_visit::FnKind, decl: &ast::FnDecl, + body: &ast::Block, span: Span, id: ast::NodeId) { run_lints!(self, check_fn, early_passes, fk, decl, body, span, id); ast_visit::walk_fn(self, fk, decl, body, span); run_lints!(self, check_fn_post, early_passes, fk, decl, body, span, id); @@ -988,7 +987,7 @@ impl<'a, 'v> ast_visit::Visitor<'v> for EarlyContext<'a> { } fn visit_local(&mut self, l: &ast::Local) { - self.with_lint_attrs(l.attrs.as_attr_slice(), |cx| { + self.with_lint_attrs(&l.attrs, |cx| { run_lints!(cx, check_local, early_passes, l); ast_visit::walk_local(cx, l); }) @@ -1005,11 +1004,6 @@ impl<'a, 'v> ast_visit::Visitor<'v> for EarlyContext<'a> { ast_visit::walk_arm(self, a); } - fn visit_decl(&mut self, d: &ast::Decl) { - run_lints!(self, check_decl, early_passes, d); - ast_visit::walk_decl(self, d); - } - fn visit_expr_post(&mut self, e: &ast::Expr) { run_lints!(self, check_expr_post, early_passes, e); } diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index cc7fa54bd0a5e..92aa446c265f9 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -33,7 +33,7 @@ pub use self::LintSource::*; use std::hash; use std::ascii::AsciiExt; -use syntax::codemap::Span; +use syntax_pos::Span; use hir::intravisit::FnKind; use syntax::visit as ast_visit; use syntax::ast; @@ -195,7 +195,6 @@ pub trait EarlyLintPass: LintPass { fn check_stmt(&mut self, _: &EarlyContext, _: &ast::Stmt) { } fn check_arm(&mut self, _: &EarlyContext, _: &ast::Arm) { } fn check_pat(&mut self, _: &EarlyContext, _: &ast::Pat) { } - fn check_decl(&mut self, _: &EarlyContext, _: &ast::Decl) { } fn check_expr(&mut self, _: &EarlyContext, _: &ast::Expr) { } fn check_expr_post(&mut self, _: &EarlyContext, _: &ast::Expr) { } fn check_ty(&mut self, _: &EarlyContext, _: &ast::Ty) { } diff --git a/src/librustc/middle/astconv_util.rs b/src/librustc/middle/astconv_util.rs index f132212415f2f..e856eb84ff2c3 100644 --- a/src/librustc/middle/astconv_util.rs +++ b/src/librustc/middle/astconv_util.rs @@ -17,7 +17,7 @@ use hir::def::Def; use ty::{Ty, TyCtxt}; -use syntax::codemap::Span; +use syntax_pos::Span; use hir as ast; impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index d85ea96146227..fd9463b13c055 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -39,9 +39,9 @@ use std::rc::Rc; use std::path::PathBuf; use syntax::ast; use syntax::attr; -use syntax::codemap::Span; use syntax::ptr::P; use syntax::parse::token::InternedString; +use syntax_pos::Span; use rustc_back::target::Target; use hir; use hir::intravisit::{IdVisitor, IdVisitingOperation, Visitor}; diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 17da8ddbbc30c..2b59e603cc897 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -26,6 +26,7 @@ use lint; use std::collections::HashSet; use syntax::{ast, codemap}; use syntax::attr; +use syntax_pos; // Any local node 
that may call something in its body block should be // explored. For example, if it's a live NodeItem that is a @@ -215,7 +216,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for MarkSymbolVisitor<'a, 'tcx> { fn visit_variant_data(&mut self, def: &hir::VariantData, _: ast::Name, - _: &hir::Generics, _: ast::NodeId, _: codemap::Span) { + _: &hir::Generics, _: ast::NodeId, _: syntax_pos::Span) { let has_extern_repr = self.struct_has_extern_repr; let inherited_pub_visibility = self.inherited_pub_visibility; let live_fields = def.fields().iter().filter(|f| { @@ -478,7 +479,7 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { fn warn_dead_code(&mut self, id: ast::NodeId, - span: codemap::Span, + span: syntax_pos::Span, name: ast::Name, node_type: &str) { let name = name.as_str(); diff --git a/src/librustc/middle/effect.rs b/src/librustc/middle/effect.rs index 24816d2b49729..6fe98119c7060 100644 --- a/src/librustc/middle/effect.rs +++ b/src/librustc/middle/effect.rs @@ -18,7 +18,7 @@ use ty::{self, Ty, TyCtxt}; use ty::MethodCall; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use hir; use hir::intravisit; use hir::intravisit::{FnKind, Visitor}; diff --git a/src/librustc/middle/entry.rs b/src/librustc/middle/entry.rs index 602889375e970..23a261400ed07 100644 --- a/src/librustc/middle/entry.rs +++ b/src/librustc/middle/entry.rs @@ -15,8 +15,8 @@ use hir::def_id::{CRATE_DEF_INDEX}; use session::{config, Session}; use syntax::ast::NodeId; use syntax::attr; -use syntax::codemap::Span; use syntax::entry::EntryPointType; +use syntax_pos::Span; use hir::{Item, ItemFn}; use hir::intravisit::Visitor; diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 3b571ed057602..4e0b76365041c 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -30,7 +30,7 @@ use hir::{self, PatKind}; use syntax::ast; use syntax::ptr::P; -use syntax::codemap::Span; +use syntax_pos::Span; /////////////////////////////////////////////////////////////////////////// // The Delegate trait diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index 70158e9b9dff7..d753381d71e25 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -18,7 +18,7 @@ use ty::layout::{LayoutError, Pointer, SizeSkeleton}; use syntax::abi::Abi::RustIntrinsic; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use hir::intravisit::{self, Visitor, FnKind}; use hir; diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index ceffa366413fa..cb2f68bb55397 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -123,9 +123,10 @@ use std::io::prelude::*; use std::io; use std::rc::Rc; use syntax::ast::{self, NodeId}; -use syntax::codemap::{BytePos, original_sp, Span}; +use syntax::codemap::original_sp; use syntax::parse::token::keywords; use syntax::ptr::P; +use syntax_pos::{BytePos, Span}; use hir::Expr; use hir; diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index a345e94ebda0b..a70829347f1c1 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -83,7 +83,7 @@ use hir::{MutImmutable, MutMutable, PatKind}; use hir::pat_util::EnumerateAndAdjustIterator; use hir; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use std::fmt; use std::rc::Rc; diff --git 
a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 3efc584ae2b66..6f0ad087dc589 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -27,8 +27,9 @@ use std::cell::RefCell; use std::collections::hash_map::Entry; use std::fmt; use std::mem; -use syntax::codemap::{self, Span}; +use syntax::codemap; use syntax::ast::{self, NodeId}; +use syntax_pos::Span; use hir; use hir::intravisit::{self, Visitor, FnKind}; diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 78d9f5c9b7c29..2ba05b4ae3212 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -29,8 +29,8 @@ use ty; use std::fmt; use std::mem::replace; use syntax::ast; -use syntax::codemap::Span; use syntax::parse::token::keywords; +use syntax_pos::Span; use util::nodemap::NodeMap; use rustc_data_structures::fnv::FnvHashSet; diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 3744f564fa271..36268a9de960f 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -23,7 +23,7 @@ use hir::def_id::{CRATE_DEF_INDEX, DefId}; use ty::{self, TyCtxt}; use middle::privacy::AccessLevels; use syntax::parse::token::InternedString; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use syntax::ast; use syntax::ast::{NodeId, Attribute}; use syntax::feature_gate::{GateIssue, emit_feature_err, find_lang_feature_accepted_version}; diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 325887684914b..20c5320fd6464 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -15,8 +15,8 @@ use session::Session; use middle::lang_items; use syntax::ast; -use syntax::codemap::Span; use syntax::parse::token::InternedString; +use syntax_pos::Span; use hir::intravisit::Visitor; use hir::intravisit; use hir; diff --git a/src/librustc/mir/repr.rs b/src/librustc/mir/repr.rs index d39ff28841851..62d3421770c2f 100644 --- a/src/librustc/mir/repr.rs +++ b/src/librustc/mir/repr.rs @@ -29,7 +29,7 @@ use std::{iter, u32}; use std::ops::{Index, IndexMut}; use std::vec::IntoIter; use syntax::ast::{self, Name}; -use syntax::codemap::Span; +use syntax_pos::Span; use super::cache::Cache; diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs index bc45a730c2e21..3f714ff4d5152 100644 --- a/src/librustc/mir/visit.rs +++ b/src/librustc/mir/visit.rs @@ -16,7 +16,7 @@ use mir::repr::*; use rustc_const_math::ConstUsize; use rustc_data_structures::tuple_slice::TupleSlice; use rustc_data_structures::indexed_vec::Idx; -use syntax::codemap::Span; +use syntax_pos::Span; // # The MIR Visitor // diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 7a1ac7c218c8c..a37990061920b 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -26,11 +26,12 @@ use middle::cstore; use syntax::ast::{self, IntTy, UintTy}; use syntax::attr; use syntax::attr::AttrMetaMethods; -use syntax::errors::{ColorConfig, Handler}; use syntax::parse; use syntax::parse::token::InternedString; use syntax::feature_gate::UnstableFeatures; +use errors::{ColorConfig, Handler}; + use getopts; use std::collections::HashMap; use std::env; @@ -1420,12 +1421,11 @@ mod tests { use middle::cstore::DummyCrateStore; use session::config::{build_configuration, build_session_options}; use session::build_session; - + use errors; use std::rc::Rc; use getopts::{getopts, 
OptGroup}; use syntax::attr; use syntax::attr::AttrMetaMethods; - use syntax::diagnostics; fn optgroups() -> Vec { super::rustc_optgroups().into_iter() @@ -1442,7 +1442,7 @@ mod tests { Ok(m) => m, Err(f) => panic!("test_switch_implies_cfg_test: {}", f) }; - let registry = diagnostics::registry::Registry::new(&[]); + let registry = errors::registry::Registry::new(&[]); let sessopts = build_session_options(matches); let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); let cfg = build_configuration(&sess); @@ -1462,7 +1462,7 @@ mod tests { panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) } }; - let registry = diagnostics::registry::Registry::new(&[]); + let registry = errors::registry::Registry::new(&[]); let sessopts = build_session_options(matches); let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); @@ -1479,7 +1479,7 @@ mod tests { let matches = getopts(&[ "-Awarnings".to_string() ], &optgroups()).unwrap(); - let registry = diagnostics::registry::Registry::new(&[]); + let registry = errors::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); @@ -1491,7 +1491,7 @@ mod tests { "-Awarnings".to_string(), "-Dwarnings".to_string() ], &optgroups()).unwrap(); - let registry = diagnostics::registry::Registry::new(&[]); + let registry = errors::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); @@ -1502,7 +1502,7 @@ mod tests { let matches = getopts(&[ "-Adead_code".to_string() ], &optgroups()).unwrap(); - let registry = diagnostics::registry::Registry::new(&[]); + let registry = errors::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index d60c31369d0a1..77259cea24d28 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -21,17 +21,16 @@ use util::nodemap::{NodeMap, FnvHashMap}; use mir::transform as mir_pass; use syntax::ast::{NodeId, NodeIdAssigner, Name}; -use syntax::codemap::{Span, MultiSpan}; -use syntax::errors::{self, DiagnosticBuilder}; -use syntax::errors::emitter::{Emitter, BasicEmitter, EmitterWriter}; -use syntax::errors::json::JsonEmitter; -use syntax::diagnostics; +use errors::{self, DiagnosticBuilder}; +use errors::emitter::{Emitter, BasicEmitter, EmitterWriter}; +use syntax::json::JsonEmitter; use syntax::feature_gate; use syntax::parse; use syntax::parse::ParseSess; use syntax::parse::token; use syntax::{ast, codemap}; use syntax::feature_gate::AttributeType; +use syntax_pos::{Span, MultiSpan}; use rustc_back::target::Target; use llvm; @@ -424,7 +423,7 @@ fn split_msg_into_multilines(msg: &str) -> Option { pub fn build_session(sopts: config::Options, dep_graph: &DepGraph, local_crate_source_file: Option, - registry: diagnostics::registry::Registry, + registry: errors::registry::Registry, cstore: Rc CrateStore<'a>>) -> Session { build_session_with_codemap(sopts, @@ -438,7 +437,7 @@ pub fn build_session(sopts: config::Options, pub fn build_session_with_codemap(sopts: config::Options, dep_graph: &DepGraph, local_crate_source_file: Option, - registry: diagnostics::registry::Registry, + registry: errors::registry::Registry, cstore: Rc CrateStore<'a>>, 
codemap: Rc<codemap::CodeMap>) -> Session { @@ -455,7 +454,10 @@ pub fn build_session_with_codemap(sopts: config::Options, let emitter: Box<Emitter> = match sopts.error_format { config::ErrorOutputType::HumanReadable(color_config) => { - Box::new(EmitterWriter::stderr(color_config, Some(registry), codemap.clone())) + Box::new(EmitterWriter::stderr(color_config, + Some(registry), + codemap.clone(), + errors::snippet::FormatMode::EnvironmentSelected)) } config::ErrorOutputType::Json => { Box::new(JsonEmitter::stderr(Some(registry), codemap.clone())) diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index 414b9fa70c302..4344eb1ebf664 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -17,7 +17,7 @@ use hir::def_id::DefId; use ty::subst::TypeSpace; use ty::{self, Ty, TyCtxt}; use infer::{InferCtxt, TypeOrigin}; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; #[derive(Copy, Clone)] struct InferIsLocal(bool); diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 9a69958fea014..b6591471f0eef 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -37,8 +37,8 @@ use std::cmp; use std::fmt; use syntax::ast; use syntax::attr::{AttributeMethods, AttrMetaMethods}; -use syntax::codemap::Span; -use syntax::errors::DiagnosticBuilder; +use syntax_pos::Span; +use errors::DiagnosticBuilder; #[derive(Debug, PartialEq, Eq, Hash)] pub struct TraitErrorKey<'tcx> { diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 5b363d90578b1..68db5f8647639 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -23,7 +23,7 @@ use infer::InferCtxt; use std::rc::Rc; use syntax::ast; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; pub use self::error_reporting::TraitErrorKey; pub use self::coherence::orphan_check; diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index c7a3637557602..9348def1311eb 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -27,7 +27,7 @@ use middle::region; use ty::subst::{Subst, Substs}; use traits::{self, ProjectionMode, ObligationCause, Normalized}; use ty::{self, TyCtxt}; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; pub mod specialization_graph; diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs index f8149565aa66b..a097c0093b2e1 100644 --- a/src/librustc/traits/util.rs +++ b/src/librustc/traits/util.rs @@ -12,7 +12,7 @@ use hir::def_id::DefId; use infer::InferCtxt; use ty::subst::{Subst, Substs}; use ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef}; -use syntax::codemap::Span; +use syntax_pos::Span; use util::common::ErrorReported; use util::nodemap::FnvHashSet; diff --git a/src/librustc/ty/adjustment.rs b/src/librustc/ty/adjustment.rs index 60f2ca6f4d9b6..47ca7d335ab82 100644 --- a/src/librustc/ty/adjustment.rs +++ b/src/librustc/ty/adjustment.rs @@ -15,7 +15,7 @@ use ty::{self, Ty, TyCtxt, TypeAndMut, TypeFoldable}; use ty::LvaluePreference::{NoPreference}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use hir; diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 3d187cfc08175..219cb5e383a8d 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -862,7 +862,7 @@ pub mod tls { use std::cell::Cell; use std::fmt; - use syntax::codemap; + use syntax_pos; /// Marker types used for the scoped TLS slot. 
/// The type context cannot be used directly because the scoped TLS @@ -875,7 +875,7 @@ pub mod tls { *const ThreadLocalInterners)>> = Cell::new(None) } - fn span_debug(span: codemap::Span, f: &mut fmt::Formatter) -> fmt::Result { + fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result { with(|tcx| { write!(f, "{}", tcx.sess.codemap().span_to_string(span)) }) @@ -884,7 +884,7 @@ pub mod tls { pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R { - codemap::SPAN_DEBUG.with(|span_dbg| { + syntax_pos::SPAN_DEBUG.with(|span_dbg| { let original_span_debug = span_dbg.get(); span_dbg.set(span_debug); let result = enter(&gcx, &gcx.global_interners, f); diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs index b5190f313309f..bddc2dbdb7e77 100644 --- a/src/librustc/ty/error.rs +++ b/src/librustc/ty/error.rs @@ -16,8 +16,8 @@ use ty::{self, BoundRegion, Region, Ty, TyCtxt}; use std::fmt; use syntax::abi; use syntax::ast::{self, Name}; -use syntax::codemap::Span; -use syntax::errors::DiagnosticBuilder; +use errors::DiagnosticBuilder; +use syntax_pos::Span; use hir; diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index d305a772ae5db..61c8aa8fcebf3 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -21,7 +21,7 @@ use util::common::slice_pat; use syntax::ast::{FloatTy, IntTy, UintTy}; use syntax::attr; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; use std::cmp; use std::fmt; diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 2826680926637..14db922d29810 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -44,8 +44,8 @@ use std::slice; use std::vec::IntoIter; use syntax::ast::{self, CrateNum, Name, NodeId}; use syntax::attr::{self, AttrMetaMethods}; -use syntax::codemap::{DUMMY_SP, Span}; use syntax::parse::token::InternedString; +use syntax_pos::{DUMMY_SP, Span}; use rustc_const_math::ConstInt; diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index fbc565ca847b6..595d965ffce26 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -22,7 +22,7 @@ use std::fmt; use std::iter::IntoIterator; use std::slice::Iter; use std::vec::{Vec, IntoIter}; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; /////////////////////////////////////////////////////////////////////////// diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index 7745f00c2ebd9..ee7fb5fc94b77 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -27,7 +27,7 @@ use std::cmp; use std::hash::{Hash, SipHasher, Hasher}; use syntax::ast::{self, Name}; use syntax::attr::{self, SignedInt, UnsignedInt}; -use syntax::codemap::Span; +use syntax_pos::Span; use hir; diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index a25994ea69981..37ba936d2f42a 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -16,7 +16,7 @@ use traits; use ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable}; use std::iter::once; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use util::common::ErrorReported; /// Returns the set of obligations needed to make `ty` well-formed. 
diff --git a/src/librustc_borrowck/Cargo.toml b/src/librustc_borrowck/Cargo.toml index c8a71ea350555..d53318f176848 100644 --- a/src/librustc_borrowck/Cargo.toml +++ b/src/librustc_borrowck/Cargo.toml @@ -12,7 +12,9 @@ test = false [dependencies] log = { path = "../liblog" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } graphviz = { path = "../libgraphviz" } rustc = { path = "../librustc" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_mir = { path = "../librustc_mir" } +rustc_errors = { path = "../librustc_errors" } diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index 36f95f62d0606..9cae270984f00 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -27,7 +27,7 @@ use rustc::middle::mem_categorization::Categorization; use rustc::middle::region; use rustc::ty::{self, TyCtxt}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir; use std::rc::Rc; diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index 6ab85d7d449dc..d3d6fa9eb52b5 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -27,8 +27,8 @@ use rustc::middle::mem_categorization as mc; use std::mem; use std::rc::Rc; use syntax::ast; -use syntax::codemap::{Span, DUMMY_SP}; use syntax::attr::AttrMetaMethods; +use syntax_pos::{Span, DUMMY_SP}; #[derive(PartialEq, Eq, PartialOrd, Ord)] enum Fragment { diff --git a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs index 64f35aed23f5c..9431dcdbcac8e 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs @@ -22,7 +22,7 @@ use rustc::ty; use std::rc::Rc; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir::{self, PatKind}; struct GatherMoveInfo<'tcx> { diff --git a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs index c2492bba62473..e34c6e567bd8e 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs @@ -19,7 +19,7 @@ use rustc::middle::region; use rustc::ty; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; type R = Result<(),()>; diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 7f814f5dfaa91..c982fc091d24c 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -25,8 +25,8 @@ use rustc::middle::region; use rustc::ty::{self, TyCtxt}; use syntax::ast; -use syntax::codemap::Span; use syntax::ast::NodeId; +use syntax_pos::Span; use rustc::hir; use rustc::hir::Expr; use rustc::hir::intravisit; diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index c1e83588570e7..fc17633d63b93 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -14,8 +14,8 @@ use rustc::middle::mem_categorization::Categorization; use rustc::middle::mem_categorization::InteriorOffsetKind as Kind; use rustc::ty; use syntax::ast; -use syntax::codemap; -use 
syntax::errors::DiagnosticBuilder; +use syntax_pos; +use errors::DiagnosticBuilder; use rustc::hir; pub struct MoveErrorCollector<'tcx> { @@ -56,7 +56,7 @@ impl<'tcx> MoveError<'tcx> { #[derive(Clone)] pub struct MoveSpanAndPath { - pub span: codemap::Span, + pub span: syntax_pos::Span, pub name: ast::Name, } @@ -169,7 +169,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, } fn note_move_destination(mut err: DiagnosticBuilder, - move_to_span: codemap::Span, + move_to_span: syntax_pos::Span, pat_name: ast::Name, is_first_note: bool) -> DiagnosticBuilder { if is_first_note { diff --git a/src/librustc_borrowck/borrowck/gather_loans/restrictions.rs b/src/librustc_borrowck/borrowck/gather_loans/restrictions.rs index ee8c3aff2ec80..3d9df4c8bd008 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/restrictions.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/restrictions.rs @@ -15,7 +15,7 @@ use rustc::middle::expr_use_visitor as euv; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; use rustc::ty; -use syntax::codemap::Span; +use syntax_pos::Span; use borrowck::ToInteriorKind; diff --git a/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs b/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs index c8d3ff01b6c1a..d59bdf93f3225 100644 --- a/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs +++ b/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs @@ -10,7 +10,7 @@ use syntax::abi::{Abi}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::ty::{self, TyCtxt}; use rustc::mir::repr::{self, Mir}; diff --git a/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs b/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs index 065cbbf76b384..623ea60c5a6d8 100644 --- a/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs +++ b/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs @@ -23,7 +23,7 @@ use rustc::middle::const_val::ConstVal; use rustc::middle::lang_items; use rustc::util::nodemap::FnvHashMap; use rustc_data_structures::indexed_vec::Idx; -use syntax::codemap::Span; +use syntax_pos::Span; use std::fmt; use std::u32; diff --git a/src/librustc_borrowck/borrowck/mir/mod.rs b/src/librustc_borrowck/borrowck/mir/mod.rs index 36f7c11c8cb81..7481b15685e69 100644 --- a/src/librustc_borrowck/borrowck/mir/mod.rs +++ b/src/librustc_borrowck/borrowck/mir/mod.rs @@ -12,8 +12,8 @@ use borrowck::BorrowckCtxt; use syntax::ast::{self, MetaItem}; use syntax::attr::AttrMetaMethods; -use syntax::codemap::{Span, DUMMY_SP}; use syntax::ptr::P; +use syntax_pos::{Span, DUMMY_SP}; use rustc::hir; use rustc::hir::intravisit::{FnKind}; diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 5acbb18a2ffee..e86120b73bf97 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -43,8 +43,8 @@ use std::mem; use std::rc::Rc; use syntax::ast; use syntax::attr::AttrMetaMethods; -use syntax::codemap::{MultiSpan, Span}; -use syntax::errors::DiagnosticBuilder; +use syntax_pos::{MultiSpan, Span}; +use errors::DiagnosticBuilder; use rustc::hir; use rustc::hir::{FnDecl, Block}; diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index a742260018676..4b8cbbffaa5ef 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -28,7 +28,7 @@ use std::cell::RefCell; use std::rc::Rc; use std::usize; use syntax::ast; 
-use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir; use rustc::hir::intravisit::IdRange; diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs index f3c39bd5fd165..16fefee347269 100644 --- a/src/librustc_borrowck/lib.rs +++ b/src/librustc_borrowck/lib.rs @@ -28,6 +28,8 @@ #![feature(question_mark)] #[macro_use] extern crate log; #[macro_use] extern crate syntax; +extern crate syntax_pos; +extern crate rustc_errors as errors; // for "clarity", rename the graphviz crate to dot; graphviz within `borrowck` // refers to the borrowck-specific graphviz adapter traits. diff --git a/src/librustc_const_eval/Cargo.toml b/src/librustc_const_eval/Cargo.toml index c572284a6bb37..01872bbe3c049 100644 --- a/src/librustc_const_eval/Cargo.toml +++ b/src/librustc_const_eval/Cargo.toml @@ -16,3 +16,4 @@ rustc_back = { path = "../librustc_back" } rustc_const_math = { path = "../librustc_const_math" } syntax = { path = "../libsyntax" } graphviz = { path = "../libgraphviz" } +syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file diff --git a/src/librustc_const_eval/check_match.rs b/src/librustc_const_eval/check_match.rs index dbca15ffd34b9..c878edcd4b2a8 100644 --- a/src/librustc_const_eval/check_match.rs +++ b/src/librustc_const_eval/check_match.rs @@ -37,7 +37,8 @@ use rustc::hir::intravisit::{self, IdVisitor, IdVisitingOperation, Visitor, FnKi use rustc_back::slice; use syntax::ast::{self, DUMMY_NODE_ID, NodeId}; -use syntax::codemap::{Span, Spanned, DUMMY_SP}; +use syntax::codemap::Spanned; +use syntax_pos::{Span, DUMMY_SP}; use rustc::hir::fold::{Folder, noop_fold_pat}; use rustc::hir::print::pat_to_string; use syntax::ptr::P; @@ -451,7 +452,7 @@ fn const_val_to_expr(value: &ConstVal) -> P { id: 0, node: hir::ExprLit(P(Spanned { node: node, span: DUMMY_SP })), span: DUMMY_SP, - attrs: None, + attrs: ast::ThinVec::new(), }) } diff --git a/src/librustc_const_eval/eval.rs b/src/librustc_const_eval/eval.rs index 34dce44004823..c03903515abe6 100644 --- a/src/librustc_const_eval/eval.rs +++ b/src/librustc_const_eval/eval.rs @@ -33,10 +33,10 @@ use syntax::ast; use rustc::hir::{Expr, PatKind}; use rustc::hir; use rustc::hir::intravisit::FnKind; -use syntax::codemap::Span; use syntax::ptr::P; use syntax::codemap; use syntax::attr::IntType; +use syntax_pos::{self, Span}; use std::borrow::Cow; use std::cmp::Ordering; @@ -301,7 +301,7 @@ pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let field_pats = try!(fields.iter() .map(|field| Ok(codemap::Spanned { - span: codemap::DUMMY_SP, + span: syntax_pos::DUMMY_SP, node: hir::FieldPat { name: field.name.node, pat: try!(const_expr_to_pat(tcx, &field.expr, diff --git a/src/librustc_const_eval/lib.rs b/src/librustc_const_eval/lib.rs index 9ab6a437a5ab2..2da9a55f1fd44 100644 --- a/src/librustc_const_eval/lib.rs +++ b/src/librustc_const_eval/lib.rs @@ -38,7 +38,7 @@ extern crate rustc_back; extern crate rustc_const_math; extern crate graphviz; - +extern crate syntax_pos; extern crate serialize as rustc_serialize; // used by deriving // NB: This module needs to be declared first so diagnostics are diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 4533946d26ea4..54c62d3665994 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -17,6 +17,7 @@ rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_borrowck = { path = "../librustc_borrowck" } rustc_const_eval = { path = "../librustc_const_eval" } +rustc_errors = { path = 
"../librustc_errors" } rustc_lint = { path = "../librustc_lint" } rustc_llvm = { path = "../librustc_llvm" } rustc_mir = { path = "../librustc_mir" } @@ -32,3 +33,4 @@ rustc_metadata = { path = "../librustc_metadata" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } syntax_ext = { path = "../libsyntax_ext" } +syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 927953b034ba2..eb442c0a34e74 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -763,6 +763,9 @@ pub fn phase_2_configure_and_expand<'a>(sess: &Session, } pub fn assign_node_ids(sess: &Session, krate: ast::Crate) -> ast::Crate { + use syntax::ptr::P; + use syntax::util::move_map::MoveMap; + struct NodeIdAssigner<'a> { sess: &'a Session, } @@ -772,6 +775,27 @@ pub fn assign_node_ids(sess: &Session, krate: ast::Crate) -> ast::Crate { assert_eq!(old_id, ast::DUMMY_NODE_ID); self.sess.next_node_id() } + + fn fold_block(&mut self, block: P) -> P { + block.map(|mut block| { + block.id = self.new_id(block.id); + + let stmt = block.stmts.pop(); + block.stmts = block.stmts.move_flat_map(|s| self.fold_stmt(s).into_iter()); + if let Some(ast::Stmt { node: ast::StmtKind::Expr(expr), span, .. }) = stmt { + let expr = self.fold_expr(expr); + block.stmts.push(ast::Stmt { + id: expr.id, + node: ast::StmtKind::Expr(expr), + span: span, + }); + } else if let Some(stmt) = stmt { + block.stmts.extend(self.fold_stmt(stmt)); + } + + block + }) + } } let krate = time(sess.time_passes(), diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index bd6b0599e7832..32eb3fdd71092 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -42,6 +42,7 @@ extern crate rustc; extern crate rustc_back; extern crate rustc_borrowck; extern crate rustc_const_eval; +extern crate rustc_errors as errors; extern crate rustc_passes; extern crate rustc_lint; extern crate rustc_plugin; @@ -60,6 +61,7 @@ extern crate log; #[macro_use] extern crate syntax; extern crate syntax_ext; +extern crate syntax_pos; use driver::CompileController; use pretty::{PpMode, UserIdentifiedItem}; @@ -92,11 +94,12 @@ use std::thread; use rustc::session::early_error; -use syntax::{ast, errors, diagnostics}; -use syntax::codemap::{CodeMap, FileLoader, RealFileLoader, MultiSpan}; -use syntax::errors::emitter::Emitter; +use syntax::{ast, json}; +use syntax::codemap::{CodeMap, FileLoader, RealFileLoader}; use syntax::feature_gate::{GatedCfg, UnstableFeatures}; use syntax::parse::{self, PResult, token}; +use syntax_pos::MultiSpan; +use errors::emitter::Emitter; #[cfg(test)] pub mod test; @@ -290,7 +293,7 @@ pub trait CompilerCalls<'a> { fn early_callback(&mut self, _: &getopts::Matches, _: &config::Options, - _: &diagnostics::registry::Registry, + _: &errors::registry::Registry, _: ErrorOutputType) -> Compilation { Compilation::Continue @@ -329,7 +332,7 @@ pub trait CompilerCalls<'a> { _: &config::Options, _: &Option, _: &Option, - _: &diagnostics::registry::Registry) + _: &errors::registry::Registry) -> Option<(Input, Option)> { None } @@ -344,7 +347,7 @@ pub trait CompilerCalls<'a> { pub struct RustcDefaultCalls; fn handle_explain(code: &str, - descriptions: &diagnostics::registry::Registry, + descriptions: &errors::registry::Registry, output: ErrorOutputType) { let normalised = if code.starts_with("E") { code.to_string() @@ -374,7 +377,7 @@ fn check_cfg(sopts: &config::Options, 
config::ErrorOutputType::HumanReadable(color_config) => { Box::new(errors::emitter::BasicEmitter::stderr(color_config)) } - config::ErrorOutputType::Json => Box::new(errors::json::JsonEmitter::basic()), + config::ErrorOutputType::Json => Box::new(json::JsonEmitter::basic()), }; let mut saw_invalid_predicate = false; @@ -401,7 +404,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { fn early_callback(&mut self, matches: &getopts::Matches, sopts: &config::Options, - descriptions: &diagnostics::registry::Registry, + descriptions: &errors::registry::Registry, output: ErrorOutputType) -> Compilation { if let Some(ref code) = matches.opt_str("explain") { @@ -418,7 +421,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { sopts: &config::Options, odir: &Option, ofile: &Option, - descriptions: &diagnostics::registry::Registry) + descriptions: &errors::registry::Registry) -> Option<(Input, Option)> { match matches.free.len() { 0 => { @@ -1081,8 +1084,8 @@ fn exit_on_err() -> ! { panic!(); } -pub fn diagnostics_registry() -> diagnostics::registry::Registry { - use syntax::diagnostics::registry::Registry; +pub fn diagnostics_registry() -> errors::registry::Registry { + use errors::registry::Registry; let mut all_errors = Vec::new(); all_errors.extend_from_slice(&rustc::DIAGNOSTICS); diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 0a093887c5094..baac455a25f33 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -31,12 +31,12 @@ use rustc_mir::pretty::write_mir_pretty; use rustc_mir::graphviz::write_mir_graphviz; use syntax::ast::{self, BlockCheckMode}; -use syntax::codemap; use syntax::fold::{self, Folder}; use syntax::print::{pp, pprust}; use syntax::print::pprust::PrintState; use syntax::ptr::P; use syntax::util::small_vector::SmallVector; +use syntax_pos; use graphviz as dot; @@ -657,11 +657,14 @@ impl fold::Folder for ReplaceBodyWithLoop { fn fold_block(&mut self, b: P) -> P { fn expr_to_block(rules: ast::BlockCheckMode, e: Option>) -> P { P(ast::Block { - expr: e, - stmts: vec![], + stmts: e.map(|e| ast::Stmt { + id: ast::DUMMY_NODE_ID, + span: e.span, + node: ast::StmtKind::Expr(e), + }).into_iter().collect(), rules: rules, id: ast::DUMMY_NODE_ID, - span: codemap::DUMMY_SP, + span: syntax_pos::DUMMY_SP, }) } @@ -671,8 +674,8 @@ impl fold::Folder for ReplaceBodyWithLoop { let loop_expr = P(ast::Expr { node: ast::ExprKind::Loop(empty_block, None), id: ast::DUMMY_NODE_ID, - span: codemap::DUMMY_SP, - attrs: None, + span: syntax_pos::DUMMY_SP, + attrs: ast::ThinVec::new(), }); expr_to_block(b.rules, Some(loop_expr)) diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index c147a534e5ea7..0f5977cf06618 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -31,12 +31,13 @@ use rustc::session::{self, config}; use std::rc::Rc; use syntax::ast; use syntax::abi::Abi; -use syntax::codemap::{CodeMap, DUMMY_SP}; -use syntax::errors; -use syntax::errors::emitter::{CoreEmitter, Emitter}; -use syntax::errors::{Level, RenderSpan}; +use syntax::codemap::CodeMap; +use errors; +use errors::emitter::{CoreEmitter, Emitter}; +use errors::{Level, RenderSpan}; use syntax::parse::token; use syntax::feature_gate::UnstableFeatures; +use syntax_pos::DUMMY_SP; use rustc::hir; diff --git a/src/librustc_errors/Cargo.toml b/src/librustc_errors/Cargo.toml new file mode 100644 index 0000000000000..128c270eb359c --- /dev/null +++ b/src/librustc_errors/Cargo.toml @@ -0,0 +1,14 @@ +[package] +authors = ["The Rust Project 
Developers"] +name = "rustc_errors" +version = "0.0.0" + +[lib] +name = "rustc_errors" +path = "lib.rs" +crate-type = ["dylib"] + +[dependencies] +log = { path = "../liblog" } +serialize = { path = "../libserialize" } +syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file diff --git a/src/libsyntax/errors/emitter.rs b/src/librustc_errors/emitter.rs similarity index 67% rename from src/libsyntax/errors/emitter.rs rename to src/librustc_errors/emitter.rs index 71a03e846a2b5..a7c68e3a87b31 100644 --- a/src/libsyntax/errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -10,14 +10,14 @@ use self::Destination::*; -use codemap::{self, COMMAND_LINE_SP, DUMMY_SP, Span, MultiSpan}; -use diagnostics; +use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, Span, MultiSpan, LineInfo}; +use registry; -use errors::check_old_skool; -use errors::{Level, RenderSpan, CodeSuggestion, DiagnosticBuilder}; -use errors::RenderSpan::*; -use errors::Level::*; -use errors::snippet::{RenderedLineKind, SnippetData, Style}; +use check_old_skool; +use {Level, RenderSpan, CodeSuggestion, DiagnosticBuilder, CodeMapper}; +use RenderSpan::*; +use Level::*; +use snippet::{RenderedLineKind, SnippetData, Style, FormatMode}; use std::{cmp, fmt}; use std::io::prelude::*; @@ -151,15 +151,15 @@ impl BasicEmitter { pub struct EmitterWriter { dst: Destination, - registry: Option, - cm: Rc, + registry: Option, + cm: Rc, /// Is this the first error emitted thus far? If not, we emit a /// `\n` before the top-level errors. first: bool, // For now, allow an old-school mode while we transition - old_school: bool, + format_mode: FormatMode } impl CoreEmitter for EmitterWriter { @@ -193,36 +193,36 @@ macro_rules! println_maybe_styled { impl EmitterWriter { pub fn stderr(color_config: ColorConfig, - registry: Option, - code_map: Rc) + registry: Option, + code_map: Rc, + format_mode: FormatMode) -> EmitterWriter { - let old_school = check_old_skool(); if color_config.use_color() { let dst = Destination::from_stderr(); EmitterWriter { dst: dst, registry: registry, cm: code_map, first: true, - old_school: old_school } + format_mode: format_mode.clone() } } else { EmitterWriter { dst: Raw(Box::new(io::stderr())), registry: registry, cm: code_map, first: true, - old_school: old_school } + format_mode: format_mode.clone() } } } pub fn new(dst: Box, - registry: Option, - code_map: Rc) + registry: Option, + code_map: Rc, + format_mode: FormatMode) -> EmitterWriter { - let old_school = check_old_skool(); EmitterWriter { dst: Raw(dst), registry: registry, cm: code_map, first: true, - old_school: old_school } + format_mode: format_mode.clone() } } fn emit_message_(&mut self, @@ -233,11 +233,17 @@ impl EmitterWriter { is_header: bool, show_snippet: bool) -> io::Result<()> { + let old_school = match self.format_mode { + FormatMode::NewErrorFormat => false, + FormatMode::OriginalErrorFormat => true, + FormatMode::EnvironmentSelected => check_old_skool() + }; + if is_header { if self.first { self.first = false; } else { - if !self.old_school { + if !old_school { write!(self.dst, "\n")?; } } @@ -248,7 +254,7 @@ impl EmitterWriter { .and_then(|registry| registry.find_description(code)) .is_some() => { let code_with_explain = String::from("--explain ") + code; - if self.old_school { + if old_school { let loc = match rsp.span().primary_span() { Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(), Some(ps) => self.cm.span_to_string(ps), @@ -261,7 +267,7 @@ impl EmitterWriter { } } _ => { - if self.old_school { + if old_school { let loc = match 
rsp.span().primary_span() { Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(), Some(ps) => self.cm.span_to_string(ps), @@ -303,7 +309,7 @@ impl EmitterWriter { } } } - if self.old_school { + if old_school { match code { Some(code) if self.registry.as_ref() .and_then(|registry| registry.find_description(code)) @@ -326,11 +332,13 @@ impl EmitterWriter { fn highlight_suggestion(&mut self, suggestion: &CodeSuggestion) -> io::Result<()> { + use std::borrow::Borrow; + let primary_span = suggestion.msp.primary_span().unwrap(); let lines = self.cm.span_to_lines(primary_span).unwrap(); assert!(!lines.lines.is_empty()); - let complete = suggestion.splice_lines(&self.cm); + let complete = suggestion.splice_lines(self.cm.borrow()); let line_count = cmp::min(lines.lines.len(), MAX_HIGHLIGHT_LINES); let display_lines = &lines.lines[..line_count]; @@ -356,19 +364,27 @@ impl EmitterWriter { Ok(()) } - fn highlight_lines(&mut self, + pub fn highlight_lines(&mut self, msp: &MultiSpan, lvl: Level) -> io::Result<()> { + let old_school = match self.format_mode { + FormatMode::NewErrorFormat => false, + FormatMode::OriginalErrorFormat => true, + FormatMode::EnvironmentSelected => check_old_skool() + }; + let mut snippet_data = SnippetData::new(self.cm.clone(), - msp.primary_span()); - if self.old_school { + msp.primary_span(), + self.format_mode.clone()); + if old_school { let mut output_vec = vec![]; for span_label in msp.span_labels() { let mut snippet_data = SnippetData::new(self.cm.clone(), - Some(span_label.span)); + Some(span_label.span), + self.format_mode.clone()); snippet_data.push(span_label.span, span_label.is_primary, @@ -430,7 +446,7 @@ impl EmitterWriter { } } -fn line_num_max_digits(line: &codemap::LineInfo) -> usize { +fn line_num_max_digits(line: &LineInfo) -> usize { let mut max_line_num = line.line_index + 1; let mut digits = 0; while max_line_num > 0 { @@ -617,255 +633,3 @@ impl Write for Destination { } } } - - -#[cfg(test)] -mod test { - use errors::{Level, CodeSuggestion}; - use super::EmitterWriter; - use codemap::{mk_sp, CodeMap, Span, MultiSpan, BytePos, NO_EXPANSION}; - use std::sync::{Arc, Mutex}; - use std::io::{self, Write}; - use std::str::from_utf8; - use std::rc::Rc; - - struct Sink(Arc>>); - impl Write for Sink { - fn write(&mut self, data: &[u8]) -> io::Result { - Write::write(&mut *self.0.lock().unwrap(), data) - } - fn flush(&mut self) -> io::Result<()> { Ok(()) } - } - - /// Given a string like " ^~~~~~~~~~~~ ", produces a span - /// coverting that range. The idea is that the string has the same - /// length as the input, and we uncover the byte positions. Note - /// that this can span lines and so on. 
- fn span_from_selection(input: &str, selection: &str) -> Span { - assert_eq!(input.len(), selection.len()); - let left_index = selection.find('~').unwrap() as u32; - let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); - Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION } - } - - // Diagnostic doesn't align properly in span where line number increases by one digit - #[test] - fn test_hilight_suggestion_issue_11715() { - let data = Arc::new(Mutex::new(Vec::new())); - let cm = Rc::new(CodeMap::new()); - let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone()); - let content = "abcdefg - koksi - line3 - line4 - cinq - line6 - line7 - line8 - line9 - line10 - e-lä-vän - tolv - dreizehn - "; - let file = cm.new_filemap_and_lines("dummy.txt", None, content); - let start = file.lines.borrow()[10]; - let end = file.lines.borrow()[11]; - let sp = mk_sp(start, end); - let lvl = Level::Error; - println!("highlight_lines"); - ew.highlight_lines(&sp.into(), lvl).unwrap(); - println!("done"); - let vec = data.lock().unwrap().clone(); - let vec: &[u8] = &vec; - let str = from_utf8(vec).unwrap(); - println!("r#\"\n{}\"#", str); - assert_eq!(str, &r#" - --> dummy.txt:11:1 - |> -11 |> e-lä-vän - |> ^ -"#[1..]); - } - - #[test] - fn test_single_span_splice() { - // Test that a `MultiSpan` containing a single span splices a substition correctly - let cm = CodeMap::new(); - let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; - let selection = " \n ~~\n~~~\n~~~~~ \n \n"; - cm.new_filemap_and_lines("blork.rs", None, inputtext); - let sp = span_from_selection(inputtext, selection); - let msp: MultiSpan = sp.into(); - - // check that we are extracting the text we thought we were extracting - assert_eq!(&cm.span_to_snippet(sp).unwrap(), "BB\nCCC\nDDDDD"); - - let substitute = "ZZZZZZ".to_owned(); - let expected = "bbbbZZZZZZddddd"; - let suggest = CodeSuggestion { - msp: msp, - substitutes: vec![substitute], - }; - assert_eq!(suggest.splice_lines(&cm), expected); - } - - #[test] - fn test_multi_span_splice() { - // Test that a `MultiSpan` containing multiple spans splices a substition correctly - let cm = CodeMap::new(); - let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; - let selection1 = " \n \n \n \n ~ \n"; // intentionally out of order - let selection2 = " \n ~~\n~~~\n~~~~~ \n \n"; - cm.new_filemap_and_lines("blork.rs", None, inputtext); - let sp1 = span_from_selection(inputtext, selection1); - let sp2 = span_from_selection(inputtext, selection2); - let msp: MultiSpan = MultiSpan::from_spans(vec![sp1, sp2]); - - let expected = "bbbbZZZZZZddddd\neXYZe"; - let suggest = CodeSuggestion { - msp: msp, - substitutes: vec!["ZZZZZZ".to_owned(), - "XYZ".to_owned()] - }; - - assert_eq!(suggest.splice_lines(&cm), expected); - } - - #[test] - fn test_multispan_highlight() { - let data = Arc::new(Mutex::new(Vec::new())); - let cm = Rc::new(CodeMap::new()); - let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone()); - - let inp = "_____aaaaaa____bbbbbb__cccccdd_"; - let sp1 = " ~~~~~~ "; - let sp2 = " ~~~~~~ "; - let sp3 = " ~~~~~ "; - let sp4 = " ~~~~ "; - let sp34 = " ~~~~~~~ "; - - let expect_start = &r#" - --> dummy.txt:1:6 - |> -1 |> _____aaaaaa____bbbbbb__cccccdd_ - |> ^^^^^^ ^^^^^^ ^^^^^^^ -"#[1..]; - - let span = |sp, expected| { - let sp = span_from_selection(inp, sp); - assert_eq!(&cm.span_to_snippet(sp).unwrap(), expected); - sp - }; - cm.new_filemap_and_lines("dummy.txt", None, inp); - let sp1 
= span(sp1, "aaaaaa"); - let sp2 = span(sp2, "bbbbbb"); - let sp3 = span(sp3, "ccccc"); - let sp4 = span(sp4, "ccdd"); - let sp34 = span(sp34, "cccccdd"); - - let spans = vec![sp1, sp2, sp3, sp4]; - - let test = |expected, highlight: &mut FnMut()| { - data.lock().unwrap().clear(); - highlight(); - let vec = data.lock().unwrap().clone(); - let actual = from_utf8(&vec[..]).unwrap(); - println!("actual=\n{}", actual); - assert_eq!(actual, expected); - }; - - let msp = MultiSpan::from_spans(vec![sp1, sp2, sp34]); - test(expect_start, &mut || { - diag.highlight_lines(&msp, Level::Error).unwrap(); - }); - test(expect_start, &mut || { - let msp = MultiSpan::from_spans(spans.clone()); - diag.highlight_lines(&msp, Level::Error).unwrap(); - }); - } - - #[test] - fn test_huge_multispan_highlight() { - let data = Arc::new(Mutex::new(Vec::new())); - let cm = Rc::new(CodeMap::new()); - let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone()); - - let inp = "aaaaa\n\ - aaaaa\n\ - aaaaa\n\ - bbbbb\n\ - ccccc\n\ - xxxxx\n\ - yyyyy\n\ - _____\n\ - ddd__eee_\n\ - elided\n\ - __f_gg"; - let file = cm.new_filemap_and_lines("dummy.txt", None, inp); - - let span = |lo, hi, (off_lo, off_hi)| { - let lines = file.lines.borrow(); - let (mut lo, mut hi): (BytePos, BytePos) = (lines[lo], lines[hi]); - lo.0 += off_lo; - hi.0 += off_hi; - mk_sp(lo, hi) - }; - let sp0 = span(4, 6, (0, 5)); - let sp1 = span(0, 6, (0, 5)); - let sp2 = span(8, 8, (0, 3)); - let sp3 = span(8, 8, (5, 8)); - let sp4 = span(10, 10, (2, 3)); - let sp5 = span(10, 10, (4, 6)); - - let expect0 = &r#" - --> dummy.txt:5:1 - |> -5 |> ccccc - |> ^ -... -9 |> ddd__eee_ - |> ^^^ ^^^ -10 |> elided -11 |> __f_gg - |> ^ ^^ -"#[1..]; - - let expect = &r#" - --> dummy.txt:1:1 - |> -1 |> aaaaa - |> ^ -... -9 |> ddd__eee_ - |> ^^^ ^^^ -10 |> elided -11 |> __f_gg - |> ^ ^^ -"#[1..]; - - macro_rules! test { - ($expected: expr, $highlight: expr) => ({ - data.lock().unwrap().clear(); - $highlight(); - let vec = data.lock().unwrap().clone(); - let actual = from_utf8(&vec[..]).unwrap(); - println!("actual:"); - println!("{}", actual); - println!("expected:"); - println!("{}", $expected); - assert_eq!(&actual[..], &$expected[..]); - }); - } - - let msp0 = MultiSpan::from_spans(vec![sp0, sp2, sp3, sp4, sp5]); - let msp = MultiSpan::from_spans(vec![sp1, sp2, sp3, sp4, sp5]); - - test!(expect0, || { - diag.highlight_lines(&msp0, Level::Error).unwrap(); - }); - test!(expect, || { - diag.highlight_lines(&msp, Level::Error).unwrap(); - }); - } -} diff --git a/src/libsyntax/errors/mod.rs b/src/librustc_errors/lib.rs similarity index 92% rename from src/libsyntax/errors/mod.rs rename to src/librustc_errors/lib.rs index f06672fe111bf..18fc826f9aa4b 100644 --- a/src/libsyntax/errors/mod.rs +++ b/src/librustc_errors/lib.rs @@ -8,24 +8,50 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-pub use errors::emitter::ColorConfig; +#![crate_name = "rustc_errors"] +#![unstable(feature = "rustc_private", issue = "27812")] +#![crate_type = "dylib"] +#![crate_type = "rlib"] +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(custom_attribute)] +#![allow(unused_attributes)] +#![feature(rustc_private)] +#![feature(staged_api)] +#![feature(question_mark)] +#![feature(range_contains)] +#![feature(libc)] +#![feature(unicode)] + +extern crate serialize; +extern crate term; +#[macro_use] extern crate log; +#[macro_use] extern crate libc; +extern crate rustc_unicode; +extern crate serialize as rustc_serialize; // used by deriving +extern crate syntax_pos; + +pub use emitter::ColorConfig; use self::Level::*; use self::RenderSpan::*; -use codemap::{self, CodeMap, MultiSpan, NO_EXPANSION, Span}; -use diagnostics; -use errors::emitter::{Emitter, EmitterWriter}; +use emitter::{Emitter, EmitterWriter}; use std::cell::{RefCell, Cell}; use std::{error, fmt}; use std::rc::Rc; use std::thread::panicking; -use term; pub mod emitter; -pub mod json; pub mod snippet; +pub mod registry; + +use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION }; +use syntax_pos::{MacroBacktrace}; #[derive(Clone)] pub enum RenderSpan { @@ -43,8 +69,16 @@ pub enum RenderSpan { #[derive(Clone)] pub struct CodeSuggestion { - msp: MultiSpan, - substitutes: Vec, + pub msp: MultiSpan, + pub substitutes: Vec, +} + +pub trait CodeMapper { + fn lookup_char_pos(&self, pos: BytePos) -> Loc; + fn span_to_lines(&self, sp: Span) -> FileLinesResult; + fn span_to_string(&self, sp: Span) -> String; + fn span_to_filename(&self, sp: Span) -> FileName; + fn macro_backtrace(&self, span: Span) -> Vec; } impl RenderSpan { @@ -59,8 +93,8 @@ impl RenderSpan { impl CodeSuggestion { /// Returns the assembled code suggestion. - pub fn splice_lines(&self, cm: &CodeMap) -> String { - use codemap::{CharPos, Loc, Pos}; + pub fn splice_lines(&self, cm: &CodeMapper) -> String { + use syntax_pos::{CharPos, Loc, Pos}; fn push_trailing(buf: &mut String, line_opt: Option<&str>, lo: &Loc, hi_opt: Option<&Loc>) { @@ -181,20 +215,20 @@ impl error::Error for ExplicitBug { #[derive(Clone)] pub struct DiagnosticBuilder<'a> { handler: &'a Handler, - level: Level, - message: String, - code: Option, - span: MultiSpan, - children: Vec, + pub level: Level, + pub message: String, + pub code: Option, + pub span: MultiSpan, + pub children: Vec, } /// For example a note attached to an error. 
#[derive(Clone)] -struct SubDiagnostic { - level: Level, - message: String, - span: MultiSpan, - render_span: Option, +pub struct SubDiagnostic { + pub level: Level, + pub message: String, + pub span: MultiSpan, + pub render_span: Option, } impl<'a> DiagnosticBuilder<'a> { @@ -386,12 +420,13 @@ pub struct Handler { impl Handler { pub fn with_tty_emitter(color_config: ColorConfig, - registry: Option, + registry: Option, can_emit_warnings: bool, treat_err_as_bug: bool, - cm: Rc) + cm: Rc) -> Handler { - let emitter = Box::new(EmitterWriter::stderr(color_config, registry, cm)); + let emitter = Box::new(EmitterWriter::stderr(color_config, registry, cm, + snippet::FormatMode::EnvironmentSelected)); Handler::with_emitter(can_emit_warnings, treat_err_as_bug, emitter) } @@ -662,7 +697,7 @@ impl fmt::Display for Level { } impl Level { - fn color(self) -> term::color::Color { + pub fn color(self) -> term::color::Color { match self { Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED, Warning => term::color::YELLOW, @@ -672,7 +707,7 @@ impl Level { } } - fn to_str(self) -> &'static str { + pub fn to_str(self) -> &'static str { match self { Bug => "error: internal compiler error", Fatal | PhaseFatal | Error => "error", diff --git a/src/libsyntax/diagnostics/registry.rs b/src/librustc_errors/registry.rs similarity index 100% rename from src/libsyntax/diagnostics/registry.rs rename to src/librustc_errors/registry.rs diff --git a/src/libsyntax/errors/snippet/mod.rs b/src/librustc_errors/snippet.rs similarity index 95% rename from src/libsyntax/errors/snippet/mod.rs rename to src/librustc_errors/snippet.rs index 2a43a14ddf873..33f40ffc71a9f 100644 --- a/src/libsyntax/errors/snippet/mod.rs +++ b/src/librustc_errors/snippet.rs @@ -10,18 +10,25 @@ // Code for annotating snippets. -use codemap::{CharPos, CodeMap, FileMap, LineInfo, Span}; -use errors::check_old_skool; +use syntax_pos::{Span, FileMap, CharPos, LineInfo}; +use check_old_skool; +use CodeMapper; use std::cmp; use std::rc::Rc; use std::mem; -mod test; +#[derive(Clone)] +pub enum FormatMode { + NewErrorFormat, + OriginalErrorFormat, + EnvironmentSelected +} #[derive(Clone)] pub struct SnippetData { - codemap: Rc, + codemap: Rc, files: Vec, + format_mode: FormatMode, } #[derive(Clone)] @@ -36,6 +43,10 @@ pub struct FileInfo { primary_span: Option, lines: Vec, + + /// The type of error format to render. 
We keep it here so that + /// it's easy to configure for both tests and regular usage + format_mode: FormatMode, } #[derive(Clone, Debug)] @@ -111,8 +122,9 @@ pub enum RenderedLineKind { } impl SnippetData { - pub fn new(codemap: Rc, - primary_span: Option) // (*) + pub fn new(codemap: Rc, + primary_span: Option, + format_mode: FormatMode) // (*) -> Self { // (*) The primary span indicates the file that must appear // first, and which will have a line number etc in its @@ -126,7 +138,8 @@ impl SnippetData { let mut data = SnippetData { codemap: codemap.clone(), - files: vec![] + files: vec![], + format_mode: format_mode.clone() }; if let Some(primary_span) = primary_span { let lo = codemap.lookup_char_pos(primary_span.lo); @@ -135,6 +148,7 @@ impl SnippetData { file: lo.file, primary_span: Some(primary_span), lines: vec![], + format_mode: format_mode.clone(), }); } data @@ -167,6 +181,7 @@ impl SnippetData { file: file_map.clone(), lines: vec![], primary_span: None, + format_mode: self.format_mode.clone() }); self.files.last_mut().unwrap() } @@ -178,7 +193,7 @@ impl SnippetData { self.files.iter() .flat_map(|f| f.render_file_lines(&self.codemap)) .collect(); - prepend_prefixes(&mut rendered_lines); + prepend_prefixes(&mut rendered_lines, &self.format_mode); trim_lines(&mut rendered_lines); rendered_lines } @@ -454,8 +469,12 @@ impl FileInfo { return line_index - first_line_index; } - fn render_file_lines(&self, codemap: &Rc) -> Vec { - let old_school = check_old_skool(); + fn render_file_lines(&self, codemap: &Rc) -> Vec { + let old_school = match self.format_mode { + FormatMode::OriginalErrorFormat => true, + FormatMode::NewErrorFormat => false, + FormatMode::EnvironmentSelected => check_old_skool() + }; // As a first step, we elide any instance of more than one // continuous unannotated line. 
@@ -591,7 +610,12 @@ impl FileInfo { } fn render_line(&self, line: &Line) -> Vec { - let old_school = check_old_skool(); + let old_school = match self.format_mode { + FormatMode::OriginalErrorFormat => true, + FormatMode::NewErrorFormat => false, + FormatMode::EnvironmentSelected => check_old_skool() + }; + let source_string = self.file.get_line(line.line_index) .unwrap_or(""); let source_kind = RenderedLineKind::SourceText { @@ -776,8 +800,12 @@ impl FileInfo { } } -fn prepend_prefixes(rendered_lines: &mut [RenderedLine]) { - let old_school = check_old_skool(); +fn prepend_prefixes(rendered_lines: &mut [RenderedLine], format_mode: &FormatMode) { + let old_school = match *format_mode { + FormatMode::OriginalErrorFormat => true, + FormatMode::NewErrorFormat => false, + FormatMode::EnvironmentSelected => check_old_skool() + }; if old_school { return; } diff --git a/src/librustc_incremental/Cargo.toml b/src/librustc_incremental/Cargo.toml index 927e4126472f6..7db1a6348b27c 100644 --- a/src/librustc_incremental/Cargo.toml +++ b/src/librustc_incremental/Cargo.toml @@ -16,3 +16,4 @@ rustc_data_structures = { path = "../librustc_data_structures" } serialize = { path = "../libserialize" } log = { path = "../liblog" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file diff --git a/src/librustc_incremental/assert_dep_graph.rs b/src/librustc_incremental/assert_dep_graph.rs index 1c0274cdcca9a..d38f979e33c5a 100644 --- a/src/librustc_incremental/assert_dep_graph.rs +++ b/src/librustc_incremental/assert_dep_graph.rs @@ -57,8 +57,8 @@ use std::fs::File; use std::io::Write; use syntax::ast; use syntax::attr::AttrMetaMethods; -use syntax::codemap::Span; use syntax::parse::token::InternedString; +use syntax_pos::Span; const IF_THIS_CHANGED: &'static str = "rustc_if_this_changed"; const THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need"; diff --git a/src/librustc_incremental/calculate_svh.rs b/src/librustc_incremental/calculate_svh.rs index a039467c8afc0..cbc246ac2a11b 100644 --- a/src/librustc_incremental/calculate_svh.rs +++ b/src/librustc_incremental/calculate_svh.rs @@ -114,8 +114,8 @@ mod svh_visitor { pub use self::SawStmtComponent::*; use self::SawAbiComponent::*; use syntax::ast::{self, Name, NodeId}; - use syntax::codemap::Span; use syntax::parse::token; + use syntax_pos::Span; use rustc::ty::TyCtxt; use rustc::hir; use rustc::hir::*; diff --git a/src/librustc_incremental/lib.rs b/src/librustc_incremental/lib.rs index 005146d91eae4..ed31e0ba51056 100644 --- a/src/librustc_incremental/lib.rs +++ b/src/librustc_incremental/lib.rs @@ -30,6 +30,7 @@ extern crate serialize as rustc_serialize; #[macro_use] extern crate log; #[macro_use] extern crate syntax; +extern crate syntax_pos; mod assert_dep_graph; mod calculate_svh; diff --git a/src/librustc_lint/Cargo.toml b/src/librustc_lint/Cargo.toml index 7674cc529bef8..4d5c0d7ba0ae1 100644 --- a/src/librustc_lint/Cargo.toml +++ b/src/librustc_lint/Cargo.toml @@ -15,3 +15,4 @@ rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_const_eval = { path = "../librustc_const_eval" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_lint/bad_style.rs b/src/librustc_lint/bad_style.rs index 54e5824cbc786..7e9b6f561b984 100644 --- a/src/librustc_lint/bad_style.rs +++ b/src/librustc_lint/bad_style.rs @@ -15,7 +15,7 @@ use lint::{LintPass, LateLintPass}; use syntax::ast; use syntax::attr::{self, AttrMetaMethods}; -use 
syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir::{self, PatKind}; use rustc::hir::intravisit::FnKind; diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 3ceca9218bdf5..18f9733040e0f 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -45,7 +45,7 @@ use std::collections::HashSet; use syntax::{ast}; use syntax::attr::{self, AttrMetaMethods}; -use syntax::codemap::{self, Span}; +use syntax_pos::{self, Span}; use rustc::hir::{self, PatKind}; use rustc::hir::intravisit::FnKind; @@ -1140,9 +1140,9 @@ impl LateLintPass for DropWithReprExtern { if hints.iter().any(|attr| *attr == attr::ReprExtern) && self_type_def.dtor_kind().has_drop_flag() { let drop_impl_span = ctx.tcx.map.def_id_span(drop_impl_did, - codemap::DUMMY_SP); + syntax_pos::DUMMY_SP); let self_defn_span = ctx.tcx.map.def_id_span(self_type_did, - codemap::DUMMY_SP); + syntax_pos::DUMMY_SP); ctx.span_lint_note(DROP_WITH_REPR_EXTERN, drop_impl_span, "implementing Drop adds hidden state to types, \ diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 7baadb2b69a5f..4ae5b3afdba19 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -45,6 +45,7 @@ extern crate rustc; extern crate log; extern crate rustc_back; extern crate rustc_const_eval; +extern crate syntax_pos; pub use rustc::lint as lint; pub use rustc::middle as middle; diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index b7f14fffafb77..97f97a889edc3 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -27,7 +27,8 @@ use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64}; use syntax::ast; use syntax::abi::Abi; use syntax::attr; -use syntax::codemap::{self, Span}; +use syntax_pos::Span; +use syntax::codemap; use rustc::hir; diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index b765043da8853..b5b87718d2df7 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -19,9 +19,9 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::ast; use syntax::attr::{self, AttrMetaMethods}; -use syntax::codemap::Span; use syntax::feature_gate::{KNOWN_ATTRIBUTES, AttributeType}; use syntax::ptr::P; +use syntax_pos::Span; use rustc_back::slice; use rustc::hir; @@ -365,12 +365,9 @@ impl EarlyLintPass for UnusedParens { fn check_stmt(&mut self, cx: &EarlyContext, s: &ast::Stmt) { let (value, msg) = match s.node { - ast::StmtKind::Decl(ref decl, _) => match decl.node { - ast::DeclKind::Local(ref local) => match local.init { - Some(ref value) => (value, "assigned value"), - None => return - }, - _ => return + ast::StmtKind::Local(ref local) => match local.init { + Some(ref value) => (value, "assigned value"), + None => return }, _ => return }; diff --git a/src/librustc_metadata/Cargo.toml b/src/librustc_metadata/Cargo.toml index 697cbe39c67fc..11896e176301b 100644 --- a/src/librustc_metadata/Cargo.toml +++ b/src/librustc_metadata/Cargo.toml @@ -16,6 +16,8 @@ rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } +rustc_errors = { path = "../librustc_errors" } rustc_llvm = { path = "../librustc_llvm" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file diff --git a/src/librustc_metadata/astencode.rs b/src/librustc_metadata/astencode.rs index 78825aca4188e..dc37bdf6322af 
100644 --- a/src/librustc_metadata/astencode.rs +++ b/src/librustc_metadata/astencode.rs @@ -37,9 +37,10 @@ use middle::region; use rustc::ty::subst; use rustc::ty::{self, Ty, TyCtxt}; -use syntax::{ast, codemap}; +use syntax::ast; use syntax::ast::NodeIdAssigner; use syntax::ptr::P; +use syntax_pos; use std::cell::Cell; use std::io::SeekFrom; @@ -115,7 +116,7 @@ impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> { fn new_def_id(&self, def_id: DefId) -> DefId { self.tr_def_id(def_id) } - fn new_span(&self, span: codemap::Span) -> codemap::Span { + fn new_span(&self, span: syntax_pos::Span) -> syntax_pos::Span { self.tr_span(span) } } @@ -206,7 +207,7 @@ impl<'a, 'b, 'tcx> DecodeContext<'a, 'b, 'tcx> { /// Translates a `Span` from an extern crate to the corresponding `Span` /// within the local crate's codemap. - pub fn tr_span(&self, span: codemap::Span) -> codemap::Span { + pub fn tr_span(&self, span: syntax_pos::Span) -> syntax_pos::Span { decoder::translate_span(self.cdata, self.tcx.sess.codemap(), &self.last_filemap_index, @@ -226,8 +227,8 @@ impl tr for Option { } } -impl tr for codemap::Span { - fn tr(&self, dcx: &DecodeContext) -> codemap::Span { +impl tr for syntax_pos::Span { + fn tr(&self, dcx: &DecodeContext) -> syntax_pos::Span { dcx.tr_span(*self) } } @@ -1268,7 +1269,7 @@ fn decode_item_ast(item_doc: rbml::Doc) -> hir::Item { #[cfg(test)] trait FakeExtCtxt { - fn call_site(&self) -> codemap::Span; + fn call_site(&self) -> syntax_pos::Span; fn cfg(&self) -> ast::CrateConfig; fn ident_of(&self, st: &str) -> ast::Ident; fn name_of(&self, st: &str) -> ast::Name; @@ -1277,11 +1278,11 @@ trait FakeExtCtxt { #[cfg(test)] impl FakeExtCtxt for parse::ParseSess { - fn call_site(&self) -> codemap::Span { - codemap::Span { - lo: codemap::BytePos(0), - hi: codemap::BytePos(0), - expn_id: codemap::NO_EXPANSION, + fn call_site(&self) -> syntax_pos::Span { + syntax_pos::Span { + lo: syntax_pos::BytePos(0), + hi: syntax_pos::BytePos(0), + expn_id: syntax_pos::NO_EXPANSION, } } fn cfg(&self) -> ast::CrateConfig { Vec::new() } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 6c24384cddc57..2ccac91ae9105 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -34,12 +34,13 @@ use std::fs; use syntax::ast; use syntax::abi::Abi; -use syntax::codemap::{self, Span, mk_sp, Pos}; +use syntax::codemap; use syntax::parse; use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::parse::token::InternedString; use syntax::visit; +use syntax_pos::{self, Span, mk_sp, Pos}; use log; struct LocalCrateReader<'a> { @@ -58,8 +59,8 @@ pub struct CrateReader<'a> { local_crate_name: String, } -impl<'a, 'ast> visit::Visitor<'ast> for LocalCrateReader<'a> { - fn visit_item(&mut self, a: &'ast ast::Item) { +impl<'a> visit::Visitor for LocalCrateReader<'a> { + fn visit_item(&mut self, a: &ast::Item) { self.process_item(a); visit::walk_item(self, a); } @@ -726,7 +727,7 @@ impl<'a> CrateReader<'a> { info!("panic runtime not found -- loading {}", name); let (cnum, data, _) = self.resolve_crate(&None, name, name, None, - codemap::DUMMY_SP, + syntax_pos::DUMMY_SP, PathKind::Crate, false); // Sanity check the loaded crate to ensure it is indeed a panic runtime @@ -807,7 +808,7 @@ impl<'a> CrateReader<'a> { &self.sess.target.target.options.exe_allocation_crate }; let (cnum, data, _) = self.resolve_crate(&None, name, name, None, - codemap::DUMMY_SP, + syntax_pos::DUMMY_SP, PathKind::Crate, false); // Sanity check the crate we 
loaded to ensure that it is indeed an @@ -1076,7 +1077,7 @@ pub fn import_codemap(local_codemap: &codemap::CodeMap, None => { // We can't reuse an existing FileMap, so allocate a new one // containing the information we need. - let codemap::FileMap { + let syntax_pos::FileMap { name, abs_path, start_pos, @@ -1118,8 +1119,8 @@ pub fn import_codemap(local_codemap: &codemap::CodeMap, return imported_filemaps; - fn are_equal_modulo_startpos(fm1: &codemap::FileMap, - fm2: &codemap::FileMap) + fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, + fm2: &syntax_pos::FileMap) -> bool { if fm1.name != fm2.name { return false; diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 2e1bdf21c9a5a..e89f428c96f8c 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -36,6 +36,7 @@ use syntax::ast; use syntax::attr; use syntax::codemap; use syntax::parse::token::IdentInterner; +use syntax_pos; pub use middle::cstore::{NativeLibraryKind, LinkagePreference}; pub use middle::cstore::{NativeStatic, NativeFramework, NativeUnknown}; @@ -52,15 +53,15 @@ pub enum MetadataBlob { MetadataArchive(loader::ArchiveMetadata), } -/// Holds information about a codemap::FileMap imported from another crate. +/// Holds information about a syntax_pos::FileMap imported from another crate. /// See creader::import_codemap() for more information. pub struct ImportedFileMap { /// This FileMap's byte-offset within the codemap of its original crate - pub original_start_pos: codemap::BytePos, + pub original_start_pos: syntax_pos::BytePos, /// The end of this FileMap within the codemap of its original crate - pub original_end_pos: codemap::BytePos, + pub original_end_pos: syntax_pos::BytePos, /// The imported FileMap's representation within the local codemap - pub translated_filemap: Rc + pub translated_filemap: Rc } pub struct crate_metadata { diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 68387941b65ed..3efdf36acd973 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -56,10 +56,10 @@ use syntax::attr; use syntax::parse::token::{self, IdentInterner}; use syntax::ast; use syntax::abi::Abi; -use syntax::codemap::{self, Span, BytePos, NO_EXPANSION}; +use syntax::codemap; use syntax::print::pprust; use syntax::ptr::P; - +use syntax_pos::{self, Span, BytePos, NO_EXPANSION}; pub type Cmd<'a> = &'a crate_metadata; @@ -1230,7 +1230,7 @@ fn get_attributes(md: rbml::Doc) -> Vec { value: meta_item, is_sugared_doc: is_sugared_doc, }, - span: codemap::DUMMY_SP + span: syntax_pos::DUMMY_SP } }).collect() }, @@ -1380,8 +1380,8 @@ fn reverse_translate_def_id(cdata: Cmd, did: DefId) -> Option { pub fn translate_span(cdata: Cmd, codemap: &codemap::CodeMap, last_filemap_index_hint: &Cell, - span: codemap::Span) - -> codemap::Span { + span: syntax_pos::Span) + -> syntax_pos::Span { let span = if span.lo > span.hi { // Currently macro expansion sometimes produces invalid Span values // where lo > hi. In order not to crash the compiler when trying to @@ -1390,7 +1390,7 @@ pub fn translate_span(cdata: Cmd, // least some of the time). // This workaround is only necessary as long as macro expansion is // not fixed. 
FIXME(#23480) - codemap::mk_sp(span.lo, span.lo) + syntax_pos::mk_sp(span.lo, span.lo) } else { span }; @@ -1430,7 +1430,7 @@ pub fn translate_span(cdata: Cmd, let hi = (span.hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos; - codemap::mk_sp(lo, hi) + syntax_pos::mk_sp(lo, hi) } pub fn each_inherent_implementation_for_type(cdata: Cmd, @@ -1733,7 +1733,7 @@ pub fn is_default_impl(cdata: Cmd, impl_id: DefIndex) -> bool { item_family(impl_doc) == Family::DefaultImpl } -pub fn get_imported_filemaps(metadata: &[u8]) -> Vec { +pub fn get_imported_filemaps(metadata: &[u8]) -> Vec { let crate_doc = rbml::Doc::new(metadata); let cm_doc = reader::get_doc(crate_doc, tag_codemap); diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index b14be158197e8..c23ad6d5f078f 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -44,10 +44,10 @@ use std::rc::Rc; use std::u32; use syntax::abi::Abi; use syntax::ast::{self, NodeId, Name, CRATE_NODE_ID, CrateNum}; -use syntax::codemap::BytePos; use syntax::attr; -use syntax::errors::Handler; +use errors::Handler; use syntax; +use syntax_pos::BytePos; use rbml::writer::Encoder; use rustc::hir::{self, PatKind}; diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index f7ea60c4078dc..1cf7282e9e95f 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -28,10 +28,11 @@ #[macro_use] extern crate log; #[macro_use] extern crate syntax; #[macro_use] #[no_link] extern crate rustc_bitflags; - +extern crate syntax_pos; extern crate flate; extern crate rbml; extern crate serialize as rustc_serialize; // used by deriving +extern crate rustc_errors as errors; #[macro_use] extern crate rustc; diff --git a/src/librustc_metadata/loader.rs b/src/librustc_metadata/loader.rs index a5b1c3d301b10..56393b79980f6 100644 --- a/src/librustc_metadata/loader.rs +++ b/src/librustc_metadata/loader.rs @@ -225,8 +225,8 @@ use rustc::util::common; use rustc_llvm as llvm; use rustc_llvm::{False, ObjectFile, mk_section_iter}; use rustc_llvm::archive_ro::ArchiveRO; -use syntax::codemap::Span; -use syntax::errors::DiagnosticBuilder; +use errors::DiagnosticBuilder; +use syntax_pos::Span; use rustc_back::target::Target; use std::cmp; diff --git a/src/librustc_metadata/macro_import.rs b/src/librustc_metadata/macro_import.rs index 1c7d37709c220..7dadf8d108a71 100644 --- a/src/librustc_metadata/macro_import.rs +++ b/src/librustc_metadata/macro_import.rs @@ -16,12 +16,12 @@ use cstore::CStore; use rustc::session::Session; use std::collections::{HashSet, HashMap}; -use syntax::codemap::Span; use syntax::parse::token; use syntax::ast; use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::ext; +use syntax_pos::Span; pub struct MacroLoader<'a> { sess: &'a Session, diff --git a/src/librustc_metadata/tyencode.rs b/src/librustc_metadata/tyencode.rs index 48811c68f5869..2b8ba107fefc6 100644 --- a/src/librustc_metadata/tyencode.rs +++ b/src/librustc_metadata/tyencode.rs @@ -29,7 +29,7 @@ use rustc::hir; use syntax::abi::Abi; use syntax::ast; -use syntax::errors::Handler; +use errors::Handler; use rbml::leb128; use encoder; diff --git a/src/librustc_mir/Cargo.toml b/src/librustc_mir/Cargo.toml index 77dccb7e0d483..2a1a815330675 100644 --- a/src/librustc_mir/Cargo.toml +++ b/src/librustc_mir/Cargo.toml @@ -18,3 +18,4 @@ rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_bitflags = { path = 
"../librustc_bitflags" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index ab7bc4eec91cc..f1487992cb522 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -24,7 +24,7 @@ use rustc::middle::const_val::ConstVal; use rustc::ty; use rustc::mir::repr::*; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr`, yielding an rvalue. diff --git a/src/librustc_mir/build/expr/stmt.rs b/src/librustc_mir/build/expr/stmt.rs index ad55a3d8b73f2..8ae23c9103b02 100644 --- a/src/librustc_mir/build/expr/stmt.rs +++ b/src/librustc_mir/build/expr/stmt.rs @@ -13,7 +13,7 @@ use build::scope::LoopScope; use hair::*; use rustc::middle::region::CodeExtent; use rustc::mir::repr::*; -use syntax::codemap::Span; +use syntax_pos::Span; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index b3315ab7d290f..a94adafa80213 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -21,7 +21,7 @@ use rustc::ty::{AdtDef, Ty}; use rustc::mir::repr::*; use hair::*; use syntax::ast::{Name, NodeId}; -use syntax::codemap::Span; +use syntax_pos::Span; // helper functions, broken out by category: mod simplify; diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs index 668bdcf735802..8c9ed53c8ab4d 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -23,7 +23,7 @@ use rustc_data_structures::bitvec::BitVector; use rustc::middle::const_val::ConstVal; use rustc::ty::{self, Ty}; use rustc::mir::repr::*; -use syntax::codemap::Span; +use syntax_pos::Span; use std::cmp::Ordering; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { diff --git a/src/librustc_mir/build/misc.rs b/src/librustc_mir/build/misc.rs index 0d7a502834881..79a4cf73041d7 100644 --- a/src/librustc_mir/build/misc.rs +++ b/src/librustc_mir/build/misc.rs @@ -19,7 +19,7 @@ use rustc::ty::{self, Ty}; use rustc::mir::repr::*; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Add a new temporary value of type `ty` storing the result of diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 2626a02281f7d..362e1e26fdf1e 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -16,8 +16,8 @@ use rustc::util::nodemap::NodeMap; use rustc::hir; use syntax::abi::Abi; use syntax::ast; -use syntax::codemap::Span; use syntax::parse::token::keywords; +use syntax_pos::Span; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 65457a9cc80d6..1703fee9360cc 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -92,7 +92,7 @@ use rustc::middle::lang_items; use rustc::ty::subst::{Substs, Subst, VecPerParamSpace}; use rustc::ty::{Ty, TyCtxt}; use rustc::mir::repr::*; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::fnv::FnvHashMap; diff --git a/src/librustc_mir/hair/cx/pattern.rs b/src/librustc_mir/hair/cx/pattern.rs index 1bc3954a5fe75..b5da50792762f 100644 --- a/src/librustc_mir/hair/cx/pattern.rs +++ 
b/src/librustc_mir/hair/cx/pattern.rs @@ -17,8 +17,8 @@ use rustc::hir::pat_util::{EnumerateAndAdjustIterator, pat_is_resolved_const}; use rustc::ty::{self, Ty}; use rustc::mir::repr::*; use rustc::hir::{self, PatKind}; -use syntax::codemap::Span; use syntax::ptr::P; +use syntax_pos::Span; /// When there are multiple patterns in a single arm, each one has its /// own node-ids for the bindings. References to the variables always diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs index 020fbb6fcd19c..a2746bf30c91f 100644 --- a/src/librustc_mir/hair/mod.rs +++ b/src/librustc_mir/hair/mod.rs @@ -23,7 +23,7 @@ use rustc::ty::subst::Substs; use rustc::ty::{self, AdtDef, ClosureSubsts, Region, Ty}; use rustc::hir; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use self::cx::Cx; pub mod cx; diff --git a/src/librustc_mir/lib.rs b/src/librustc_mir/lib.rs index 3d1ef31bd5c2a..3d01d49c53472 100644 --- a/src/librustc_mir/lib.rs +++ b/src/librustc_mir/lib.rs @@ -38,6 +38,7 @@ extern crate rustc_back; extern crate rustc_bitflags; #[macro_use] extern crate syntax; +extern crate syntax_pos; extern crate rustc_const_math; extern crate rustc_const_eval; diff --git a/src/librustc_mir/mir_map.rs b/src/librustc_mir/mir_map.rs index a55fbe3641c66..b7c5f35892b0b 100644 --- a/src/librustc_mir/mir_map.rs +++ b/src/librustc_mir/mir_map.rs @@ -33,7 +33,7 @@ use rustc::util::nodemap::NodeMap; use rustc::hir; use rustc::hir::intravisit::{self, FnKind, Visitor}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use std::mem; diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs index 3ebfef10d4311..4b551d6bb083c 100644 --- a/src/librustc_mir/transform/promote_consts.rs +++ b/src/librustc_mir/transform/promote_consts.rs @@ -26,7 +26,7 @@ use rustc::mir::repr::*; use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor}; use rustc::mir::traversal::ReversePostorder; use rustc::ty::{self, TyCtxt}; -use syntax::codemap::Span; +use syntax_pos::Span; use build::Location; diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index 784ddc1ede4f1..1d00938fb25eb 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -30,8 +30,8 @@ use rustc::mir::transform::{Pass, MirMapPass, MirPassHook, MirSource}; use rustc::mir::visit::{LvalueContext, Visitor}; use rustc::util::nodemap::DefIdMap; use syntax::abi::Abi; -use syntax::codemap::Span; use syntax::feature_gate::UnstableFeatures; +use syntax_pos::Span; use std::collections::hash_map::Entry; use std::fmt; diff --git a/src/librustc_mir/transform/type_check.rs b/src/librustc_mir/transform/type_check.rs index e4398fcab3163..db49e1e040791 100644 --- a/src/librustc_mir/transform/type_check.rs +++ b/src/librustc_mir/transform/type_check.rs @@ -22,7 +22,7 @@ use rustc::mir::tcx::LvalueTy; use rustc::mir::transform::{MirPass, MirSource, Pass}; use rustc::mir::visit::{self, Visitor}; use std::fmt; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use rustc_data_structures::indexed_vec::Idx; diff --git a/src/librustc_passes/Cargo.toml b/src/librustc_passes/Cargo.toml index 0c85ffd2e9c39..cc710e0ac3563 100644 --- a/src/librustc_passes/Cargo.toml +++ b/src/librustc_passes/Cargo.toml @@ -14,3 +14,5 @@ rustc = { path = "../librustc" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_const_math = { path = "../librustc_const_math" } syntax = { 
path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } +rustc_errors = { path = "../librustc_errors" } \ No newline at end of file diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 919c717f888ff..a90b563515eae 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -19,10 +19,10 @@ use rustc::lint; use rustc::session::Session; use syntax::ast::*; -use syntax::codemap::Span; -use syntax::errors; use syntax::parse::token::{self, keywords}; use syntax::visit::{self, Visitor}; +use syntax_pos::Span; +use errors; struct AstValidator<'a> { session: &'a Session, @@ -57,7 +57,7 @@ impl<'a> AstValidator<'a> { } } -impl<'a, 'v> Visitor<'v> for AstValidator<'a> { +impl<'a> Visitor for AstValidator<'a> { fn visit_lifetime(&mut self, lt: &Lifetime) { if lt.name.as_str() == "'_" { self.session.add_lint( @@ -73,7 +73,7 @@ impl<'a, 'v> Visitor<'v> for AstValidator<'a> { match expr.node { ExprKind::While(_, _, Some(ident)) | ExprKind::Loop(_, Some(ident)) | ExprKind::WhileLet(_, _, _, Some(ident)) | ExprKind::ForLoop(_, _, _, Some(ident)) | - ExprKind::Break(Some(ident)) | ExprKind::Again(Some(ident)) => { + ExprKind::Break(Some(ident)) | ExprKind::Continue(Some(ident)) => { self.check_label(ident.node, ident.span, expr.id); } _ => {} diff --git a/src/librustc_passes/consts.rs b/src/librustc_passes/consts.rs index 75bfe7c0f2f95..27ce03b2d9390 100644 --- a/src/librustc_passes/consts.rs +++ b/src/librustc_passes/consts.rs @@ -46,7 +46,7 @@ use rustc::lint::builtin::CONST_ERR; use rustc::hir::{self, PatKind}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir::intravisit::{self, FnKind, Visitor}; use std::collections::hash_map::Entry; diff --git a/src/librustc_passes/lib.rs b/src/librustc_passes/lib.rs index 1576ca6bdeaa4..650613f4844f5 100644 --- a/src/librustc_passes/lib.rs +++ b/src/librustc_passes/lib.rs @@ -34,6 +34,8 @@ extern crate rustc_const_math; #[macro_use] extern crate log; #[macro_use] extern crate syntax; +extern crate syntax_pos; +extern crate rustc_errors as errors; pub mod diagnostics; diff --git a/src/librustc_passes/loops.rs b/src/librustc_passes/loops.rs index 2174d1cf9b82a..dd0f16baaa395 100644 --- a/src/librustc_passes/loops.rs +++ b/src/librustc_passes/loops.rs @@ -15,7 +15,7 @@ use rustc::dep_graph::DepNode; use rustc::hir::map::Map; use rustc::hir::intravisit::{self, Visitor}; use rustc::hir; -use syntax::codemap::Span; +use syntax_pos::Span; #[derive(Clone, Copy, PartialEq)] enum Context { diff --git a/src/librustc_passes/no_asm.rs b/src/librustc_passes/no_asm.rs index 90f92c25b05ea..314513a974ecd 100644 --- a/src/librustc_passes/no_asm.rs +++ b/src/librustc_passes/no_asm.rs @@ -29,7 +29,7 @@ struct CheckNoAsm<'a> { sess: &'a Session, } -impl<'a, 'v> Visitor<'v> for CheckNoAsm<'a> { +impl<'a> Visitor for CheckNoAsm<'a> { fn visit_expr(&mut self, e: &ast::Expr) { match e.node { ast::ExprKind::InlineAsm(_) => span_err!(self.sess, e.span, E0472, diff --git a/src/librustc_passes/rvalues.rs b/src/librustc_passes/rvalues.rs index 137a50642fcf4..4684683f02501 100644 --- a/src/librustc_passes/rvalues.rs +++ b/src/librustc_passes/rvalues.rs @@ -20,7 +20,7 @@ use rustc::traits::ProjectionMode; use rustc::hir; use rustc::hir::intravisit; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut rvcx = RvalueContext { tcx: tcx }; diff --git a/src/librustc_passes/static_recursion.rs 
b/src/librustc_passes/static_recursion.rs index 245960a04f030..d0938ad09a0da 100644 --- a/src/librustc_passes/static_recursion.rs +++ b/src/librustc_passes/static_recursion.rs @@ -18,8 +18,8 @@ use rustc::hir::def::{Def, DefMap}; use rustc::util::nodemap::NodeMap; use syntax::{ast}; -use syntax::codemap::Span; use syntax::feature_gate::{GateIssue, emit_feature_err}; +use syntax_pos::Span; use rustc::hir::intravisit::{self, Visitor}; use rustc::hir; diff --git a/src/librustc_plugin/Cargo.toml b/src/librustc_plugin/Cargo.toml index 6acd1e76ff2c2..514d81ecc94f2 100644 --- a/src/librustc_plugin/Cargo.toml +++ b/src/librustc_plugin/Cargo.toml @@ -15,3 +15,5 @@ rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } rustc_metadata = { path = "../librustc_metadata" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } +rustc_errors = { path = "../librustc_errors" } diff --git a/src/librustc_plugin/build.rs b/src/librustc_plugin/build.rs index 4c7755b1b0937..ff3038c3d1175 100644 --- a/src/librustc_plugin/build.rs +++ b/src/librustc_plugin/build.rs @@ -12,8 +12,8 @@ use syntax::ast; use syntax::attr; -use syntax::codemap::Span; -use syntax::errors; +use errors; +use syntax_pos::Span; use rustc::dep_graph::DepNode; use rustc::hir::map::Map; use rustc::hir::intravisit::Visitor; diff --git a/src/librustc_plugin/lib.rs b/src/librustc_plugin/lib.rs index 7ef628e8f9b16..e60a657ba193d 100644 --- a/src/librustc_plugin/lib.rs +++ b/src/librustc_plugin/lib.rs @@ -70,6 +70,8 @@ extern crate rustc; extern crate rustc_back; extern crate rustc_metadata; +extern crate syntax_pos; +extern crate rustc_errors as errors; pub use self::registry::Registry; diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index 11e1841f7493e..a3cd9b5da02bc 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -20,9 +20,9 @@ use std::env; use std::mem; use std::path::PathBuf; use syntax::ast; -use syntax::codemap::{Span, COMMAND_LINE_SP}; use syntax::ptr::P; use syntax::attr::AttrMetaMethods; +use syntax_pos::{Span, COMMAND_LINE_SP}; /// Pointer to a registrar function. 
pub type PluginRegistrarFun = diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index dc5a38bb7647e..54fa0197de4fe 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -18,11 +18,11 @@ use rustc::mir::transform::MirMapPass; use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT}; use syntax::ext::base::{IdentTT, MultiModifier, MultiDecorator}; use syntax::ext::base::{MacroExpanderFn, MacroRulesTT}; -use syntax::codemap::Span; use syntax::parse::token; use syntax::ptr::P; use syntax::ast; use syntax::feature_gate::AttributeType; +use syntax_pos::Span; use std::collections::HashMap; use std::borrow::ToOwned; diff --git a/src/librustc_privacy/Cargo.toml b/src/librustc_privacy/Cargo.toml index ac33c23f023d8..439fa661e0ab5 100644 --- a/src/librustc_privacy/Cargo.toml +++ b/src/librustc_privacy/Cargo.toml @@ -11,3 +11,4 @@ crate-type = ["dylib"] [dependencies] rustc = { path = "../librustc" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 918c149ef8589..85a6f732dd52e 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -23,6 +23,7 @@ extern crate rustc; #[macro_use] extern crate syntax; +extern crate syntax_pos; use rustc::dep_graph::DepNode; use rustc::hir::{self, PatKind}; @@ -35,7 +36,7 @@ use rustc::middle::privacy::{AccessLevel, AccessLevels}; use rustc::ty::{self, TyCtxt}; use rustc::util::nodemap::NodeSet; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use std::cmp; use std::mem::replace; diff --git a/src/librustc_resolve/Cargo.toml b/src/librustc_resolve/Cargo.toml index a63460d912d7d..5ce4c74e735fd 100644 --- a/src/librustc_resolve/Cargo.toml +++ b/src/librustc_resolve/Cargo.toml @@ -14,3 +14,5 @@ log = { path = "../liblog" } syntax = { path = "../libsyntax" } rustc = { path = "../librustc" } arena = { path = "../libarena" } +rustc_errors = { path = "../librustc_errors" } +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 775c24b6d4a67..5867e48c7ca27 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -29,15 +29,16 @@ use rustc::ty::{self, VariantKind}; use syntax::ast::Name; use syntax::attr; use syntax::parse::token; -use syntax::codemap::{Span, DUMMY_SP}; -use syntax::ast::{Block, Crate, DeclKind}; +use syntax::ast::{Block, Crate}; use syntax::ast::{ForeignItem, ForeignItemKind, Item, ItemKind}; use syntax::ast::{Mutability, PathListItemKind}; -use syntax::ast::{Stmt, StmtKind, TraitItemKind}; +use syntax::ast::{StmtKind, TraitItemKind}; use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple}; use syntax::visit::{self, Visitor}; +use syntax_pos::{Span, DUMMY_SP}; + trait ToNameBinding<'a> { fn to_name_binding(self) -> NameBinding<'a>; } @@ -84,17 +85,11 @@ impl<'b> Resolver<'b> { } fn block_needs_anonymous_module(&mut self, block: &Block) -> bool { - fn is_item(statement: &Stmt) -> bool { - if let StmtKind::Decl(ref declaration, _) = statement.node { - if let DeclKind::Item(_) = declaration.node { - return true; - } - } - false - } - // If any statements are items, we need to create an anonymous module - block.stmts.iter().any(is_item) + block.stmts.iter().any(|statement| match statement.node { + StmtKind::Item(_) => true, + _ => false, + }) } /// Constructs the reduced graph for 
one item. @@ -313,6 +308,7 @@ impl<'b> Resolver<'b> { (Def::Method(item_def_id), ValueNS) } TraitItemKind::Type(..) => (Def::AssociatedTy(def_id, item_def_id), TypeNS), + TraitItemKind::Macro(_) => panic!("unexpanded macro in resolve!"), }; self.define(module_parent, item.ident.name, ns, (def, item.span, vis)); @@ -503,7 +499,7 @@ struct BuildReducedGraphVisitor<'a, 'b: 'a> { parent: Module<'b>, } -impl<'a, 'b, 'v> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b> { +impl<'a, 'b> Visitor for BuildReducedGraphVisitor<'a, 'b> { fn visit_item(&mut self, item: &Item) { let old_parent = self.parent; self.resolver.build_reduced_graph_for_item(item, &mut self.parent); diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index 64347d7b84d3c..3084d9abbe1e4 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -27,7 +27,7 @@ use Namespace::{TypeNS, ValueNS}; use rustc::lint; use syntax::ast::{self, ViewPathGlob, ViewPathList, ViewPathSimple}; use syntax::visit::{self, Visitor}; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; struct UnusedImportCheckVisitor<'a, 'b: 'a> { @@ -71,7 +71,7 @@ impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> { } } -impl<'a, 'b, 'v> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b> { +impl<'a, 'b> Visitor for UnusedImportCheckVisitor<'a, 'b> { fn visit_item(&mut self, item: &ast::Item) { visit::walk_item(self, item); // Ignore is_public import statements because there's no way to be sure diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index e82b4e2fcd72f..8ffa95ec7e96f 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -27,6 +27,8 @@ extern crate log; #[macro_use] extern crate syntax; +extern crate syntax_pos; +extern crate rustc_errors as errors; extern crate arena; #[macro_use] extern crate rustc; @@ -54,8 +56,6 @@ use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet}; use syntax::ext::mtwt; use syntax::ast::{self, FloatTy}; use syntax::ast::{CRATE_NODE_ID, Name, NodeId, CrateNum, IntTy, UintTy}; -use syntax::codemap::{self, Span}; -use syntax::errors::DiagnosticBuilder; use syntax::parse::token::{self, keywords}; use syntax::util::lev_distance::find_best_match_for_name; @@ -66,6 +66,9 @@ use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind}; use syntax::ast::{Local, Mutability, Pat, PatKind, Path}; use syntax::ast::{PathSegment, PathParameters, QSelf, TraitItemKind, TraitRef, Ty, TyKind}; +use syntax_pos::Span; +use errors::DiagnosticBuilder; + use std::collections::{HashMap, HashSet}; use std::cell::{Cell, RefCell}; use std::fmt; @@ -177,13 +180,13 @@ enum UnresolvedNameContext<'a> { } fn resolve_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, - span: syntax::codemap::Span, + span: syntax_pos::Span, resolution_error: ResolutionError<'c>) { resolve_struct_error(resolver, span, resolution_error).emit(); } fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, - span: syntax::codemap::Span, + span: syntax_pos::Span, resolution_error: ResolutionError<'c>) -> DiagnosticBuilder<'a> { if !resolver.emit_errors { @@ -495,7 +498,7 @@ pub enum Namespace { ValueNS, } -impl<'a, 'v> Visitor<'v> for Resolver<'a> { +impl<'a> Visitor for Resolver<'a> { fn visit_item(&mut self, item: &Item) { self.resolve_item(item); } @@ -554,9 +557,9 @@ impl<'a, 'v> Visitor<'v> for Resolver<'a> { }); } fn visit_fn(&mut self, - function_kind: FnKind<'v>, - declaration: &'v FnDecl, - block: &'v Block, + function_kind: 
FnKind, + declaration: &FnDecl, + block: &Block, _: Span, node_id: NodeId) { let rib_kind = match function_kind { @@ -1637,6 +1640,7 @@ impl<'a> Resolver<'a> { visit::walk_trait_item(this, trait_item) }); } + TraitItemKind::Macro(_) => panic!("unexpanded macro in resolve!"), }; } }); @@ -1804,10 +1808,10 @@ impl<'a> Resolver<'a> { self.resolve_crate_relative_path(trait_path.span, segments, TypeNS) } else { self.resolve_module_relative_path(trait_path.span, segments, TypeNS) - }.map(|binding| binding.span).unwrap_or(codemap::DUMMY_SP) + }.map(|binding| binding.span).unwrap_or(syntax_pos::DUMMY_SP) }; - if definition_site != codemap::DUMMY_SP { + if definition_site != syntax_pos::DUMMY_SP { err.span_label(definition_site, &format!("type aliases cannot be used for traits")); } @@ -2332,8 +2336,8 @@ impl<'a> Resolver<'a> { }, "variant or struct"); } - PatKind::Path(ref path) => { - self.resolve_pattern_path(pat.id, None, path, ValueNS, |def| { + PatKind::Path(ref qself, ref path) => { + self.resolve_pattern_path(pat.id, qself.as_ref(), path, ValueNS, |def| { match def { Def::Struct(..) | Def::Variant(..) | Def::Const(..) | Def::AssociatedConst(..) | Def::Err => true, @@ -2342,15 +2346,6 @@ impl<'a> Resolver<'a> { }, "variant, struct or constant"); } - PatKind::QPath(ref qself, ref path) => { - self.resolve_pattern_path(pat.id, Some(qself), path, ValueNS, |def| { - match def { - Def::AssociatedConst(..) | Def::Err => true, - _ => false, - } - }, "associated constant"); - } - PatKind::Struct(ref path, _, _) => { self.resolve_pattern_path(pat.id, None, path, TypeNS, |def| { match def { @@ -2956,7 +2951,7 @@ impl<'a> Resolver<'a> { }) } - ExprKind::Break(Some(label)) | ExprKind::Again(Some(label)) => { + ExprKind::Break(Some(label)) | ExprKind::Continue(Some(label)) => { match self.search_label(mtwt::resolve(label.node)) { None => { self.record_def(expr.id, err_path_resolution()); @@ -3338,7 +3333,7 @@ impl<'a> Resolver<'a> { }, }; - if old_binding.span != codemap::DUMMY_SP { + if old_binding.span != syntax_pos::DUMMY_SP { err.span_label(old_binding.span, &format!("previous {} of `{}` here", noun, name)); } err.emit(); diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index 3082a8b43076f..cb308f9120404 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -25,8 +25,8 @@ use rustc::lint; use rustc::hir::def::*; use syntax::ast::{NodeId, Name}; -use syntax::codemap::{Span, DUMMY_SP}; use syntax::util::lev_distance::find_best_match_for_name; +use syntax_pos::{Span, DUMMY_SP}; use std::cell::{Cell, RefCell}; diff --git a/src/librustc_save_analysis/Cargo.toml b/src/librustc_save_analysis/Cargo.toml index c786b4d711c39..3d66e5a300787 100644 --- a/src/librustc_save_analysis/Cargo.toml +++ b/src/librustc_save_analysis/Cargo.toml @@ -13,3 +13,4 @@ log = { path = "../liblog" } rustc = { path = "../librustc" } syntax = { path = "../libsyntax" } serialize = { path = "../libserialize" } +syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file diff --git a/src/librustc_save_analysis/data.rs b/src/librustc_save_analysis/data.rs index 15aaa77cc3518..493f7669337fe 100644 --- a/src/librustc_save_analysis/data.rs +++ b/src/librustc_save_analysis/data.rs @@ -15,7 +15,7 @@ use rustc::hir::def_id::DefId; use syntax::ast::{CrateNum, NodeId}; -use syntax::codemap::Span; +use syntax_pos::Span; pub struct CrateData { pub name: String, diff --git a/src/librustc_save_analysis/dump_visitor.rs 
b/src/librustc_save_analysis/dump_visitor.rs index 56c7436a8fe5c..c1960eeee46b8 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -36,11 +36,12 @@ use std::collections::HashSet; use std::hash::*; use syntax::ast::{self, NodeId, PatKind}; -use syntax::codemap::*; use syntax::parse::token::{self, keywords}; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string}; use syntax::ptr::P; +use syntax::codemap::Spanned; +use syntax_pos::*; use super::{escape, generated_code, SaveContext, PathCollector}; use super::data::*; @@ -1037,7 +1038,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { } } -impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx, 'll, D> { +impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> { fn visit_item(&mut self, item: &ast::Item) { use syntax::ast::ItemKind::*; self.process_macro_use(item.span, item.id); @@ -1215,7 +1216,8 @@ impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx, trait_item.span); } ast::TraitItemKind::Const(_, None) | - ast::TraitItemKind::Type(..) => {} + ast::TraitItemKind::Type(..) | + ast::TraitItemKind::Macro(_) => {} } } @@ -1421,8 +1423,7 @@ impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx, } fn visit_stmt(&mut self, s: &ast::Stmt) { - let id = s.node.id(); - self.process_macro_use(s.span, id.unwrap()); + self.process_macro_use(s.span, s.id); visit::walk_stmt(self, s) } diff --git a/src/librustc_save_analysis/external_data.rs b/src/librustc_save_analysis/external_data.rs index 7efd2624561e7..65e4f7e869b0d 100644 --- a/src/librustc_save_analysis/external_data.rs +++ b/src/librustc_save_analysis/external_data.rs @@ -12,7 +12,8 @@ use rustc::hir::def_id::{DefId, DefIndex}; use rustc::hir::map::Map; use rustc::ty::TyCtxt; use syntax::ast::{CrateNum, NodeId}; -use syntax::codemap::{Span, CodeMap}; +use syntax::codemap::CodeMap; +use syntax_pos::Span; use data; diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 3335133816043..8eaf398778380 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -27,6 +27,7 @@ #[macro_use] extern crate log; #[macro_use] extern crate syntax; extern crate serialize as rustc_serialize; +extern crate syntax_pos; mod csv_dumper; mod json_dumper; @@ -49,10 +50,11 @@ use std::fs::{self, File}; use std::path::{Path, PathBuf}; use syntax::ast::{self, NodeId, PatKind}; -use syntax::codemap::*; use syntax::parse::token::{self, keywords}; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{ty_to_string, arg_to_string}; +use syntax::codemap::MacroAttribute; +use syntax_pos::*; pub use self::csv_dumper::CsvDumper; pub use self::json_dumper::JsonDumper; @@ -691,7 +693,7 @@ impl PathCollector { } } -impl<'v> Visitor<'v> for PathCollector { +impl Visitor for PathCollector { fn visit_pat(&mut self, p: &ast::Pat) { match p.node { PatKind::Struct(ref path, _, _) => { @@ -699,8 +701,7 @@ impl<'v> Visitor<'v> for PathCollector { ast::Mutability::Mutable, recorder::TypeRef)); } PatKind::TupleStruct(ref path, _, _) | - PatKind::Path(ref path) | - PatKind::QPath(_, ref path) => { + PatKind::Path(_, ref path) => { self.collected_paths.push((p.id, path.clone(), ast::Mutability::Mutable, recorder::VarRef)); } diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs 
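Note on the visitor hunks above (dump_visitor.rs, lib.rs/PathCollector, and the earlier librustc_resolve and librustc_passes impls): the AST visitor trait drops its explicit lifetime parameter, so `impl<'a, 'v> Visitor<'v> for T` becomes `impl<'a> Visitor for T` and each method borrows its node only for the duration of the call. A minimal sketch of the post-patch shape, with a hypothetical ItemCounter visitor and all other trait methods left at their defaults:

    use syntax::ast;
    use syntax::visit::{self, Visitor};

    struct ItemCounter { count: usize }

    impl Visitor for ItemCounter {
        fn visit_item(&mut self, item: &ast::Item) {
            self.count += 1;
            // Recurse into the item exactly as before the change.
            visit::walk_item(self, item);
        }
    }
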
index 4b3975faa8076..953c65549195a 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -17,9 +17,9 @@ use std::env; use std::path::Path; use syntax::ast; -use syntax::codemap::*; use syntax::parse::lexer::{self, Reader, StringReader}; use syntax::parse::token::{self, keywords, Token}; +use syntax_pos::*; #[derive(Clone)] pub struct SpanUtils<'a> { diff --git a/src/librustc_trans/Cargo.toml b/src/librustc_trans/Cargo.toml index 749ceda3db094..38f9e7ab0c51c 100644 --- a/src/librustc_trans/Cargo.toml +++ b/src/librustc_trans/Cargo.toml @@ -19,8 +19,10 @@ rustc_back = { path = "../librustc_back" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_errors = { path = "../librustc_errors" } rustc_incremental = { path = "../librustc_incremental" } rustc_llvm = { path = "../librustc_llvm" } rustc_platform_intrinsics = { path = "../librustc_platform_intrinsics" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file diff --git a/src/librustc_trans/_match.rs b/src/librustc_trans/_match.rs index 8ad1ba2a61418..15beba0d9a6ee 100644 --- a/src/librustc_trans/_match.rs +++ b/src/librustc_trans/_match.rs @@ -230,7 +230,7 @@ use std::fmt; use std::rc::Rc; use rustc::hir::{self, PatKind}; use syntax::ast::{self, DUMMY_NODE_ID, NodeId}; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir::fold::Folder; use syntax::ptr::P; diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 4676b0a67e4ae..0ca59cfd7571b 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -42,8 +42,8 @@ use std::process::Command; use std::str; use flate; use syntax::ast; -use syntax::codemap::Span; use syntax::attr::AttrMetaMethods; +use syntax_pos::Span; // RLIB LLVM-BYTECODE OBJECT LAYOUT // Version 1 diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index cf81777be261d..d644fcca3bad8 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -19,9 +19,9 @@ use llvm::SMDiagnosticRef; use {CrateTranslation, ModuleTranslation}; use util::common::time; use util::common::path2cstr; -use syntax::codemap::MultiSpan; -use syntax::errors::{self, Handler, Level, RenderSpan}; -use syntax::errors::emitter::CoreEmitter; +use errors::{self, Handler, Level, RenderSpan}; +use errors::emitter::CoreEmitter; +use syntax_pos::MultiSpan; use std::collections::HashMap; use std::ffi::{CStr, CString}; @@ -357,7 +357,7 @@ struct HandlerFreeVars<'a> { unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<'a>, msg: &'b str, cookie: c_uint) { - use syntax::codemap::ExpnId; + use syntax_pos::ExpnId; match cgcx.lto_ctxt { Some((sess, _)) => { diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index 5250361cd17ae..2998c834aca7a 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -101,7 +101,7 @@ use std::cell::{Cell, RefCell}; use std::collections::{HashMap, HashSet}; use std::str; use std::{i8, i16, i32, i64}; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use syntax::parse::token::InternedString; use syntax::attr::AttrMetaMethods; use syntax::attr; diff --git a/src/librustc_trans/build.rs b/src/librustc_trans/build.rs index 0185d1587625c..4a7a5736b13a6 100644 --- 
a/src/librustc_trans/build.rs +++ b/src/librustc_trans/build.rs @@ -16,7 +16,7 @@ use llvm::{AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect}; use llvm::{Opcode, IntPredicate, RealPredicate}; use llvm::{ValueRef, BasicBlockRef}; use common::*; -use syntax::codemap::Span; +use syntax_pos::Span; use builder::Builder; use type_::Type; diff --git a/src/librustc_trans/builder.rs b/src/librustc_trans/builder.rs index 9f032cdbfe513..d415698660000 100644 --- a/src/librustc_trans/builder.rs +++ b/src/librustc_trans/builder.rs @@ -24,7 +24,7 @@ use libc::{c_uint, c_char}; use std::ffi::CString; use std::ptr; -use syntax::codemap::Span; +use syntax_pos::Span; pub struct Builder<'a, 'tcx: 'a> { pub llbuilder: BuilderRef, diff --git a/src/librustc_trans/callee.rs b/src/librustc_trans/callee.rs index 7099246c6abde..9ea65532b35b6 100644 --- a/src/librustc_trans/callee.rs +++ b/src/librustc_trans/callee.rs @@ -53,8 +53,8 @@ use Disr; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; use rustc::hir; -use syntax::codemap::DUMMY_SP; -use syntax::errors; +use syntax_pos::DUMMY_SP; +use errors; use syntax::ptr::P; #[derive(Debug)] diff --git a/src/librustc_trans/collector.rs b/src/librustc_trans/collector.rs index bbc01f0935f2b..eea6aec37260e 100644 --- a/src/librustc_trans/collector.rs +++ b/src/librustc_trans/collector.rs @@ -203,8 +203,8 @@ use rustc::mir::visit as mir_visit; use rustc::mir::visit::Visitor as MirVisitor; use syntax::abi::Abi; -use syntax::codemap::DUMMY_SP; -use syntax::errors; +use errors; +use syntax_pos::DUMMY_SP; use base::custom_coerce_unsize_info; use context::SharedCrateContext; use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized}; diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index 6b8198881c83c..d057f623383d3 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -52,9 +52,9 @@ use std::ffi::CString; use std::cell::{Cell, RefCell}; use syntax::ast; -use syntax::codemap::{DUMMY_SP, Span}; use syntax::parse::token::InternedString; use syntax::parse::token; +use syntax_pos::{DUMMY_SP, Span}; pub use context::{CrateContext, SharedCrateContext}; diff --git a/src/librustc_trans/consts.rs b/src/librustc_trans/consts.rs index e988d2e6ac314..4e12d3d5d8231 100644 --- a/src/librustc_trans/consts.rs +++ b/src/librustc_trans/consts.rs @@ -49,9 +49,9 @@ use std::borrow::Cow; use libc::c_uint; use syntax::ast::{self, LitKind}; use syntax::attr::{self, AttrMetaMethods}; -use syntax::codemap::Span; use syntax::parse::token; use syntax::ptr::P; +use syntax_pos::Span; pub type FnArgMap<'a> = Option<&'a NodeMap>; diff --git a/src/librustc_trans/datum.rs b/src/librustc_trans/datum.rs index eda3ce1d1062c..875f88e37c916 100644 --- a/src/librustc_trans/datum.rs +++ b/src/librustc_trans/datum.rs @@ -106,7 +106,7 @@ use rustc::ty::Ty; use std::fmt; use syntax::ast; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; /// A `Datum` encapsulates the result of evaluating an expression. 
It /// describes where the value is stored, what Rust type the value has, diff --git a/src/librustc_trans/debuginfo/create_scope_map.rs b/src/librustc_trans/debuginfo/create_scope_map.rs index b29ca515f8fc3..2b079e7dcc8d9 100644 --- a/src/librustc_trans/debuginfo/create_scope_map.rs +++ b/src/librustc_trans/debuginfo/create_scope_map.rs @@ -22,7 +22,7 @@ use rustc::util::nodemap::NodeMap; use libc::c_uint; use std::ptr; -use syntax::codemap::{Span, Pos}; +use syntax_pos::{Span, Pos}; use syntax::{ast, codemap}; use rustc_data_structures::bitvec::BitVector; diff --git a/src/librustc_trans/debuginfo/metadata.rs b/src/librustc_trans/debuginfo/metadata.rs index c6c28fc9ba6a5..34dedeede98e0 100644 --- a/src/librustc_trans/debuginfo/metadata.rs +++ b/src/librustc_trans/debuginfo/metadata.rs @@ -44,9 +44,9 @@ use std::ptr; use std::rc::Rc; use syntax; use syntax::util::interner::Interner; -use syntax::codemap::Span; -use syntax::{ast, codemap}; +use syntax::ast; use syntax::parse::token; +use syntax_pos::{self, Span}; // From DWARF 5. @@ -660,7 +660,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, &[], containing_scope, NO_FILE_METADATA, - codemap::DUMMY_SP) + syntax_pos::DUMMY_SP) } pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -1385,7 +1385,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { &[sole_struct_member_description], self.containing_scope, self.file_metadata, - codemap::DUMMY_SP); + syntax_pos::DUMMY_SP); // Encode the information about the null variant in the union // member's name. @@ -1615,7 +1615,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let discriminant_base_type_metadata = type_metadata(cx, adt::ty_of_inttype(cx.tcx(), inttype), - codemap::DUMMY_SP); + syntax_pos::DUMMY_SP); let discriminant_name = get_enum_discriminant_name(cx, enum_def_id); let name = CString::new(discriminant_name.as_bytes()).unwrap(); @@ -1849,7 +1849,7 @@ pub fn create_global_var_metadata(cx: &CrateContext, let node_def_id = cx.tcx().map.local_def_id(node_id); let (var_scope, span) = get_namespace_and_span_for_item(cx, node_def_id); - let (file_metadata, line_number) = if span != codemap::DUMMY_SP { + let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP { let loc = span_start(cx, span); (file_metadata(cx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint) } else { diff --git a/src/librustc_trans/debuginfo/mod.rs b/src/librustc_trans/debuginfo/mod.rs index 92b151c7c4076..8c5b3ed54c2f5 100644 --- a/src/librustc_trans/debuginfo/mod.rs +++ b/src/librustc_trans/debuginfo/mod.rs @@ -42,8 +42,8 @@ use std::cell::{Cell, RefCell}; use std::ffi::CString; use std::ptr; -use syntax::codemap::{Span, Pos}; -use syntax::{ast, codemap}; +use syntax_pos::{self, Span, Pos}; +use syntax::ast; use syntax::attr::IntType; pub mod gdb; @@ -242,7 +242,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (containing_scope, span) = get_containing_scope_and_span(cx, instance); // This can be the case for functions inlined from another crate - if span == codemap::DUMMY_SP { + if span == syntax_pos::DUMMY_SP { return FunctionDebugContext::FunctionWithoutDebugInfo; } @@ -327,7 +327,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, signature.push(match sig.output { ty::FnConverging(ret_ty) => match ret_ty.sty { ty::TyTuple(ref tys) if tys.is_empty() => ptr::null_mut(), - _ => type_metadata(cx, ret_ty, codemap::DUMMY_SP) + _ => type_metadata(cx, ret_ty, syntax_pos::DUMMY_SP) }, ty::FnDiverging 
=> diverging_type_metadata(cx) }); @@ -340,13 +340,13 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // Arguments types for &argument_type in inputs { - signature.push(type_metadata(cx, argument_type, codemap::DUMMY_SP)); + signature.push(type_metadata(cx, argument_type, syntax_pos::DUMMY_SP)); } if abi == Abi::RustCall && !sig.inputs.is_empty() { if let ty::TyTuple(args) = sig.inputs[sig.inputs.len() - 1].sty { for &argument_type in args { - signature.push(type_metadata(cx, argument_type, codemap::DUMMY_SP)); + signature.push(type_metadata(cx, argument_type, syntax_pos::DUMMY_SP)); } } } @@ -386,7 +386,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let template_params: Vec<_> = if cx.sess().opts.debuginfo == FullDebugInfo { generics.types.as_slice().iter().enumerate().map(|(i, param)| { let actual_type = cx.tcx().normalize_associated_type(&actual_types[i]); - let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP); + let actual_type_metadata = type_metadata(cx, actual_type, syntax_pos::DUMMY_SP); let name = CString::new(param.name.as_str().as_bytes()).unwrap(); unsafe { llvm::LLVMDIBuilderCreateTemplateTypeParameter( @@ -420,7 +420,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let impl_self_ty = monomorphize::apply_param_substs(cx.tcx(), instance.substs, &impl_self_ty); - Some(type_metadata(cx, impl_self_ty, codemap::DUMMY_SP)) + Some(type_metadata(cx, impl_self_ty, syntax_pos::DUMMY_SP)) } else { // For trait method impls we still use the "parallel namespace" // strategy @@ -441,7 +441,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // Try to get some span information, if we have an inlined item. let definition_span = match cx.external().borrow().get(&instance.def) { Some(&Some(node_id)) => cx.tcx().map.span(node_id), - _ => cx.tcx().map.def_id_span(instance.def, codemap::DUMMY_SP) + _ => cx.tcx().map.def_id_span(instance.def, syntax_pos::DUMMY_SP) }; (containing_scope, definition_span) diff --git a/src/librustc_trans/debuginfo/namespace.rs b/src/librustc_trans/debuginfo/namespace.rs index a37fbdccc8f0a..167229ddfd985 100644 --- a/src/librustc_trans/debuginfo/namespace.rs +++ b/src/librustc_trans/debuginfo/namespace.rs @@ -22,7 +22,7 @@ use common::CrateContext; use libc::c_uint; use std::ffi::CString; use std::ptr; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; pub fn mangled_name_of_item(ccx: &CrateContext, def_id: DefId, extra: &str) -> String { fn fill_nested(ccx: &CrateContext, def_id: DefId, extra: &str, output: &mut String) { diff --git a/src/librustc_trans/debuginfo/source_loc.rs b/src/librustc_trans/debuginfo/source_loc.rs index 6b00c1bb1a855..9726001b4d42b 100644 --- a/src/librustc_trans/debuginfo/source_loc.rs +++ b/src/librustc_trans/debuginfo/source_loc.rs @@ -21,8 +21,8 @@ use common::{NodeIdAndSpan, CrateContext, FunctionContext}; use libc::c_uint; use std::ptr; -use syntax::codemap::{Span, Pos}; -use syntax::{ast, codemap}; +use syntax_pos::{self, Span, Pos}; +use syntax::ast; pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, node_id: ast::NodeId, @@ -70,7 +70,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, if !bytes.is_empty() && &bytes[bytes.len()-1..] 
== b"}" { cleanup_span = Span { - lo: node_span.hi - codemap::BytePos(1), + lo: node_span.hi - syntax_pos::BytePos(1), hi: node_span.hi, expn_id: node_span.expn_id }; diff --git a/src/librustc_trans/debuginfo/utils.rs b/src/librustc_trans/debuginfo/utils.rs index 3fd9793718434..1e0afa4534b15 100644 --- a/src/librustc_trans/debuginfo/utils.rs +++ b/src/librustc_trans/debuginfo/utils.rs @@ -21,8 +21,8 @@ use machine; use common::{CrateContext, FunctionContext}; use type_::Type; -use syntax::codemap::Span; -use syntax::{ast, codemap}; +use syntax_pos::{self, Span}; +use syntax::ast; pub fn is_node_local_to_unit(cx: &CrateContext, node_id: ast::NodeId) -> bool { @@ -44,8 +44,8 @@ pub fn create_DIArray(builder: DIBuilderRef, arr: &[DIDescriptor]) -> DIArray { }; } -/// Return codemap::Loc corresponding to the beginning of the span -pub fn span_start(cx: &CrateContext, span: Span) -> codemap::Loc { +/// Return syntax_pos::Loc corresponding to the beginning of the span +pub fn span_start(cx: &CrateContext, span: Span) -> syntax_pos::Loc { cx.sess().codemap().lookup_char_pos(span.lo) } @@ -88,7 +88,7 @@ pub fn get_namespace_and_span_for_item(cx: &CrateContext, def_id: DefId) // Try to get some span information, if we have an inlined item. let definition_span = match cx.external().borrow().get(&def_id) { Some(&Some(node_id)) => cx.tcx().map.span(node_id), - _ => cx.tcx().map.def_id_span(def_id, codemap::DUMMY_SP) + _ => cx.tcx().map.def_id_span(def_id, syntax_pos::DUMMY_SP) }; (containing_scope, definition_span) diff --git a/src/librustc_trans/expr.rs b/src/librustc_trans/expr.rs index 652886ff2a044..71c6cba9cc22a 100644 --- a/src/librustc_trans/expr.rs +++ b/src/librustc_trans/expr.rs @@ -81,8 +81,9 @@ use type_::Type; use rustc::hir; -use syntax::{ast, codemap}; +use syntax::ast; use syntax::parse::token::InternedString; +use syntax_pos; use std::fmt; use std::mem; @@ -454,7 +455,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } fn coerce_unsized<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - span: codemap::Span, + span: syntax_pos::Span, source: Datum<'tcx, Rvalue>, target: Datum<'tcx, Rvalue>) -> Block<'blk, 'tcx> { @@ -1265,7 +1266,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, fields: &[hir::Field], base: Option<&hir::Expr>, - expr_span: codemap::Span, + expr_span: syntax_pos::Span, expr_id: ast::NodeId, ty: Ty<'tcx>, dest: Dest) -> Block<'blk, 'tcx> { diff --git a/src/librustc_trans/glue.rs b/src/librustc_trans/glue.rs index 211efeb4e4baa..ac23d713d2727 100644 --- a/src/librustc_trans/glue.rs +++ b/src/librustc_trans/glue.rs @@ -43,7 +43,7 @@ use type_::Type; use value::Value; use arena::TypedArena; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; pub fn trans_exchange_free_dyn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v: ValueRef, diff --git a/src/librustc_trans/intrinsic.rs b/src/librustc_trans/intrinsic.rs index f2793c5e18a89..a721361fce0e3 100644 --- a/src/librustc_trans/intrinsic.rs +++ b/src/librustc_trans/intrinsic.rs @@ -44,7 +44,7 @@ use syntax::ptr::P; use syntax::parse::token; use rustc::session::Session; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use std::cmp::Ordering; diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 8724945ed901b..c369858556d3a 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -55,6 +55,8 @@ extern crate rustc_const_eval; #[macro_use] extern crate log; #[macro_use] extern crate syntax; +extern crate 
syntax_pos; +extern crate rustc_errors as errors; pub use rustc::session; pub use rustc::middle; diff --git a/src/librustc_trans/meth.rs b/src/librustc_trans/meth.rs index 062b3d4a6e4bf..ac6af8d66e19f 100644 --- a/src/librustc_trans/meth.rs +++ b/src/librustc_trans/meth.rs @@ -36,7 +36,7 @@ use value::Value; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; use syntax::ast::Name; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; // drop_glue pointer, size, align. const VTABLE_OFFSET: usize = 3; diff --git a/src/librustc_trans/mir/constant.rs b/src/librustc_trans/mir/constant.rs index 30be4a9737270..da72793abf6da 100644 --- a/src/librustc_trans/mir/constant.rs +++ b/src/librustc_trans/mir/constant.rs @@ -34,7 +34,7 @@ use type_of; use type_::Type; use value::Value; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use std::ptr; diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 0db5d3ae4d131..0221232a77df5 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -21,7 +21,7 @@ use debuginfo::{self, declare_local, DebugLoc, VariableAccess, VariableKind}; use machine; use type_of; -use syntax::codemap::DUMMY_SP; +use syntax_pos::DUMMY_SP; use syntax::parse::token::keywords; use std::ops::Deref; diff --git a/src/librustc_trans/mir/rvalue.rs b/src/librustc_trans/mir/rvalue.rs index 09b07c1440ec5..28bcd8a633c1c 100644 --- a/src/librustc_trans/mir/rvalue.rs +++ b/src/librustc_trans/mir/rvalue.rs @@ -133,8 +133,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { if let mir::AggregateKind::Closure(def_id, substs) = *kind { use rustc::hir; use syntax::ast::DUMMY_NODE_ID; - use syntax::codemap::DUMMY_SP; use syntax::ptr::P; + use syntax_pos::DUMMY_SP; use closure; closure::trans_closure_expr(closure::Dest::Ignore(bcx.ccx()), diff --git a/src/librustc_trans/monomorphize.rs b/src/librustc_trans/monomorphize.rs index dad82167a76b3..fa00ea1e7801b 100644 --- a/src/librustc_trans/monomorphize.rs +++ b/src/librustc_trans/monomorphize.rs @@ -28,7 +28,7 @@ use rustc::util::ppaux; use rustc::hir; use syntax::attr; -use syntax::errors; +use errors; use std::fmt; diff --git a/src/librustc_typeck/Cargo.toml b/src/librustc_typeck/Cargo.toml index a0c4c7534fab2..720423371a83a 100644 --- a/src/librustc_typeck/Cargo.toml +++ b/src/librustc_typeck/Cargo.toml @@ -19,3 +19,5 @@ rustc_back = { path = "../librustc_back" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_const_math = { path = "../librustc_const_math" } rustc_platform_intrinsics = { path = "../librustc_platform_intrinsics" } +syntax_pos = { path = "../libsyntax_pos" } +rustc_errors = { path = "../librustc_errors" } diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 350ebf201653c..088ac1aac1a40 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -73,10 +73,10 @@ use util::nodemap::{NodeMap, FnvHashSet}; use rustc_const_math::ConstInt; use std::cell::RefCell; use syntax::{abi, ast}; -use syntax::codemap::{Span, Pos}; -use syntax::errors::DiagnosticBuilder; use syntax::feature_gate::{GateIssue, emit_feature_err}; use syntax::parse::token::{self, keywords}; +use syntax_pos::{Span, Pos}; +use errors::DiagnosticBuilder; pub trait AstConv<'gcx, 'tcx> { fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>; diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 1cff392cf8338..069a09183a738 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs 
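The same two-line crate-root setup recurs in every crate touched here (librustc_trans/lib.rs just above, librustc_resolve, librustc_passes, librustc_plugin, ...): add `extern crate syntax_pos;` and `extern crate rustc_errors as errors;`, then split the old `syntax::codemap` imports so raw position types come from the new crate while codemap-level helpers stay in libsyntax. A hedged sketch of a typical module header after the patch; it assumes the in-tree libsyntax, libsyntax_pos, and librustc_errors of this revision and is not a standalone crate:

    // crate root
    extern crate syntax;
    extern crate syntax_pos;
    extern crate rustc_errors as errors;

    // in a module
    use syntax::codemap::Spanned;              // codemap-level helper, still in libsyntax
    use syntax_pos::{Span, BytePos, DUMMY_SP}; // raw position types, moved out
    use errors::DiagnosticBuilder;             // was syntax::errors::DiagnosticBuilder

    fn placeholder_span() -> Span {
        DUMMY_SP
    }
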
@@ -21,8 +21,9 @@ use session::Session; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::cmp; use syntax::ast; -use syntax::codemap::{Span, Spanned}; +use syntax::codemap::Spanned; use syntax::ptr::P; +use syntax_pos::Span; use rustc::hir::{self, PatKind}; use rustc::hir::print as pprust; diff --git a/src/librustc_typeck/check/assoc.rs b/src/librustc_typeck/check/assoc.rs index 04b0248ccdac2..41f34b9040e83 100644 --- a/src/librustc_typeck/check/assoc.rs +++ b/src/librustc_typeck/check/assoc.rs @@ -13,7 +13,7 @@ use rustc::traits::{self, FulfillmentContext, Normalized, MiscObligation, SelectionContext, ObligationCause}; use rustc::ty::fold::TypeFoldable; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; //FIXME(@jroesch): Ideally we should be able to drop the fulfillment_cx argument. pub fn normalize_associated_types_in<'a, 'gcx, 'tcx, T>( diff --git a/src/librustc_typeck/check/autoderef.rs b/src/librustc_typeck/check/autoderef.rs index 9e2b7cd034652..3c176744fca59 100644 --- a/src/librustc_typeck/check/autoderef.rs +++ b/src/librustc_typeck/check/autoderef.rs @@ -20,7 +20,7 @@ use rustc::ty::subst::Substs; use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue}; use rustc::hir; -use syntax::codemap::Span; +use syntax_pos::Span; use syntax::parse::token; #[derive(Copy, Clone, Debug)] diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 58abf8db2c33d..2c7e7d284fa16 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -17,9 +17,9 @@ use hir::def::Def; use hir::def_id::DefId; use rustc::infer; use rustc::ty::{self, LvaluePreference, Ty}; -use syntax::codemap::Span; use syntax::parse::token; use syntax::ptr::P; +use syntax_pos::Span; use rustc::hir; diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs index 690250edb8cb4..22ac8bc56907b 100644 --- a/src/librustc_typeck/check/cast.rs +++ b/src/librustc_typeck/check/cast.rs @@ -47,7 +47,7 @@ use rustc::traits; use rustc::ty::{self, Ty, TypeFoldable}; use rustc::ty::cast::{CastKind, CastTy}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use util::common::ErrorReported; /// Reifies a cast check to be checked once we have full type information for diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 20f82271b9cd6..35a5bc9c60967 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -15,7 +15,7 @@ use rustc::traits::{self, ProjectionMode}; use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use CrateCtxt; use super::assoc; diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs index 7c8eb62b0e749..eeebd6a7f626b 100644 --- a/src/librustc_typeck/check/demand.rs +++ b/src/librustc_typeck/check/demand.rs @@ -13,7 +13,7 @@ use check::FnCtxt; use rustc::ty::Ty; use rustc::infer::{InferOk, TypeOrigin}; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir; impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs index ae614d7b02157..56e4108153e1e 100644 --- a/src/librustc_typeck/check/dropck.rs +++ b/src/librustc_typeck/check/dropck.rs @@ -21,7 +21,7 @@ use rustc::traits::{self, ProjectionMode}; use util::nodemap::FnvHashSet; use syntax::ast; -use syntax::codemap::{self, Span}; 
+use syntax_pos::{self, Span}; /// check_drop_impl confirms that the Drop implementation identfied by /// `drop_impl_did` is not any more specialized than the type it is @@ -62,7 +62,7 @@ pub fn check_drop_impl(ccx: &CrateCtxt, drop_impl_did: DefId) -> Result<(), ()> _ => { // Destructors only work on nominal types. This was // already checked by coherence, so we can panic here. - let span = ccx.tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); + let span = ccx.tcx.map.def_id_span(drop_impl_did, syntax_pos::DUMMY_SP); span_bug!(span, "should have been rejected by coherence check: {}", dtor_self_type); @@ -91,7 +91,7 @@ fn ensure_drop_params_and_item_params_correspond<'a, 'tcx>( let named_type = tcx.lookup_item_type(self_type_did).ty; let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs); - let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); + let drop_impl_span = tcx.map.def_id_span(drop_impl_did, syntax_pos::DUMMY_SP); let fresh_impl_substs = infcx.fresh_substs_for_generics(drop_impl_span, drop_impl_generics); let fresh_impl_self_ty = drop_impl_ty.subst(tcx, &fresh_impl_substs); @@ -172,7 +172,7 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>( let self_type_node_id = tcx.map.as_local_node_id(self_type_did).unwrap(); - let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); + let drop_impl_span = tcx.map.def_id_span(drop_impl_did, syntax_pos::DUMMY_SP); // We can assume the predicates attached to struct/enum definition // hold. diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index 9148d68f39e0b..5a3268e9e447b 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -20,8 +20,8 @@ use {CrateCtxt, require_same_types}; use std::collections::{HashMap}; use syntax::abi::Abi; use syntax::ast; -use syntax::codemap::Span; use syntax::parse::token; +use syntax_pos::Span; use rustc::hir; diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index 683a67ff07cf9..5fac65bbfd655 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -18,7 +18,7 @@ use rustc::ty::{self, LvaluePreference, NoPreference, PreferMutLvalue, Ty}; use rustc::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr}; use rustc::ty::fold::TypeFoldable; use rustc::infer::{self, InferOk, TypeOrigin}; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir; use std::ops::Deref; diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index 00eeefa0449ce..e6401be5b3ef6 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -20,7 +20,7 @@ use rustc::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr}; use rustc::infer; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir; diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index d6b696a25483c..373fc83fa7444 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -22,7 +22,7 @@ use rustc::traits; use rustc::ty::{self, Ty, ToPolyTraitRef, TraitRef, TypeFoldable}; use rustc::infer::{InferOk, TypeOrigin}; use syntax::ast; -use syntax::codemap::{Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use rustc::hir; use std::collections::HashSet; use std::mem; diff --git 
a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index 6f0d2bc0ca5e6..f20dcdc35aea5 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -24,10 +24,10 @@ use rustc::ty::subst::Substs; use rustc::traits::{Obligation, SelectionContext}; use util::nodemap::{FnvHashSet}; - use syntax::ast; -use syntax::codemap::Span; -use syntax::errors::DiagnosticBuilder; +use errors::DiagnosticBuilder; +use syntax_pos::Span; + use rustc::hir::print as pprust; use rustc::hir; use rustc::hir::Expr_; diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 6f7f33fe24d16..83b5ab71cc224 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -115,11 +115,12 @@ use syntax::abi::Abi; use syntax::ast; use syntax::attr; use syntax::attr::AttrMetaMethods; -use syntax::codemap::{self, Span, Spanned}; -use syntax::errors::DiagnosticBuilder; +use syntax::codemap::{self, Spanned}; use syntax::parse::token::{self, InternedString, keywords}; use syntax::ptr::P; use syntax::util::lev_distance::find_best_match_for_name; +use syntax_pos::{self, Span}; +use errors::DiagnosticBuilder; use rustc::hir::intravisit::{self, Visitor}; use rustc::hir::{self, PatKind}; @@ -1908,7 +1909,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { for ty in &self.unsolved_variables() { if let ty::TyInfer(_) = self.shallow_resolve(ty).sty { debug!("default_type_parameters: defaulting `{:?}` to error", ty); - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx().types.err); + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx().types.err); } } return; @@ -1919,18 +1920,18 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if self.type_var_diverges(resolved) { debug!("default_type_parameters: defaulting `{:?}` to `()` because it diverges", resolved); - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { match self.type_is_unconstrained_numeric(resolved) { UnconstrainedInt => { debug!("default_type_parameters: defaulting `{:?}` to `i32`", resolved); - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32) + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32) }, UnconstrainedFloat => { debug!("default_type_parameters: defaulting `{:?}` to `f32`", resolved); - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64) + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64) } Neither => { } } @@ -1993,7 +1994,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { for ty in &unsolved_variables { let resolved = self.resolve_type_vars_if_possible(ty); if self.type_var_diverges(resolved) { - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { match self.type_is_unconstrained_numeric(resolved) { UnconstrainedInt | UnconstrainedFloat => { @@ -2051,14 +2052,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| { for ty in &unbound_tyvars { if self.type_var_diverges(ty) { - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { match self.type_is_unconstrained_numeric(ty) { UnconstrainedInt => { - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32) + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32) }, UnconstrainedFloat => { - 
self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64) + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64) } Neither => { if let Some(default) = default_map.get(ty) { @@ -2096,7 +2097,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.find_conflicting_default(&unbound_tyvars, &default_map, conflict) .unwrap_or(type_variable::Default { ty: self.next_ty_var(), - origin_span: codemap::DUMMY_SP, + origin_span: syntax_pos::DUMMY_SP, def_id: self.tcx.map.local_def_id(0) // what do I put here? }); @@ -2147,14 +2148,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // reporting for more then one conflict. for ty in &unbound_tyvars { if self.type_var_diverges(ty) { - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { match self.type_is_unconstrained_numeric(ty) { UnconstrainedInt => { - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32) + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32) }, UnconstrainedFloat => { - self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64) + self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64) }, Neither => { if let Some(default) = default_map.get(ty) { diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index a8cb21126c079..5a7038a056982 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -99,7 +99,7 @@ use rustc::ty::wf::ImpliedBound; use std::mem; use std::ops::Deref; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir::intravisit::{self, Visitor}; use rustc::hir::{self, PatKind}; diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index 19964d736f592..702dd5f8de58a 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -49,7 +49,7 @@ use rustc::ty::{self, Ty}; use rustc::infer::UpvarRegion; use std::collections::HashSet; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir; use rustc::hir::intravisit::{self, Visitor}; diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index e0a34189773b1..d101381e2565c 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -19,9 +19,10 @@ use rustc::ty::{self, Ty, TyCtxt}; use std::collections::HashSet; use syntax::ast; -use syntax::codemap::{Span}; -use syntax::errors::DiagnosticBuilder; use syntax::parse::token::keywords; +use syntax_pos::Span; +use errors::DiagnosticBuilder; + use rustc::hir::intravisit::{self, Visitor}; use rustc::hir; diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 205eaf1a38e14..7f5f3ae120b7a 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -25,7 +25,8 @@ use write_ty_to_tcx; use std::cell::Cell; use syntax::ast; -use syntax::codemap::{DUMMY_SP, Span}; +use syntax_pos::{DUMMY_SP, Span}; + use rustc::hir::print::pat_to_string; use rustc::hir::intravisit::{self, Visitor}; use rustc::hir::{self, PatKind}; diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs index c3538ace34749..2ee0927f3c8ea 100644 --- a/src/librustc_typeck/check_unused.rs +++ b/src/librustc_typeck/check_unused.rs @@ -13,7 +13,7 @@ use rustc::dep_graph::DepNode; use rustc::ty::TyCtxt; use syntax::ast; -use syntax::codemap::{Span, DUMMY_SP}; +use 
syntax_pos::{Span, DUMMY_SP}; use rustc::hir; use rustc::hir::intravisit::Visitor; diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 8bee0467f11b3..ade7806e71d12 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -34,7 +34,7 @@ use CrateCtxt; use rustc::infer::{self, InferCtxt, TypeOrigin}; use std::cell::RefCell; use std::rc::Rc; -use syntax::codemap::Span; +use syntax_pos::Span; use util::nodemap::{DefIdMap, FnvHashMap}; use rustc::dep_graph::DepNode; use rustc::hir::map as hir_map; diff --git a/src/librustc_typeck/coherence/orphan.rs b/src/librustc_typeck/coherence/orphan.rs index d9ad03222029d..15d4026254fa5 100644 --- a/src/librustc_typeck/coherence/orphan.rs +++ b/src/librustc_typeck/coherence/orphan.rs @@ -16,7 +16,7 @@ use hir::def_id::DefId; use rustc::traits; use rustc::ty::{self, TyCtxt}; use syntax::ast; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::dep_graph::DepNode; use rustc::hir::intravisit; use rustc::hir; diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 586c4f5c1858d..2c33d1a81556e 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -88,9 +88,10 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::rc::Rc; use syntax::{abi, ast, attr}; -use syntax::codemap::Span; use syntax::parse::token::keywords; use syntax::ptr::P; +use syntax_pos::Span; + use rustc::hir::{self, PatKind}; use rustc::hir::intravisit; use rustc::hir::print as pprust; diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index c6c575719c015..7ccff7ad3d87c 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -85,6 +85,7 @@ This API is completely unstable and subject to change. 
#[macro_use] extern crate log; #[macro_use] extern crate syntax; +extern crate syntax_pos; extern crate arena; extern crate fmt_macros; @@ -93,6 +94,7 @@ extern crate rustc_platform_intrinsics as intrinsics; extern crate rustc_back; extern crate rustc_const_math; extern crate rustc_const_eval; +extern crate rustc_errors as errors; pub use rustc::dep_graph; pub use rustc::hir; @@ -110,9 +112,9 @@ use rustc::traits::ProjectionMode; use session::{config, CompileResult}; use util::common::time; -use syntax::codemap::Span; use syntax::ast; use syntax::abi::Abi; +use syntax_pos::Span; use std::cell::RefCell; use util::nodemap::NodeMap; diff --git a/src/librustc_typeck/rscope.rs b/src/librustc_typeck/rscope.rs index 793dba1e3f5f8..336a61708683e 100644 --- a/src/librustc_typeck/rscope.rs +++ b/src/librustc_typeck/rscope.rs @@ -12,7 +12,7 @@ use rustc::ty; use std::cell::Cell; -use syntax::codemap::Span; +use syntax_pos::Span; #[derive(Clone)] pub struct ElisionFailureInfo { diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml index 9d76dd81e5f76..cf87aabdfdb5e 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -15,12 +15,14 @@ rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_driver = { path = "../librustc_driver" } +rustc_errors = { path = "../librustc_errors" } rustc_lint = { path = "../librustc_lint" } rustc_metadata = { path = "../librustc_metadata" } rustc_resolve = { path = "../librustc_resolve" } rustc_trans = { path = "../librustc_trans" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } log = { path = "../liblog" } [build-dependencies] diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 0801da19c822e..659022053128e 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -24,15 +24,14 @@ pub use self::SelfTy::*; pub use self::FunctionRetTy::*; pub use self::Visibility::*; -use syntax; use syntax::abi::Abi; use syntax::ast; use syntax::attr; use syntax::attr::{AttributeMethods, AttrMetaMethods}; -use syntax::codemap; -use syntax::codemap::{DUMMY_SP, Pos, Spanned}; +use syntax::codemap::Spanned; use syntax::parse::token::{self, InternedString, keywords}; use syntax::ptr::P; +use syntax_pos::{self, DUMMY_SP, Pos}; use rustc_trans::back::link; use rustc::middle::cstore; @@ -533,7 +532,7 @@ impl attr::AttrMetaMethods for Attribute { } } fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { None } - fn span(&self) -> codemap::Span { unimplemented!() } + fn span(&self) -> syntax_pos::Span { unimplemented!() } } #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] @@ -1977,7 +1976,7 @@ impl Span { } } -impl Clean for syntax::codemap::Span { +impl Clean for syntax_pos::Span { fn clean(&self, cx: &DocContext) -> Span { if *self == DUMMY_SP { return Span::empty(); @@ -2543,7 +2542,7 @@ trait ToSource { fn to_src(&self, cx: &DocContext) -> String; } -impl ToSource for syntax::codemap::Span { +impl ToSource for syntax_pos::Span { fn to_src(&self, cx: &DocContext) -> String { debug!("converting span {:?} to snippet", self.clean(cx)); let sn = match cx.sess().codemap().span_to_snippet(*self) { diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 73e0a529a45c6..f4da8167ea286 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -22,10 +22,11 @@ use rustc_trans::back::link; use rustc_resolve as resolve; use 
rustc_metadata::cstore::CStore; -use syntax::{ast, codemap, errors}; -use syntax::errors::emitter::ColorConfig; +use syntax::{ast, codemap}; use syntax::feature_gate::UnstableFeatures; use syntax::parse::token; +use errors; +use errors::emitter::ColorConfig; use std::cell::{RefCell, Cell}; use std::collections::{HashMap, HashSet}; diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 408782a698a2a..04d176c36c8cf 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -13,13 +13,13 @@ pub use self::StructType::*; pub use self::TypeBound::*; -use syntax; -use syntax::codemap::Span; use syntax::abi; use syntax::ast; use syntax::ast::{Name, NodeId}; use syntax::attr; use syntax::ptr::P; +use syntax_pos::{self, Span}; + use rustc::hir; pub struct Module { @@ -56,8 +56,8 @@ impl Module { vis: hir::Inherited, stab: None, depr: None, - where_outer: syntax::codemap::DUMMY_SP, - where_inner: syntax::codemap::DUMMY_SP, + where_outer: syntax_pos::DUMMY_SP, + where_inner: syntax_pos::DUMMY_SP, attrs : hir::HirVec::new(), extern_crates: Vec::new(), imports : Vec::new(), diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 321bc51f90386..2e2f99897733d 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -26,10 +26,11 @@ use std::fmt::Display; use std::io; use std::io::prelude::*; -use syntax::codemap::{CodeMap, Span}; +use syntax::codemap::CodeMap; use syntax::parse::lexer::{self, Reader, TokenAndSpan}; use syntax::parse::token; use syntax::parse; +use syntax_pos::Span; /// Highlights `src`, returning the HTML output. pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>) -> String { diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 86aad10e02fc7..2015bb295eabd 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -42,9 +42,11 @@ extern crate rustc_back; extern crate rustc_metadata; extern crate serialize; #[macro_use] extern crate syntax; +extern crate syntax_pos; extern crate test as testing; extern crate rustc_unicode; #[macro_use] extern crate log; +extern crate rustc_errors as errors; extern crate serialize as rustc_serialize; // used by deriving diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index c17af55ca10af..95d02d6ce4bee 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -35,8 +35,8 @@ use rustc_driver::driver::phase_2_configure_and_expand; use rustc_metadata::cstore::CStore; use rustc_resolve::MakeGlobMap; use syntax::codemap::CodeMap; -use syntax::errors; -use syntax::errors::emitter::ColorConfig; +use errors; +use errors::emitter::ColorConfig; use syntax::parse::token; use core; @@ -229,8 +229,9 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, let data = Arc::new(Mutex::new(Vec::new())); let codemap = Rc::new(CodeMap::new()); let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()), - None, - codemap.clone()); + None, + codemap.clone(), + errors::snippet::FormatMode::EnvironmentSelected); let old = io::set_panic(box Sink(data.clone())); let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout())); diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index b0b55a76e266e..92424f113f987 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -18,7 +18,7 @@ use syntax::abi; use syntax::ast; use syntax::attr; use syntax::attr::AttrMetaMethods; -use syntax::codemap::Span; +use syntax_pos::Span; use rustc::hir::map as hir_map; 
use rustc::hir::def::Def; diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml index 964f2dcb6b6b7..8b61e1b0d3a38 100644 --- a/src/libsyntax/Cargo.toml +++ b/src/libsyntax/Cargo.toml @@ -12,3 +12,5 @@ crate-type = ["dylib"] serialize = { path = "../libserialize" } log = { path = "../liblog" } rustc_bitflags = { path = "../librustc_bitflags" } +syntax_pos = { path = "../libsyntax_pos" } +rustc_errors = { path = "../librustc_errors" } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index c3cc84b770155..a352715b20b12 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,22 +14,19 @@ pub use self::TyParamBound::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::PathParameters::*; +pub use util::ThinVec; -use attr::{ThinAttributes, HasAttrs}; -use codemap::{mk_sp, respan, Span, Spanned, DUMMY_SP, ExpnId}; +use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId}; +use codemap::{respan, Spanned}; use abi::Abi; use errors; -use ext::base; -use ext::tt::macro_parser; use parse::token::{self, keywords, InternedString}; -use parse::lexer; -use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use print::pprust; use ptr::P; +use tokenstream::{TokenTree}; use std::fmt; use std::rc::Rc; -use std::borrow::Cow; use std::hash::{Hash, Hasher}; use serialize::{Encodable, Decodable, Encoder, Decoder}; @@ -560,9 +557,6 @@ impl PartialEq for MetaItemKind { pub struct Block { /// Statements in a block pub stmts: Vec, - /// An expression at the end of the block - /// without a semicolon, if any - pub expr: Option>, pub id: NodeId, /// Distinguishes between `unsafe { ... }` and `{ ... }` pub rules: BlockCheckMode, @@ -611,7 +605,6 @@ impl Pat { PatKind::Range(_, _) | PatKind::Ident(_, _, _) | PatKind::Path(..) | - PatKind::QPath(_, _) | PatKind::Mac(_) => { true } @@ -659,15 +652,11 @@ pub enum PatKind { /// 0 <= position <= subpats.len() TupleStruct(Path, Vec>, Option), - /// A path pattern. - /// Such pattern can be resolved to a unit struct/variant or a constant. - Path(Path), - - /// An associated const named using the qualified path `::CONST` or - /// `::CONST`. Associated consts from inherent impls can be - /// referred to as simply `T::CONST`, in which case they will end up as - /// PatKind::Path, and the resolver will have to sort that out. - QPath(QSelf, Path), + /// A possibly qualified path pattern. + /// Unquailfied path patterns `A::B::C` can legally refer to variants, structs, constants + /// or associated constants. Quailfied path patterns `::B::C`/`::B::C` can + /// only legally refer to associated constants. + Path(Option, Path), /// A tuple pattern `(a, b)`. /// If the `..` pattern fragment is present, then `Option` denotes its position. @@ -818,45 +807,34 @@ impl UnOp { } /// A statement -pub type Stmt = Spanned; +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] +pub struct Stmt { + pub id: NodeId, + pub node: StmtKind, + pub span: Span, +} impl fmt::Debug for Stmt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "stmt({}: {})", - self.node.id() - .map_or(Cow::Borrowed(""),|id|Cow::Owned(id.to_string())), - pprust::stmt_to_string(self)) + write!(f, "stmt({}: {})", self.id.to_string(), pprust::stmt_to_string(self)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum StmtKind { - /// Could be an item or a local (let) binding: - Decl(P, NodeId), + /// A local (let) binding. 
+ Local(P), - /// Expr without trailing semi-colon (must have unit type): - Expr(P, NodeId), + /// An item definition. + Item(P), - /// Expr with trailing semi-colon (may have any type): - Semi(P, NodeId), + /// Expr without trailing semi-colon. + Expr(P), - Mac(P, MacStmtStyle, ThinAttributes), -} - -impl StmtKind { - pub fn id(&self) -> Option { - match *self { - StmtKind::Decl(_, id) => Some(id), - StmtKind::Expr(_, id) => Some(id), - StmtKind::Semi(_, id) => Some(id), - StmtKind::Mac(..) => None, - } - } + Semi(P), - pub fn attrs(&self) -> &[Attribute] { - HasAttrs::attrs(self) - } + Mac(P<(Mac, MacStmtStyle, ThinVec)>), } #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] @@ -883,29 +861,7 @@ pub struct Local { pub init: Option>, pub id: NodeId, pub span: Span, - pub attrs: ThinAttributes, -} - -impl Local { - pub fn attrs(&self) -> &[Attribute] { - HasAttrs::attrs(self) - } -} - -pub type Decl = Spanned; - -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub enum DeclKind { - /// A local (let) binding: - Local(P), - /// An item binding: - Item(P), -} - -impl Decl { - pub fn attrs(&self) -> &[Attribute] { - HasAttrs::attrs(self) - } + pub attrs: ThinVec, } /// An arm of a 'match'. @@ -953,13 +909,7 @@ pub struct Expr { pub id: NodeId, pub node: ExprKind, pub span: Span, - pub attrs: ThinAttributes -} - -impl Expr { - pub fn attrs(&self) -> &[Attribute] { - HasAttrs::attrs(self) - } + pub attrs: ThinVec } impl fmt::Debug for Expr { @@ -1082,7 +1032,7 @@ pub enum ExprKind { /// A `break`, with an optional label to break Break(Option), /// A `continue`, with an optional label - Again(Option), + Continue(Option), /// A `return`, with an optional value to be returned Ret(Option>), @@ -1138,193 +1088,6 @@ pub enum CaptureBy { Ref, } -/// A delimited sequence of token trees -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub struct Delimited { - /// The type of delimiter - pub delim: token::DelimToken, - /// The span covering the opening delimiter - pub open_span: Span, - /// The delimited sequence of token trees - pub tts: Vec, - /// The span covering the closing delimiter - pub close_span: Span, -} - -impl Delimited { - /// Returns the opening delimiter as a token. - pub fn open_token(&self) -> token::Token { - token::OpenDelim(self.delim) - } - - /// Returns the closing delimiter as a token. - pub fn close_token(&self) -> token::Token { - token::CloseDelim(self.delim) - } - - /// Returns the opening delimiter as a token tree. - pub fn open_tt(&self) -> TokenTree { - TokenTree::Token(self.open_span, self.open_token()) - } - - /// Returns the closing delimiter as a token tree. - pub fn close_tt(&self) -> TokenTree { - TokenTree::Token(self.close_span, self.close_token()) - } -} - -/// A sequence of token trees -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub struct SequenceRepetition { - /// The sequence of token trees - pub tts: Vec, - /// The optional separator - pub separator: Option, - /// Whether the sequence can be repeated zero (*), or one or more times (+) - pub op: KleeneOp, - /// The number of `MatchNt`s that appear in the sequence (and subsequences) - pub num_captures: usize, -} - -/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) -/// for token sequences. 
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] -pub enum KleeneOp { - ZeroOrMore, - OneOrMore, -} - -/// When the main rust parser encounters a syntax-extension invocation, it -/// parses the arguments to the invocation as a token-tree. This is a very -/// loose structure, such that all sorts of different AST-fragments can -/// be passed to syntax extensions using a uniform type. -/// -/// If the syntax extension is an MBE macro, it will attempt to match its -/// LHS token tree against the provided token tree, and if it finds a -/// match, will transcribe the RHS token tree, splicing in any captured -/// macro_parser::matched_nonterminals into the `SubstNt`s it finds. -/// -/// The RHS of an MBE macro is the only place `SubstNt`s are substituted. -/// Nothing special happens to misnamed or misplaced `SubstNt`s. -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub enum TokenTree { - /// A single token - Token(Span, token::Token), - /// A delimited sequence of token trees - Delimited(Span, Rc), - - // This only makes sense in MBE macros. - - /// A kleene-style repetition sequence with a span - // FIXME(eddyb) #12938 Use DST. - Sequence(Span, Rc), -} - -impl TokenTree { - pub fn len(&self) -> usize { - match *self { - TokenTree::Token(_, token::DocComment(name)) => { - match doc_comment_style(&name.as_str()) { - AttrStyle::Outer => 2, - AttrStyle::Inner => 3 - } - } - TokenTree::Token(_, token::SpecialVarNt(..)) => 2, - TokenTree::Token(_, token::MatchNt(..)) => 3, - TokenTree::Delimited(_, ref delimed) => { - delimed.tts.len() + 2 - } - TokenTree::Sequence(_, ref seq) => { - seq.tts.len() - } - TokenTree::Token(..) => 0 - } - } - - pub fn get_tt(&self, index: usize) -> TokenTree { - match (self, index) { - (&TokenTree::Token(sp, token::DocComment(_)), 0) => { - TokenTree::Token(sp, token::Pound) - } - (&TokenTree::Token(sp, token::DocComment(name)), 1) - if doc_comment_style(&name.as_str()) == AttrStyle::Inner => { - TokenTree::Token(sp, token::Not) - } - (&TokenTree::Token(sp, token::DocComment(name)), _) => { - let stripped = strip_doc_comment_decoration(&name.as_str()); - - // Searches for the occurrences of `"#*` and returns the minimum number of `#`s - // required to wrap the text. 
- let num_of_hashes = stripped.chars().scan(0, |cnt, x| { - *cnt = if x == '"' { - 1 - } else if *cnt != 0 && x == '#' { - *cnt + 1 - } else { - 0 - }; - Some(*cnt) - }).max().unwrap_or(0); - - TokenTree::Delimited(sp, Rc::new(Delimited { - delim: token::Bracket, - open_span: sp, - tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))), - TokenTree::Token(sp, token::Eq), - TokenTree::Token(sp, token::Literal( - token::StrRaw(token::intern(&stripped), num_of_hashes), None))], - close_span: sp, - })) - } - (&TokenTree::Delimited(_, ref delimed), _) => { - if index == 0 { - return delimed.open_tt(); - } - if index == delimed.tts.len() + 1 { - return delimed.close_tt(); - } - delimed.tts[index - 1].clone() - } - (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => { - let v = [TokenTree::Token(sp, token::Dollar), - TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))]; - v[index].clone() - } - (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => { - let v = [TokenTree::Token(sp, token::SubstNt(name)), - TokenTree::Token(sp, token::Colon), - TokenTree::Token(sp, token::Ident(kind))]; - v[index].clone() - } - (&TokenTree::Sequence(_, ref seq), _) => { - seq.tts[index].clone() - } - _ => panic!("Cannot expand a token tree") - } - } - - /// Returns the `Span` corresponding to this token tree. - pub fn get_span(&self) -> Span { - match *self { - TokenTree::Token(span, _) => span, - TokenTree::Delimited(span, _) => span, - TokenTree::Sequence(span, _) => span, - } - } - - /// Use this token tree as a matcher to parse given tts. - pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree]) - -> macro_parser::NamedParseResult { - // `None` is because we're not interpolating - let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic, - None, - None, - tts.iter().cloned().collect(), - true); - macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch) - } -} - pub type Mac = Spanned; /// Represents a macro invocation. The Path indicates which macro @@ -1337,7 +1100,6 @@ pub type Mac = Spanned; pub struct Mac_ { pub path: Path, pub tts: Vec, - pub ctxt: SyntaxContext, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] @@ -1430,6 +1192,7 @@ pub enum TraitItemKind { Const(P, Option>), Method(MethodSig, Option>), Type(TyParamBounds, Option>), + Macro(Mac), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] @@ -2109,12 +1872,6 @@ pub struct Item { pub span: Span, } -impl Item { - pub fn attrs(&self) -> &[Attribute] { - &self.attrs - } -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ItemKind { /// An`extern crate` item, with optional original crate name. 
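Illustrative sketch, not part of the patch: the libsyntax/ast.rs rework above flattens `Stmt` into a plain struct (`id`, `node`, `span`) and drops the `Decl`/`DeclKind` indirection together with the per-variant `NodeId`s, so downstream code now matches `StmtKind` variants directly. A minimal example of what such a match might look like under the new definitions; the function name `describe_stmt` is hypothetical and only assumes the `ast::{Stmt, StmtKind}` shapes introduced in this diff.

extern crate syntax;

use syntax::ast::{Stmt, StmtKind};

// `let` bindings and items are now direct `StmtKind` variants instead of being
// wrapped in `StmtKind::Decl(..)`; the statement's `NodeId` lives on `Stmt` itself.
fn describe_stmt(stmt: &Stmt) -> &'static str {
    match stmt.node {
        StmtKind::Local(_) => "a `let` binding",
        StmtKind::Item(_) => "an item definition",
        StmtKind::Expr(_) => "an expression without a trailing semicolon",
        StmtKind::Semi(_) => "an expression statement with a trailing semicolon",
        StmtKind::Mac(_) => "a macro invocation in statement position",
    }
}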
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index e36e15802f0ae..da2967e306f65 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -16,16 +16,16 @@ pub use self::IntType::*; use ast; use ast::{AttrId, Attribute, Attribute_, MetaItem, MetaItemKind}; -use ast::{Stmt, StmtKind, DeclKind}; -use ast::{Expr, Item, Local, Decl}; -use codemap::{Span, Spanned, spanned, dummy_spanned}; -use codemap::BytePos; +use ast::{Expr, Item, Local, Stmt, StmtKind}; +use codemap::{spanned, dummy_spanned, Spanned}; +use syntax_pos::{Span, BytePos}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::token::InternedString; use parse::{ParseSess, token}; use ptr::P; +use util::ThinVec; use std::cell::{RefCell, Cell}; use std::collections::HashSet; @@ -803,100 +803,11 @@ impl IntType { } } -/// A list of attributes, behind a optional box as -/// a space optimization. -pub type ThinAttributes = Option>>; - -pub trait ThinAttributesExt { - fn map_thin_attrs(self, f: F) -> Self - where F: FnOnce(Vec) -> Vec; - fn prepend(mut self, attrs: Self) -> Self; - fn append(mut self, attrs: Self) -> Self; - fn update(&mut self, f: F) - where Self: Sized, - F: FnOnce(Self) -> Self; - fn as_attr_slice(&self) -> &[Attribute]; - fn into_attr_vec(self) -> Vec; -} - -impl ThinAttributesExt for ThinAttributes { - fn map_thin_attrs(self, f: F) -> Self - where F: FnOnce(Vec) -> Vec - { - f(self.map(|b| *b).unwrap_or(Vec::new())).into_thin_attrs() - } - - fn prepend(self, attrs: ThinAttributes) -> Self { - attrs.map_thin_attrs(|mut attrs| { - attrs.extend(self.into_attr_vec()); - attrs - }) - } - - fn append(self, attrs: ThinAttributes) -> Self { - self.map_thin_attrs(|mut self_| { - self_.extend(attrs.into_attr_vec()); - self_ - }) - } - - fn update(&mut self, f: F) - where Self: Sized, - F: FnOnce(ThinAttributes) -> ThinAttributes - { - let self_ = f(self.take()); - *self = self_; - } - - fn as_attr_slice(&self) -> &[Attribute] { - match *self { - Some(ref b) => b, - None => &[], - } - } - - fn into_attr_vec(self) -> Vec { - match self { - Some(b) => *b, - None => Vec::new(), - } - } -} - -pub trait AttributesExt { - fn into_thin_attrs(self) -> ThinAttributes; -} - -impl AttributesExt for Vec { - fn into_thin_attrs(self) -> ThinAttributes { - if self.len() == 0 { - None - } else { - Some(Box::new(self)) - } - } -} - pub trait HasAttrs: Sized { fn attrs(&self) -> &[ast::Attribute]; fn map_attrs) -> Vec>(self, f: F) -> Self; } -/// A cheap way to add Attributes to an AST node. 
-pub trait WithAttrs { - // FIXME: Could be extended to anything IntoIter - fn with_attrs(self, attrs: ThinAttributes) -> Self; -} - -impl WithAttrs for T { - fn with_attrs(self, attrs: ThinAttributes) -> Self { - self.map_attrs(|mut orig_attrs| { - orig_attrs.extend(attrs.into_attr_vec()); - orig_attrs - }) - } -} - impl HasAttrs for Vec { fn attrs(&self) -> &[Attribute] { &self @@ -906,12 +817,12 @@ impl HasAttrs for Vec { } } -impl HasAttrs for ThinAttributes { +impl HasAttrs for ThinVec { fn attrs(&self) -> &[Attribute] { - self.as_attr_slice() + &self } fn map_attrs) -> Vec>(self, f: F) -> Self { - self.map_thin_attrs(f) + f(self.into()).into() } } @@ -924,38 +835,28 @@ impl HasAttrs for P { } } -impl HasAttrs for DeclKind { - fn attrs(&self) -> &[Attribute] { - match *self { - DeclKind::Local(ref local) => local.attrs(), - DeclKind::Item(ref item) => item.attrs(), - } - } - - fn map_attrs) -> Vec>(self, f: F) -> Self { - match self { - DeclKind::Local(local) => DeclKind::Local(local.map_attrs(f)), - DeclKind::Item(item) => DeclKind::Item(item.map_attrs(f)), - } - } -} - impl HasAttrs for StmtKind { fn attrs(&self) -> &[Attribute] { match *self { - StmtKind::Decl(ref decl, _) => decl.attrs(), - StmtKind::Expr(ref expr, _) | StmtKind::Semi(ref expr, _) => expr.attrs(), - StmtKind::Mac(_, _, ref attrs) => attrs.attrs(), + StmtKind::Local(ref local) => local.attrs(), + StmtKind::Item(ref item) => item.attrs(), + StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.attrs(), + StmtKind::Mac(ref mac) => { + let (_, _, ref attrs) = **mac; + attrs.attrs() + } } } fn map_attrs) -> Vec>(self, f: F) -> Self { match self { - StmtKind::Decl(decl, id) => StmtKind::Decl(decl.map_attrs(f), id), - StmtKind::Expr(expr, id) => StmtKind::Expr(expr.map_attrs(f), id), - StmtKind::Semi(expr, id) => StmtKind::Semi(expr.map_attrs(f), id), - StmtKind::Mac(mac, style, attrs) => - StmtKind::Mac(mac, style, attrs.map_attrs(f)), + StmtKind::Local(local) => StmtKind::Local(local.map_attrs(f)), + StmtKind::Item(item) => StmtKind::Item(item.map_attrs(f)), + StmtKind::Expr(expr) => StmtKind::Expr(expr.map_attrs(f)), + StmtKind::Semi(expr) => StmtKind::Semi(expr.map_attrs(f)), + StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, style, attrs)| { + (mac, style, attrs.map_attrs(f)) + })), } } } @@ -982,4 +883,4 @@ derive_has_attrs_from_field! { Item, Expr, Local, ast::ForeignItem, ast::StructField, ast::ImplItem, ast::TraitItem, ast::Arm } -derive_has_attrs_from_field! { Decl: .node, Stmt: .node, ast::Variant: .node.attrs } +derive_has_attrs_from_field! { Stmt: .node, ast::Variant: .node.attrs } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 5e1335b45aa0c..743f96d737e2d 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -19,269 +19,18 @@ pub use self::ExpnFormat::*; -use std::cell::{Cell, RefCell}; -use std::ops::{Add, Sub}; +use std::cell::RefCell; use std::path::{Path,PathBuf}; use std::rc::Rc; -use std::cmp; use std::env; -use std::{fmt, fs}; +use std::fs; use std::io::{self, Read}; - -use serialize::{Encodable, Decodable, Encoder, Decoder}; +pub use syntax_pos::*; +use errors::CodeMapper; use ast::Name; -// _____________________________________________________________________________ -// Pos, BytePos, CharPos -// - -pub trait Pos { - fn from_usize(n: usize) -> Self; - fn to_usize(&self) -> usize; -} - -/// A byte offset. Keep this small (currently 32-bits), as AST contains -/// a lot of them. 
-#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] -pub struct BytePos(pub u32); - -/// A character offset. Because of multibyte utf8 characters, a byte offset -/// is not equivalent to a character offset. The CodeMap will convert BytePos -/// values to CharPos values as necessary. -#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] -pub struct CharPos(pub usize); - -// FIXME: Lots of boilerplate in these impls, but so far my attempts to fix -// have been unsuccessful - -impl Pos for BytePos { - fn from_usize(n: usize) -> BytePos { BytePos(n as u32) } - fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize } -} - -impl Add for BytePos { - type Output = BytePos; - - fn add(self, rhs: BytePos) -> BytePos { - BytePos((self.to_usize() + rhs.to_usize()) as u32) - } -} - -impl Sub for BytePos { - type Output = BytePos; - - fn sub(self, rhs: BytePos) -> BytePos { - BytePos((self.to_usize() - rhs.to_usize()) as u32) - } -} - -impl Encodable for BytePos { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u32(self.0) - } -} - -impl Decodable for BytePos { - fn decode(d: &mut D) -> Result { - Ok(BytePos(d.read_u32()?)) - } -} - -impl Pos for CharPos { - fn from_usize(n: usize) -> CharPos { CharPos(n) } - fn to_usize(&self) -> usize { let CharPos(n) = *self; n } -} - -impl Add for CharPos { - type Output = CharPos; - - fn add(self, rhs: CharPos) -> CharPos { - CharPos(self.to_usize() + rhs.to_usize()) - } -} - -impl Sub for CharPos { - type Output = CharPos; - - fn sub(self, rhs: CharPos) -> CharPos { - CharPos(self.to_usize() - rhs.to_usize()) - } -} - -// _____________________________________________________________________________ -// Span, MultiSpan, Spanned -// - -/// Spans represent a region of code, used for error reporting. Positions in spans -/// are *absolute* positions from the beginning of the codemap, not positions -/// relative to FileMaps. Methods on the CodeMap can be used to relate spans back -/// to the original source. -/// You must be careful if the span crosses more than one file - you will not be -/// able to use many of the functions on spans in codemap and you cannot assume -/// that the length of the span = hi - lo; there may be space in the BytePos -/// range between files. -#[derive(Clone, Copy, Hash, PartialEq, Eq)] -pub struct Span { - pub lo: BytePos, - pub hi: BytePos, - /// Information about where the macro came from, if this piece of - /// code was created by a macro expansion. - pub expn_id: ExpnId -} - -/// A collection of spans. Spans have two orthogonal attributes: -/// -/// - they can be *primary spans*. In this case they are the locus of -/// the error, and would be rendered with `^^^`. -/// - they can have a *label*. In this case, the label is written next -/// to the mark in the snippet when we render. -#[derive(Clone)] -pub struct MultiSpan { - primary_spans: Vec, - span_labels: Vec<(Span, String)>, -} - -#[derive(Clone, Debug)] -pub struct SpanLabel { - /// The span we are going to include in the final snippet. - pub span: Span, - - /// Is this a primary span? This is the "locus" of the message, - /// and is indicated with a `^^^^` underline, versus `----`. - pub is_primary: bool, - - /// What label should we attach to this span (if any)? 
- pub label: Option, -} - -pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION }; - -// Generic span to be used for code originating from the command line -pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0), - hi: BytePos(0), - expn_id: COMMAND_LINE_EXPN }; - -impl Span { - /// Returns a new span representing just the end-point of this span - pub fn end_point(self) -> Span { - let lo = cmp::max(self.hi.0 - 1, self.lo.0); - Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id} - } - - /// Returns `self` if `self` is not the dummy span, and `other` otherwise. - pub fn substitute_dummy(self, other: Span) -> Span { - if self.source_equal(&DUMMY_SP) { other } else { self } - } - - pub fn contains(self, other: Span) -> bool { - self.lo <= other.lo && other.hi <= self.hi - } - - /// Return true if the spans are equal with regards to the source text. - /// - /// Use this instead of `==` when either span could be generated code, - /// and you only care that they point to the same bytes of source text. - pub fn source_equal(&self, other: &Span) -> bool { - self.lo == other.lo && self.hi == other.hi - } - - /// Returns `Some(span)`, a union of `self` and `other`, on overlap. - pub fn merge(self, other: Span) -> Option { - if self.expn_id != other.expn_id { - return None; - } - - if (self.lo <= other.lo && self.hi > other.lo) || - (self.lo >= other.lo && self.lo < other.hi) { - Some(Span { - lo: cmp::min(self.lo, other.lo), - hi: cmp::max(self.hi, other.hi), - expn_id: self.expn_id, - }) - } else { - None - } - } - - /// Returns `Some(span)`, where the start is trimmed by the end of `other` - pub fn trim_start(self, other: Span) -> Option { - if self.hi > other.hi { - Some(Span { lo: cmp::max(self.lo, other.hi), .. self }) - } else { - None - } - } -} - -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] -pub struct Spanned { - pub node: T, - pub span: Span, -} - -impl Encodable for Span { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_struct("Span", 2, |s| { - s.emit_struct_field("lo", 0, |s| { - self.lo.encode(s) - })?; - - s.emit_struct_field("hi", 1, |s| { - self.hi.encode(s) - }) - }) - } -} - -impl Decodable for Span { - fn decode(d: &mut D) -> Result { - d.read_struct("Span", 2, |d| { - let lo = d.read_struct_field("lo", 0, |d| { - BytePos::decode(d) - })?; - - let hi = d.read_struct_field("hi", 1, |d| { - BytePos::decode(d) - })?; - - Ok(mk_sp(lo, hi)) - }) - } -} - -fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Span {{ lo: {:?}, hi: {:?}, expn_id: {:?} }}", - span.lo, span.hi, span.expn_id) -} - -thread_local!(pub static SPAN_DEBUG: Cell fmt::Result> = - Cell::new(default_span_debug)); - -impl fmt::Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - SPAN_DEBUG.with(|span_debug| span_debug.get()(*self, f)) - } -} - -pub fn spanned(lo: BytePos, hi: BytePos, t: T) -> Spanned { - respan(mk_sp(lo, hi), t) -} - -pub fn respan(sp: Span, t: T) -> Spanned { - Spanned {node: t, span: sp} -} - -pub fn dummy_spanned(t: T) -> Spanned { - respan(DUMMY_SP, t) -} - -/* assuming that we're not in macro expansion */ -pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span {lo: lo, hi: hi, expn_id: NO_EXPANSION} -} - /// Return the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. 
@@ -295,123 +44,31 @@ pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span { } } -impl MultiSpan { - pub fn new() -> MultiSpan { - MultiSpan { - primary_spans: vec![], - span_labels: vec![] - } - } - - pub fn from_span(primary_span: Span) -> MultiSpan { - MultiSpan { - primary_spans: vec![primary_span], - span_labels: vec![] - } - } - - pub fn from_spans(vec: Vec) -> MultiSpan { - MultiSpan { - primary_spans: vec, - span_labels: vec![] - } - } - - pub fn push_span_label(&mut self, span: Span, label: String) { - self.span_labels.push((span, label)); - } - - /// Selects the first primary span (if any) - pub fn primary_span(&self) -> Option { - self.primary_spans.first().cloned() - } - - /// Returns all primary spans. - pub fn primary_spans(&self) -> &[Span] { - &self.primary_spans - } - - /// Returns the strings to highlight. We always ensure that there - /// is an entry for each of the primary spans -- for each primary - /// span P, if there is at least one label with span P, we return - /// those labels (marked as primary). But otherwise we return - /// `SpanLabel` instances with empty labels. - pub fn span_labels(&self) -> Vec { - let is_primary = |span| self.primary_spans.contains(&span); - let mut span_labels = vec![]; - - for &(span, ref label) in &self.span_labels { - span_labels.push(SpanLabel { - span: span, - is_primary: is_primary(span), - label: Some(label.clone()) - }); - } - - for &span in &self.primary_spans { - if !span_labels.iter().any(|sl| sl.span == span) { - span_labels.push(SpanLabel { - span: span, - is_primary: true, - label: None - }); - } - } - - span_labels - } +/// The source of expansion. +#[derive(Clone, Hash, Debug, PartialEq, Eq)] +pub enum ExpnFormat { + /// e.g. #[derive(...)] + MacroAttribute(Name), + /// e.g. `format!()` + MacroBang(Name), } -impl From for MultiSpan { - fn from(span: Span) -> MultiSpan { - MultiSpan::from_span(span) - } +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +pub struct Spanned { + pub node: T, + pub span: Span, } -// _____________________________________________________________________________ -// Loc, LocWithOpt, FileMapAndLine, FileMapAndBytePos -// - -/// A source code location used for error reporting -#[derive(Debug)] -pub struct Loc { - /// Information about the original source - pub file: Rc, - /// The (1-based) line number - pub line: usize, - /// The (0-based) column offset - pub col: CharPos +pub fn spanned(lo: BytePos, hi: BytePos, t: T) -> Spanned { + respan(mk_sp(lo, hi), t) } -/// A source code location used as the result of lookup_char_pos_adj -// Actually, *none* of the clients use the filename *or* file field; -// perhaps they should just be removed. -#[derive(Debug)] -pub struct LocWithOpt { - pub filename: FileName, - pub line: usize, - pub col: CharPos, - pub file: Option>, +pub fn respan(sp: Span, t: T) -> Spanned { + Spanned {node: t, span: sp} } -// used to be structural records. Better names, anyone? -#[derive(Debug)] -pub struct FileMapAndLine { pub fm: Rc, pub line: usize } -#[derive(Debug)] -pub struct FileMapAndBytePos { pub fm: Rc, pub pos: BytePos } - - -// _____________________________________________________________________________ -// ExpnFormat, NameAndSpan, ExpnInfo, ExpnId -// - -/// The source of expansion. -#[derive(Clone, Hash, Debug, PartialEq, Eq)] -pub enum ExpnFormat { - /// e.g. #[derive(...)] - MacroAttribute(Name), - /// e.g. 
`format!()` - MacroBang(Name), +pub fn dummy_spanned(t: T) -> Spanned { + respan(DUMMY_SP, t) } #[derive(Clone, Hash, Debug)] @@ -454,257 +111,10 @@ pub struct ExpnInfo { pub callee: NameAndSpan } -#[derive(PartialEq, Eq, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Copy)] -pub struct ExpnId(u32); - -pub const NO_EXPANSION: ExpnId = ExpnId(!0); -// For code appearing from the command line -pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); - -impl ExpnId { - pub fn from_u32(id: u32) -> ExpnId { - ExpnId(id) - } - - pub fn into_u32(self) -> u32 { - self.0 - } -} - // _____________________________________________________________________________ // FileMap, MultiByteChar, FileName, FileLines // -pub type FileName = String; - -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct LineInfo { - /// Index of line, starting from 0. - pub line_index: usize, - - /// Column in line where span begins, starting from 0. - pub start_col: CharPos, - - /// Column in line where span ends, starting from 0, exclusive. - pub end_col: CharPos, -} - -pub struct FileLines { - pub file: Rc, - pub lines: Vec -} - -/// Identifies an offset of a multi-byte character in a FileMap -#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)] -pub struct MultiByteChar { - /// The absolute offset of the character in the CodeMap - pub pos: BytePos, - /// The number of bytes, >=2 - pub bytes: usize, -} - -/// A single source in the CodeMap. -pub struct FileMap { - /// The name of the file that the source came from, source that doesn't - /// originate from files has names between angle brackets by convention, - /// e.g. `` - pub name: FileName, - /// The absolute path of the file that the source came from. - pub abs_path: Option, - /// The complete source code - pub src: Option>, - /// The start position of this source in the CodeMap - pub start_pos: BytePos, - /// The end position of this source in the CodeMap - pub end_pos: BytePos, - /// Locations of lines beginnings in the source code - pub lines: RefCell>, - /// Locations of multi-byte characters in the source code - pub multibyte_chars: RefCell>, -} - -impl Encodable for FileMap { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_struct("FileMap", 6, |s| { - s.emit_struct_field("name", 0, |s| self.name.encode(s))?; - s.emit_struct_field("abs_path", 1, |s| self.abs_path.encode(s))?; - s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?; - s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?; - s.emit_struct_field("lines", 4, |s| { - let lines = self.lines.borrow(); - // store the length - s.emit_u32(lines.len() as u32)?; - - if !lines.is_empty() { - // In order to preserve some space, we exploit the fact that - // the lines list is sorted and individual lines are - // probably not that long. Because of that we can store lines - // as a difference list, using as little space as possible - // for the differences. - let max_line_length = if lines.len() == 1 { - 0 - } else { - lines.windows(2) - .map(|w| w[1] - w[0]) - .map(|bp| bp.to_usize()) - .max() - .unwrap() - }; - - let bytes_per_diff: u8 = match max_line_length { - 0 ... 0xFF => 1, - 0x100 ... 0xFFFF => 2, - _ => 4 - }; - - // Encode the number of bytes used per diff. - bytes_per_diff.encode(s)?; - - // Encode the first element. - lines[0].encode(s)?; - - let diff_iter = (&lines[..]).windows(2) - .map(|w| (w[1] - w[0])); - - match bytes_per_diff { - 1 => for diff in diff_iter { (diff.0 as u8).encode(s)? 
}, - 2 => for diff in diff_iter { (diff.0 as u16).encode(s)? }, - 4 => for diff in diff_iter { diff.0.encode(s)? }, - _ => unreachable!() - } - } - - Ok(()) - })?; - s.emit_struct_field("multibyte_chars", 5, |s| { - (*self.multibyte_chars.borrow()).encode(s) - }) - }) - } -} - -impl Decodable for FileMap { - fn decode(d: &mut D) -> Result { - - d.read_struct("FileMap", 6, |d| { - let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?; - let abs_path: Option = - d.read_struct_field("abs_path", 1, |d| Decodable::decode(d))?; - let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?; - let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?; - let lines: Vec = d.read_struct_field("lines", 4, |d| { - let num_lines: u32 = Decodable::decode(d)?; - let mut lines = Vec::with_capacity(num_lines as usize); - - if num_lines > 0 { - // Read the number of bytes used per diff. - let bytes_per_diff: u8 = Decodable::decode(d)?; - - // Read the first element. - let mut line_start: BytePos = Decodable::decode(d)?; - lines.push(line_start); - - for _ in 1..num_lines { - let diff = match bytes_per_diff { - 1 => d.read_u8()? as u32, - 2 => d.read_u16()? as u32, - 4 => d.read_u32()?, - _ => unreachable!() - }; - - line_start = line_start + BytePos(diff); - - lines.push(line_start); - } - } - - Ok(lines) - })?; - let multibyte_chars: Vec = - d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?; - Ok(FileMap { - name: name, - abs_path: abs_path, - start_pos: start_pos, - end_pos: end_pos, - src: None, - lines: RefCell::new(lines), - multibyte_chars: RefCell::new(multibyte_chars) - }) - }) - } -} - -impl fmt::Debug for FileMap { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "FileMap({})", self.name) - } -} - -impl FileMap { - /// EFFECT: register a start-of-line offset in the - /// table of line-beginnings. - /// UNCHECKED INVARIANT: these offsets must be added in the right - /// order and must be in the right places; there is shared knowledge - /// about what ends a line between this file and parse.rs - /// WARNING: pos param here is the offset relative to start of CodeMap, - /// and CodeMap will append a newline when adding a filemap without a newline at the end, - /// so the safe way to call this is with value calculated as - /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap. - pub fn next_line(&self, pos: BytePos) { - // the new charpos must be > the last one (or it's the first one). - let mut lines = self.lines.borrow_mut(); - let line_len = lines.len(); - assert!(line_len == 0 || ((*lines)[line_len - 1] < pos)); - lines.push(pos); - } - - /// get a line from the list of pre-computed line-beginnings. - /// line-number here is 0-based. - pub fn get_line(&self, line_number: usize) -> Option<&str> { - match self.src { - Some(ref src) => { - let lines = self.lines.borrow(); - lines.get(line_number).map(|&line| { - let begin: BytePos = line - self.start_pos; - let begin = begin.to_usize(); - // We can't use `lines.get(line_number+1)` because we might - // be parsing when we call this function and thus the current - // line is the last one we have line info for. 
- let slice = &src[begin..]; - match slice.find('\n') { - Some(e) => &slice[..e], - None => slice - } - }) - } - None => None - } - } - - pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) { - assert!(bytes >=2 && bytes <= 4); - let mbc = MultiByteChar { - pos: pos, - bytes: bytes, - }; - self.multibyte_chars.borrow_mut().push(mbc); - } - - pub fn is_real_file(&self) -> bool { - !(self.name.starts_with("<") && - self.name.ends_with(">")) - } - - pub fn is_imported(&self) -> bool { - self.src.is_none() - } - - fn count_lines(&self) -> usize { - self.lines.borrow().len() - } -} - /// An abstraction over the fs operations used by the Parser. pub trait FileLoader { /// Query the existence of a file. @@ -1392,52 +802,24 @@ impl CodeMap { } } -pub struct MacroBacktrace { - /// span where macro was applied to generate this code - pub call_site: Span, - - /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]") - pub macro_decl_name: String, - - /// span where macro was defined (if known) - pub def_site_span: Option, -} - -// _____________________________________________________________________________ -// SpanLinesError, SpanSnippetError, DistinctSources, MalformedCodemapPositions -// - -pub type FileLinesResult = Result; - -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum SpanLinesError { - IllFormedSpan(Span), - DistinctSources(DistinctSources), -} - -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum SpanSnippetError { - IllFormedSpan(Span), - DistinctSources(DistinctSources), - MalformedForCodemap(MalformedCodemapPositions), - SourceNotAvailable { filename: String } -} - -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct DistinctSources { - begin: (String, BytePos), - end: (String, BytePos) -} - -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct MalformedCodemapPositions { - name: String, - source_len: usize, - begin_pos: BytePos, - end_pos: BytePos +impl CodeMapper for CodeMap { + fn lookup_char_pos(&self, pos: BytePos) -> Loc { + self.lookup_char_pos(pos) + } + fn span_to_lines(&self, sp: Span) -> FileLinesResult { + self.span_to_lines(sp) + } + fn span_to_string(&self, sp: Span) -> String { + self.span_to_string(sp) + } + fn span_to_filename(&self, sp: Span) -> FileName { + self.span_to_filename(sp) + } + fn macro_backtrace(&self, span: Span) -> Vec { + self.macro_backtrace(span) + } } - // _____________________________________________________________________________ // Tests // @@ -1445,6 +827,13 @@ pub struct MalformedCodemapPositions { #[cfg(test)] mod tests { use super::*; + use errors::{Level, CodeSuggestion}; + use errors::emitter::EmitterWriter; + use errors::snippet::{SnippetData, RenderedLine, FormatMode}; + use std::sync::{Arc, Mutex}; + use std::io::{self, Write}; + use std::str::from_utf8; + use std::rc::Rc; #[test] fn t1 () { @@ -1688,6 +1077,69 @@ mod tests { blork.rs:1:1: 1:12\n `first line.`\n"); } + /// Returns the span corresponding to the `n`th occurrence of + /// `substring` in `source_text`. 
+ trait CodeMapExtension { + fn span_substr(&self, + file: &Rc, + source_text: &str, + substring: &str, + n: usize) + -> Span; + } + + impl CodeMapExtension for CodeMap { + fn span_substr(&self, + file: &Rc, + source_text: &str, + substring: &str, + n: usize) + -> Span + { + println!("span_substr(file={:?}/{:?}, substring={:?}, n={})", + file.name, file.start_pos, substring, n); + let mut i = 0; + let mut hi = 0; + loop { + let offset = source_text[hi..].find(substring).unwrap_or_else(|| { + panic!("source_text `{}` does not have {} occurrences of `{}`, only {}", + source_text, n, substring, i); + }); + let lo = hi + offset; + hi = lo + substring.len(); + if i == n { + let span = Span { + lo: BytePos(lo as u32 + file.start_pos.0), + hi: BytePos(hi as u32 + file.start_pos.0), + expn_id: NO_EXPANSION, + }; + assert_eq!(&self.span_to_snippet(span).unwrap()[..], + substring); + return span; + } + i += 1; + } + } + } + + fn splice(start: Span, end: Span) -> Span { + Span { + lo: start.lo, + hi: end.hi, + expn_id: NO_EXPANSION, + } + } + + fn make_string(lines: &[RenderedLine]) -> String { + lines.iter() + .flat_map(|rl| { + rl.text.iter() + .map(|s| &s.text[..]) + .chain(Some("\n")) + }) + .collect() + } + fn init_expansion_chain(cm: &CodeMap) -> Span { // Creates an expansion chain containing two recursive calls // root -> expA -> expA -> expB -> expB -> end @@ -1767,4 +1219,761 @@ r"blork2.rs:2:1: 2:12 "; assert_eq!(sstr, res_str); } + + struct Sink(Arc>>); + impl Write for Sink { + fn write(&mut self, data: &[u8]) -> io::Result { + Write::write(&mut *self.0.lock().unwrap(), data) + } + fn flush(&mut self) -> io::Result<()> { Ok(()) } + } + + // Diagnostic doesn't align properly in span where line number increases by one digit + #[test] + fn test_hilight_suggestion_issue_11715() { + let data = Arc::new(Mutex::new(Vec::new())); + let cm = Rc::new(CodeMap::new()); + let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), + None, + cm.clone(), + FormatMode::NewErrorFormat); + let content = "abcdefg + koksi + line3 + line4 + cinq + line6 + line7 + line8 + line9 + line10 + e-lä-vän + tolv + dreizehn + "; + let file = cm.new_filemap_and_lines("dummy.txt", None, content); + let start = file.lines.borrow()[10]; + let end = file.lines.borrow()[11]; + let sp = mk_sp(start, end); + let lvl = Level::Error; + println!("highlight_lines"); + ew.highlight_lines(&sp.into(), lvl).unwrap(); + println!("done"); + let vec = data.lock().unwrap().clone(); + let vec: &[u8] = &vec; + let str = from_utf8(vec).unwrap(); + println!("r#\"\n{}\"#", str); + assert_eq!(str, &r#" + --> dummy.txt:11:1 + |> +11 |> e-lä-vän + |> ^ +"#[1..]); + } + + #[test] + fn test_single_span_splice() { + // Test that a `MultiSpan` containing a single span splices a substition correctly + let cm = CodeMap::new(); + let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; + let selection = " \n ~~\n~~~\n~~~~~ \n \n"; + cm.new_filemap_and_lines("blork.rs", None, inputtext); + let sp = span_from_selection(inputtext, selection); + let msp: MultiSpan = sp.into(); + + // check that we are extracting the text we thought we were extracting + assert_eq!(&cm.span_to_snippet(sp).unwrap(), "BB\nCCC\nDDDDD"); + + let substitute = "ZZZZZZ".to_owned(); + let expected = "bbbbZZZZZZddddd"; + let suggest = CodeSuggestion { + msp: msp, + substitutes: vec![substitute], + }; + assert_eq!(suggest.splice_lines(&cm), expected); + } + + #[test] + fn test_multi_span_splice() { + // Test that a `MultiSpan` containing multiple spans splices a substition correctly 
+ let cm = CodeMap::new(); + let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; + let selection1 = " \n \n \n \n ~ \n"; // intentionally out of order + let selection2 = " \n ~~\n~~~\n~~~~~ \n \n"; + cm.new_filemap_and_lines("blork.rs", None, inputtext); + let sp1 = span_from_selection(inputtext, selection1); + let sp2 = span_from_selection(inputtext, selection2); + let msp: MultiSpan = MultiSpan::from_spans(vec![sp1, sp2]); + + let expected = "bbbbZZZZZZddddd\neXYZe"; + let suggest = CodeSuggestion { + msp: msp, + substitutes: vec!["ZZZZZZ".to_owned(), + "XYZ".to_owned()] + }; + + assert_eq!(suggest.splice_lines(&cm), expected); + } + + #[test] + fn test_multispan_highlight() { + let data = Arc::new(Mutex::new(Vec::new())); + let cm = Rc::new(CodeMap::new()); + let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())), + None, + cm.clone(), + FormatMode::NewErrorFormat); + + let inp = "_____aaaaaa____bbbbbb__cccccdd_"; + let sp1 = " ~~~~~~ "; + let sp2 = " ~~~~~~ "; + let sp3 = " ~~~~~ "; + let sp4 = " ~~~~ "; + let sp34 = " ~~~~~~~ "; + + let expect_start = &r#" + --> dummy.txt:1:6 + |> +1 |> _____aaaaaa____bbbbbb__cccccdd_ + |> ^^^^^^ ^^^^^^ ^^^^^^^ +"#[1..]; + + let span = |sp, expected| { + let sp = span_from_selection(inp, sp); + assert_eq!(&cm.span_to_snippet(sp).unwrap(), expected); + sp + }; + cm.new_filemap_and_lines("dummy.txt", None, inp); + let sp1 = span(sp1, "aaaaaa"); + let sp2 = span(sp2, "bbbbbb"); + let sp3 = span(sp3, "ccccc"); + let sp4 = span(sp4, "ccdd"); + let sp34 = span(sp34, "cccccdd"); + + let spans = vec![sp1, sp2, sp3, sp4]; + + let test = |expected, highlight: &mut FnMut()| { + data.lock().unwrap().clear(); + highlight(); + let vec = data.lock().unwrap().clone(); + let actual = from_utf8(&vec[..]).unwrap(); + println!("actual=\n{}", actual); + assert_eq!(actual, expected); + }; + + let msp = MultiSpan::from_spans(vec![sp1, sp2, sp34]); + test(expect_start, &mut || { + diag.highlight_lines(&msp, Level::Error).unwrap(); + }); + test(expect_start, &mut || { + let msp = MultiSpan::from_spans(spans.clone()); + diag.highlight_lines(&msp, Level::Error).unwrap(); + }); + } + + #[test] + fn test_huge_multispan_highlight() { + let data = Arc::new(Mutex::new(Vec::new())); + let cm = Rc::new(CodeMap::new()); + let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())), + None, + cm.clone(), + FormatMode::NewErrorFormat); + + let inp = "aaaaa\n\ + aaaaa\n\ + aaaaa\n\ + bbbbb\n\ + ccccc\n\ + xxxxx\n\ + yyyyy\n\ + _____\n\ + ddd__eee_\n\ + elided\n\ + __f_gg"; + let file = cm.new_filemap_and_lines("dummy.txt", None, inp); + + let span = |lo, hi, (off_lo, off_hi)| { + let lines = file.lines.borrow(); + let (mut lo, mut hi): (BytePos, BytePos) = (lines[lo], lines[hi]); + lo.0 += off_lo; + hi.0 += off_hi; + mk_sp(lo, hi) + }; + let sp0 = span(4, 6, (0, 5)); + let sp1 = span(0, 6, (0, 5)); + let sp2 = span(8, 8, (0, 3)); + let sp3 = span(8, 8, (5, 8)); + let sp4 = span(10, 10, (2, 3)); + let sp5 = span(10, 10, (4, 6)); + + let expect0 = &r#" + --> dummy.txt:5:1 + |> +5 |> ccccc + |> ^ +... +9 |> ddd__eee_ + |> ^^^ ^^^ +10 |> elided +11 |> __f_gg + |> ^ ^^ +"#[1..]; + + let expect = &r#" + --> dummy.txt:1:1 + |> +1 |> aaaaa + |> ^ +... +9 |> ddd__eee_ + |> ^^^ ^^^ +10 |> elided +11 |> __f_gg + |> ^ ^^ +"#[1..]; + + macro_rules! 
test { + ($expected: expr, $highlight: expr) => ({ + data.lock().unwrap().clear(); + $highlight(); + let vec = data.lock().unwrap().clone(); + let actual = from_utf8(&vec[..]).unwrap(); + println!("actual:"); + println!("{}", actual); + println!("expected:"); + println!("{}", $expected); + assert_eq!(&actual[..], &$expected[..]); + }); + } + + let msp0 = MultiSpan::from_spans(vec![sp0, sp2, sp3, sp4, sp5]); + let msp = MultiSpan::from_spans(vec![sp1, sp2, sp3, sp4, sp5]); + + test!(expect0, || { + diag.highlight_lines(&msp0, Level::Error).unwrap(); + }); + test!(expect, || { + diag.highlight_lines(&msp, Level::Error).unwrap(); + }); + } + + #[test] + fn tab() { + let file_text = " +fn foo() { +\tbar; +} +"; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + let span_bar = cm.span_substr(&foo, file_text, "bar", 0); + + let mut snippet = SnippetData::new(cm, Some(span_bar), FormatMode::NewErrorFormat); + snippet.push(span_bar, true, None); + + let lines = snippet.render_lines(); + let text = make_string(&lines); + assert_eq!(&text[..], &" + --> foo.rs:3:2 + |> +3 |> \tbar; + |> \t^^^ +"[1..]); + } + + #[test] + fn one_line() { + let file_text = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0); + let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1); + let span_semi = cm.span_substr(&foo, file_text, ";", 0); + + let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat); + snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here"))); + snippet.push(span_vec1, false, Some(format!("error occurs here"))); + snippet.push(span_semi, false, Some(format!("previous borrow ends here"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + + let text: String = make_string(&lines); + + println!("text=\n{}", text); + assert_eq!(&text[..], &r#" + ::: foo.rs + |> +3 |> vec.push(vec.pop().unwrap()); + |> --- --- - previous borrow ends here + |> | | + |> | error occurs here + |> previous borrow of `vec` occurs here +"#[1..]); + } + + #[test] + fn two_files() { + let file_text_foo = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let file_text_bar = r#" +fn bar() { + // these blank links here + // serve to ensure that the line numbers + // from bar.rs + // require more digits + + + + + + + + + + + vec.push(); + + // this line will get elided + + vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo_map = cm.new_filemap_and_lines("foo.rs", None, file_text_foo); + let span_foo_vec0 = cm.span_substr(&foo_map, file_text_foo, "vec", 0); + let span_foo_vec1 = cm.span_substr(&foo_map, file_text_foo, "vec", 1); + let span_foo_semi = cm.span_substr(&foo_map, file_text_foo, ";", 0); + + let bar_map = cm.new_filemap_and_lines("bar.rs", None, file_text_bar); + let span_bar_vec0 = cm.span_substr(&bar_map, file_text_bar, "vec", 0); + let span_bar_vec1 = cm.span_substr(&bar_map, file_text_bar, "vec", 1); + let span_bar_semi = cm.span_substr(&bar_map, file_text_bar, ";", 0); + + let mut snippet = SnippetData::new(cm, Some(span_foo_vec1), FormatMode::NewErrorFormat); + snippet.push(span_foo_vec0, false, Some(format!("a"))); + snippet.push(span_foo_vec1, true, Some(format!("b"))); + snippet.push(span_foo_semi, false, Some(format!("c"))); + snippet.push(span_bar_vec0, false, Some(format!("d"))); + 
snippet.push(span_bar_vec1, false, Some(format!("e"))); + snippet.push(span_bar_semi, false, Some(format!("f"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + + let text: String = make_string(&lines); + + println!("text=\n{}", text); + + // Note that the `|>` remain aligned across both files: + assert_eq!(&text[..], &r#" + --> foo.rs:3:14 + |> +3 |> vec.push(vec.pop().unwrap()); + |> --- ^^^ - c + |> | | + |> | b + |> a + ::: bar.rs + |> +17 |> vec.push(); + |> --- - f + |> | + |> d +... +21 |> vec.pop().unwrap()); + |> --- e +"#[1..]); + } + + #[test] + fn multi_line() { + let file_text = r#" +fn foo() { + let name = find_id(&data, 22).unwrap(); + + // Add one more item we forgot to the vector. Silly us. + data.push(Data { name: format!("Hera"), id: 66 }); + + // Print everything out. + println!("Name: {:?}", name); + println!("Data: {:?}", data); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + let span_data0 = cm.span_substr(&foo, file_text, "data", 0); + let span_data1 = cm.span_substr(&foo, file_text, "data", 1); + let span_rbrace = cm.span_substr(&foo, file_text, "}", 3); + + let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat); + snippet.push(span_data0, false, Some(format!("immutable borrow begins here"))); + snippet.push(span_data1, false, Some(format!("mutable borrow occurs here"))); + snippet.push(span_rbrace, false, Some(format!("immutable borrow ends here"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + + let text: String = make_string(&lines); + + println!("text=\n{}", text); + assert_eq!(&text[..], &r#" + ::: foo.rs + |> +3 |> let name = find_id(&data, 22).unwrap(); + |> ---- immutable borrow begins here +... +6 |> data.push(Data { name: format!("Hera"), id: 66 }); + |> ---- mutable borrow occurs here +... 
+11 |> } + |> - immutable borrow ends here +"#[1..]); + } + + #[test] + fn overlapping() { + let file_text = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + let span0 = cm.span_substr(&foo, file_text, "vec.push", 0); + let span1 = cm.span_substr(&foo, file_text, "vec", 0); + let span2 = cm.span_substr(&foo, file_text, "ec.push", 0); + let span3 = cm.span_substr(&foo, file_text, "unwrap", 0); + + let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat); + snippet.push(span0, false, Some(format!("A"))); + snippet.push(span1, false, Some(format!("B"))); + snippet.push(span2, false, Some(format!("C"))); + snippet.push(span3, false, Some(format!("D"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + let text: String = make_string(&lines); + + println!("text=r#\"\n{}\".trim_left()", text); + assert_eq!(&text[..], &r#" + ::: foo.rs + |> +3 |> vec.push(vec.pop().unwrap()); + |> -------- ------ D + |> || + |> |C + |> A + |> B +"#[1..]); + } + + #[test] + fn one_line_out_of_order() { + let file_text = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0); + let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1); + let span_semi = cm.span_substr(&foo, file_text, ";", 0); + + // intentionally don't push the snippets left to right + let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat); + snippet.push(span_vec1, false, Some(format!("error occurs here"))); + snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here"))); + snippet.push(span_semi, false, Some(format!("previous borrow ends here"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + let text: String = make_string(&lines); + + println!("text=r#\"\n{}\".trim_left()", text); + assert_eq!(&text[..], &r#" + ::: foo.rs + |> +3 |> vec.push(vec.pop().unwrap()); + |> --- --- - previous borrow ends here + |> | | + |> | error occurs here + |> previous borrow of `vec` occurs here +"#[1..]); + } + + #[test] + fn elide_unnecessary_lines() { + let file_text = r#" +fn foo() { + let mut vec = vec![0, 1, 2]; + let mut vec2 = vec; + vec2.push(3); + vec2.push(4); + vec2.push(5); + vec2.push(6); + vec.push(7); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + let span_vec0 = cm.span_substr(&foo, file_text, "vec", 3); + let span_vec1 = cm.span_substr(&foo, file_text, "vec", 8); + + let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat); + snippet.push(span_vec0, false, Some(format!("`vec` moved here because it \ + has type `collections::vec::Vec`"))); + snippet.push(span_vec1, false, Some(format!("use of moved value: `vec`"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + let text: String = make_string(&lines); + println!("text=r#\"\n{}\".trim_left()", text); + assert_eq!(&text[..], &r#" + ::: foo.rs + |> +4 |> let mut vec2 = vec; + |> --- `vec` moved here because it has type `collections::vec::Vec` +... 
+9 |> vec.push(7); + |> --- use of moved value: `vec` +"#[1..]); + } + + #[test] + fn spans_without_labels() { + let file_text = r#" +fn foo() { + let mut vec = vec![0, 1, 2]; + let mut vec2 = vec; + vec2.push(3); + vec2.push(4); + vec2.push(5); + vec2.push(6); + vec.push(7); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + + let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat); + for i in 0..4 { + let span_veci = cm.span_substr(&foo, file_text, "vec", i); + snippet.push(span_veci, false, None); + } + + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("text=&r#\"\n{}\n\"#[1..]", text); + assert_eq!(text, &r#" + ::: foo.rs + |> +3 |> let mut vec = vec![0, 1, 2]; + |> --- --- +4 |> let mut vec2 = vec; + |> --- --- +"#[1..]); + } + + #[test] + fn span_long_selection() { + let file_text = r#" +impl SomeTrait for () { + fn foo(x: u32) { + // impl 1 + // impl 2 + // impl 3 + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + + let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat); + let fn_span = cm.span_substr(&foo, file_text, "fn", 0); + let rbrace_span = cm.span_substr(&foo, file_text, "}", 0); + snippet.push(splice(fn_span, rbrace_span), false, None); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs + |> +3 |> fn foo(x: u32) { + |> - +"#[1..]); + } + + #[test] + fn span_overlap_label() { + // Test that we don't put `x_span` to the right of its highlight, + // since there is another highlight that overlaps it. + + let file_text = r#" + fn foo(x: u32) { + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + + let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat); + let fn_span = cm.span_substr(&foo, file_text, "fn foo(x: u32)", 0); + let x_span = cm.span_substr(&foo, file_text, "x", 0); + snippet.push(fn_span, false, Some(format!("fn_span"))); + snippet.push(x_span, false, Some(format!("x_span"))); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs + |> +2 |> fn foo(x: u32) { + |> -------------- + |> | | + |> | x_span + |> fn_span +"#[1..]); + } + + #[test] + fn span_overlap_label2() { + // Test that we don't put `x_span` to the right of its highlight, + // since there is another highlight that overlaps it. In this + // case, the overlap is only at the beginning, but it's still + // better to show the beginning more clearly. 
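+ // Here `fn_span` covers `fn foo(x` and `x_span` covers `x: u32)`, so the two highlights overlap only on `x`.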
+ + let file_text = r#" + fn foo(x: u32) { + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + + let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat); + let fn_span = cm.span_substr(&foo, file_text, "fn foo(x", 0); + let x_span = cm.span_substr(&foo, file_text, "x: u32)", 0); + snippet.push(fn_span, false, Some(format!("fn_span"))); + snippet.push(x_span, false, Some(format!("x_span"))); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs + |> +2 |> fn foo(x: u32) { + |> -------------- + |> | | + |> | x_span + |> fn_span +"#[1..]); + } + + #[test] + fn span_overlap_label3() { + // Test that we don't put `x_span` to the right of its highlight, + // since there is another highlight that overlaps it. In this + // case, the overlap is only at the beginning, but it's still + // better to show the beginning more clearly. + + let file_text = r#" + fn foo() { + let closure = || { + inner + }; + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + + let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat); + + let closure_span = { + let closure_start_span = cm.span_substr(&foo, file_text, "||", 0); + let closure_end_span = cm.span_substr(&foo, file_text, "}", 0); + splice(closure_start_span, closure_end_span) + }; + + let inner_span = cm.span_substr(&foo, file_text, "inner", 0); + + snippet.push(closure_span, false, Some(format!("foo"))); + snippet.push(inner_span, false, Some(format!("bar"))); + + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs + |> +3 |> let closure = || { + |> - foo +4 |> inner + |> ----- bar +"#[1..]); + } + + #[test] + fn span_empty() { + // In one of the unit tests, we found that the parser sometimes + // gives empty spans, and in particular it supplied an EOF span + // like this one, which points at the very end. We want to + // fallback gracefully in this case. 
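+ // (The empty span is produced below by setting `lo` equal to `hi` on the second `}`.)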
+ + let file_text = r#" +fn main() { + struct Foo; + + impl !Sync for Foo {} + + unsafe impl Send for &'static Foo { + // error: cross-crate traits with a default impl, like `core::marker::Send`, + // can only be implemented for a struct/enum type, not + // `&'static Foo` +}"#; + + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); + + let mut rbrace_span = cm.span_substr(&foo, file_text, "}", 1); + rbrace_span.lo = rbrace_span.hi; + + let mut snippet = SnippetData::new(cm.clone(), + Some(rbrace_span), + FormatMode::NewErrorFormat); + snippet.push(rbrace_span, false, None); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + --> foo.rs:11:2 + |> +11 |> } + |> - +"#[1..]); + } } diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 0e5d6841c8258..961763c6025fd 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -11,7 +11,8 @@ use attr::{AttrMetaMethods, HasAttrs}; use feature_gate::{emit_feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue}; use fold::Folder; -use {ast, fold, attr}; +use {fold, attr}; +use ast; use codemap::{Spanned, respan}; use parse::{ParseSess, token}; use ptr::P; @@ -212,17 +213,10 @@ impl<'a> fold::Folder for StripUnconfigured<'a> { } fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVector { - let is_item = match stmt.node { - ast::StmtKind::Decl(ref decl, _) => match decl.node { - ast::DeclKind::Item(_) => true, - _ => false, - }, - _ => false, - }; - // avoid calling `visit_stmt_or_expr_attrs` on items - if !is_item { - self.visit_stmt_or_expr_attrs(stmt.attrs()); + match stmt.node { + ast::StmtKind::Item(_) => {} + _ => self.visit_stmt_or_expr_attrs(stmt.attrs()), } self.configure(stmt).map(|stmt| fold::noop_fold_stmt(stmt, self)) diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs index 181b32594f127..5bbd18bd9ee2e 100644 --- a/src/libsyntax/diagnostics/metadata.rs +++ b/src/libsyntax/diagnostics/metadata.rs @@ -20,7 +20,7 @@ use std::io::Write; use std::error::Error; use rustc_serialize::json::as_json; -use codemap::Span; +use syntax_pos::Span; use ext::base::ExtCtxt; use diagnostics::plugin::{ErrorMap, ErrorInfo}; diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 26088b1242e2a..4e50299e836b3 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -13,16 +13,19 @@ use std::collections::BTreeMap; use std::env; use ast; -use ast::{Ident, Name, TokenTree}; -use codemap::Span; +use ast::{Ident, Name}; +use syntax_pos::Span; use ext::base::{ExtCtxt, MacEager, MacResult}; use ext::build::AstBuilder; use parse::token; use ptr::P; +use tokenstream::{TokenTree}; use util::small_vector::SmallVector; use diagnostics::metadata::output_metadata; +pub use errors::*; + // Maximum width of any line in an extended error description (inclusive). const MAX_DESCRIPTION_WIDTH: usize = 80; diff --git a/src/libsyntax/errors/snippet/test.rs b/src/libsyntax/errors/snippet/test.rs deleted file mode 100644 index 79e40a091659e..0000000000000 --- a/src/libsyntax/errors/snippet/test.rs +++ /dev/null @@ -1,597 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. 
This file may not be copied, modified, or distributed -// except according to those terms. - -// Code for testing annotated snippets. - -#![cfg(test)] - -use codemap::{BytePos, CodeMap, FileMap, NO_EXPANSION, Span}; -use std::rc::Rc; -use super::{RenderedLine, SnippetData}; - -/// Returns the span corresponding to the `n`th occurrence of -/// `substring` in `source_text`. -trait CodeMapExtension { - fn span_substr(&self, - file: &Rc, - source_text: &str, - substring: &str, - n: usize) - -> Span; -} - -impl CodeMapExtension for CodeMap { - fn span_substr(&self, - file: &Rc, - source_text: &str, - substring: &str, - n: usize) - -> Span - { - println!("span_substr(file={:?}/{:?}, substring={:?}, n={})", - file.name, file.start_pos, substring, n); - let mut i = 0; - let mut hi = 0; - loop { - let offset = source_text[hi..].find(substring).unwrap_or_else(|| { - panic!("source_text `{}` does not have {} occurrences of `{}`, only {}", - source_text, n, substring, i); - }); - let lo = hi + offset; - hi = lo + substring.len(); - if i == n { - let span = Span { - lo: BytePos(lo as u32 + file.start_pos.0), - hi: BytePos(hi as u32 + file.start_pos.0), - expn_id: NO_EXPANSION, - }; - assert_eq!(&self.span_to_snippet(span).unwrap()[..], - substring); - return span; - } - i += 1; - } - } -} - -fn splice(start: Span, end: Span) -> Span { - Span { - lo: start.lo, - hi: end.hi, - expn_id: NO_EXPANSION, - } -} - -fn make_string(lines: &[RenderedLine]) -> String { - lines.iter() - .flat_map(|rl| { - rl.text.iter() - .map(|s| &s.text[..]) - .chain(Some("\n")) - }) - .collect() -} - -#[test] -fn tab() { - let file_text = " -fn foo() { -\tbar; -} -"; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - let span_bar = cm.span_substr(&foo, file_text, "bar", 0); - - let mut snippet = SnippetData::new(cm, Some(span_bar)); - snippet.push(span_bar, true, None); - - let lines = snippet.render_lines(); - let text = make_string(&lines); - assert_eq!(&text[..], &" - --> foo.rs:3:2 - |> -3 |> \tbar; - |> \t^^^ -"[1..]); -} - -#[test] -fn one_line() { - let file_text = r#" -fn foo() { - vec.push(vec.pop().unwrap()); -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0); - let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1); - let span_semi = cm.span_substr(&foo, file_text, ";", 0); - - let mut snippet = SnippetData::new(cm, None); - snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here"))); - snippet.push(span_vec1, false, Some(format!("error occurs here"))); - snippet.push(span_semi, false, Some(format!("previous borrow ends here"))); - - let lines = snippet.render_lines(); - println!("{:#?}", lines); - - let text: String = make_string(&lines); - - println!("text=\n{}", text); - assert_eq!(&text[..], &r#" - ::: foo.rs - |> -3 |> vec.push(vec.pop().unwrap()); - |> --- --- - previous borrow ends here - |> | | - |> | error occurs here - |> previous borrow of `vec` occurs here -"#[1..]); -} - -#[test] -fn two_files() { - let file_text_foo = r#" -fn foo() { - vec.push(vec.pop().unwrap()); -} -"#; - - let file_text_bar = r#" -fn bar() { - // these blank links here - // serve to ensure that the line numbers - // from bar.rs - // require more digits - - - - - - - - - - - vec.push(); - - // this line will get elided - - vec.pop().unwrap()); -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo_map = 
cm.new_filemap_and_lines("foo.rs", None, file_text_foo); - let span_foo_vec0 = cm.span_substr(&foo_map, file_text_foo, "vec", 0); - let span_foo_vec1 = cm.span_substr(&foo_map, file_text_foo, "vec", 1); - let span_foo_semi = cm.span_substr(&foo_map, file_text_foo, ";", 0); - - let bar_map = cm.new_filemap_and_lines("bar.rs", None, file_text_bar); - let span_bar_vec0 = cm.span_substr(&bar_map, file_text_bar, "vec", 0); - let span_bar_vec1 = cm.span_substr(&bar_map, file_text_bar, "vec", 1); - let span_bar_semi = cm.span_substr(&bar_map, file_text_bar, ";", 0); - - let mut snippet = SnippetData::new(cm, Some(span_foo_vec1)); - snippet.push(span_foo_vec0, false, Some(format!("a"))); - snippet.push(span_foo_vec1, true, Some(format!("b"))); - snippet.push(span_foo_semi, false, Some(format!("c"))); - snippet.push(span_bar_vec0, false, Some(format!("d"))); - snippet.push(span_bar_vec1, false, Some(format!("e"))); - snippet.push(span_bar_semi, false, Some(format!("f"))); - - let lines = snippet.render_lines(); - println!("{:#?}", lines); - - let text: String = make_string(&lines); - - println!("text=\n{}", text); - - // Note that the `|>` remain aligned across both files: - assert_eq!(&text[..], &r#" - --> foo.rs:3:14 - |> -3 |> vec.push(vec.pop().unwrap()); - |> --- ^^^ - c - |> | | - |> | b - |> a - ::: bar.rs - |> -17 |> vec.push(); - |> --- - f - |> | - |> d -... -21 |> vec.pop().unwrap()); - |> --- e -"#[1..]); -} - -#[test] -fn multi_line() { - let file_text = r#" -fn foo() { - let name = find_id(&data, 22).unwrap(); - - // Add one more item we forgot to the vector. Silly us. - data.push(Data { name: format!("Hera"), id: 66 }); - - // Print everything out. - println!("Name: {:?}", name); - println!("Data: {:?}", data); -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - let span_data0 = cm.span_substr(&foo, file_text, "data", 0); - let span_data1 = cm.span_substr(&foo, file_text, "data", 1); - let span_rbrace = cm.span_substr(&foo, file_text, "}", 3); - - let mut snippet = SnippetData::new(cm, None); - snippet.push(span_data0, false, Some(format!("immutable borrow begins here"))); - snippet.push(span_data1, false, Some(format!("mutable borrow occurs here"))); - snippet.push(span_rbrace, false, Some(format!("immutable borrow ends here"))); - - let lines = snippet.render_lines(); - println!("{:#?}", lines); - - let text: String = make_string(&lines); - - println!("text=\n{}", text); - assert_eq!(&text[..], &r#" - ::: foo.rs - |> -3 |> let name = find_id(&data, 22).unwrap(); - |> ---- immutable borrow begins here -... -6 |> data.push(Data { name: format!("Hera"), id: 66 }); - |> ---- mutable borrow occurs here -... 
-11 |> } - |> - immutable borrow ends here -"#[1..]); -} - -#[test] -fn overlapping() { - let file_text = r#" -fn foo() { - vec.push(vec.pop().unwrap()); -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - let span0 = cm.span_substr(&foo, file_text, "vec.push", 0); - let span1 = cm.span_substr(&foo, file_text, "vec", 0); - let span2 = cm.span_substr(&foo, file_text, "ec.push", 0); - let span3 = cm.span_substr(&foo, file_text, "unwrap", 0); - - let mut snippet = SnippetData::new(cm, None); - snippet.push(span0, false, Some(format!("A"))); - snippet.push(span1, false, Some(format!("B"))); - snippet.push(span2, false, Some(format!("C"))); - snippet.push(span3, false, Some(format!("D"))); - - let lines = snippet.render_lines(); - println!("{:#?}", lines); - let text: String = make_string(&lines); - - println!("text=r#\"\n{}\".trim_left()", text); - assert_eq!(&text[..], &r#" - ::: foo.rs - |> -3 |> vec.push(vec.pop().unwrap()); - |> -------- ------ D - |> || - |> |C - |> A - |> B -"#[1..]); -} - -#[test] -fn one_line_out_of_order() { - let file_text = r#" -fn foo() { - vec.push(vec.pop().unwrap()); -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0); - let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1); - let span_semi = cm.span_substr(&foo, file_text, ";", 0); - - // intentionally don't push the snippets left to right - let mut snippet = SnippetData::new(cm, None); - snippet.push(span_vec1, false, Some(format!("error occurs here"))); - snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here"))); - snippet.push(span_semi, false, Some(format!("previous borrow ends here"))); - - let lines = snippet.render_lines(); - println!("{:#?}", lines); - let text: String = make_string(&lines); - - println!("text=r#\"\n{}\".trim_left()", text); - assert_eq!(&text[..], &r#" - ::: foo.rs - |> -3 |> vec.push(vec.pop().unwrap()); - |> --- --- - previous borrow ends here - |> | | - |> | error occurs here - |> previous borrow of `vec` occurs here -"#[1..]); -} - -#[test] -fn elide_unnecessary_lines() { - let file_text = r#" -fn foo() { - let mut vec = vec![0, 1, 2]; - let mut vec2 = vec; - vec2.push(3); - vec2.push(4); - vec2.push(5); - vec2.push(6); - vec.push(7); -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - let span_vec0 = cm.span_substr(&foo, file_text, "vec", 3); - let span_vec1 = cm.span_substr(&foo, file_text, "vec", 8); - - let mut snippet = SnippetData::new(cm, None); - snippet.push(span_vec0, false, Some(format!("`vec` moved here because it \ - has type `collections::vec::Vec`"))); - snippet.push(span_vec1, false, Some(format!("use of moved value: `vec`"))); - - let lines = snippet.render_lines(); - println!("{:#?}", lines); - let text: String = make_string(&lines); - println!("text=r#\"\n{}\".trim_left()", text); - assert_eq!(&text[..], &r#" - ::: foo.rs - |> -4 |> let mut vec2 = vec; - |> --- `vec` moved here because it has type `collections::vec::Vec` -... 
-9 |> vec.push(7); - |> --- use of moved value: `vec` -"#[1..]); -} - -#[test] -fn spans_without_labels() { - let file_text = r#" -fn foo() { - let mut vec = vec![0, 1, 2]; - let mut vec2 = vec; - vec2.push(3); - vec2.push(4); - vec2.push(5); - vec2.push(6); - vec.push(7); -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - - let mut snippet = SnippetData::new(cm.clone(), None); - for i in 0..4 { - let span_veci = cm.span_substr(&foo, file_text, "vec", i); - snippet.push(span_veci, false, None); - } - - let lines = snippet.render_lines(); - let text: String = make_string(&lines); - println!("text=&r#\"\n{}\n\"#[1..]", text); - assert_eq!(text, &r#" - ::: foo.rs - |> -3 |> let mut vec = vec![0, 1, 2]; - |> --- --- -4 |> let mut vec2 = vec; - |> --- --- -"#[1..]); -} - -#[test] -fn span_long_selection() { - let file_text = r#" -impl SomeTrait for () { - fn foo(x: u32) { - // impl 1 - // impl 2 - // impl 3 - } -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - - let mut snippet = SnippetData::new(cm.clone(), None); - let fn_span = cm.span_substr(&foo, file_text, "fn", 0); - let rbrace_span = cm.span_substr(&foo, file_text, "}", 0); - snippet.push(splice(fn_span, rbrace_span), false, None); - let lines = snippet.render_lines(); - let text: String = make_string(&lines); - println!("r#\"\n{}\"", text); - assert_eq!(text, &r#" - ::: foo.rs - |> -3 |> fn foo(x: u32) { - |> - -"#[1..]); -} - -#[test] -fn span_overlap_label() { - // Test that we don't put `x_span` to the right of its highlight, - // since there is another highlight that overlaps it. - - let file_text = r#" - fn foo(x: u32) { - } -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - - let mut snippet = SnippetData::new(cm.clone(), None); - let fn_span = cm.span_substr(&foo, file_text, "fn foo(x: u32)", 0); - let x_span = cm.span_substr(&foo, file_text, "x", 0); - snippet.push(fn_span, false, Some(format!("fn_span"))); - snippet.push(x_span, false, Some(format!("x_span"))); - let lines = snippet.render_lines(); - let text: String = make_string(&lines); - println!("r#\"\n{}\"", text); - assert_eq!(text, &r#" - ::: foo.rs - |> -2 |> fn foo(x: u32) { - |> -------------- - |> | | - |> | x_span - |> fn_span -"#[1..]); -} - -#[test] -fn span_overlap_label2() { - // Test that we don't put `x_span` to the right of its highlight, - // since there is another highlight that overlaps it. In this - // case, the overlap is only at the beginning, but it's still - // better to show the beginning more clearly. - - let file_text = r#" - fn foo(x: u32) { - } -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - - let mut snippet = SnippetData::new(cm.clone(), None); - let fn_span = cm.span_substr(&foo, file_text, "fn foo(x", 0); - let x_span = cm.span_substr(&foo, file_text, "x: u32)", 0); - snippet.push(fn_span, false, Some(format!("fn_span"))); - snippet.push(x_span, false, Some(format!("x_span"))); - let lines = snippet.render_lines(); - let text: String = make_string(&lines); - println!("r#\"\n{}\"", text); - assert_eq!(text, &r#" - ::: foo.rs - |> -2 |> fn foo(x: u32) { - |> -------------- - |> | | - |> | x_span - |> fn_span -"#[1..]); -} - -#[test] -fn span_overlap_label3() { - // Test that we don't put `x_span` to the right of its highlight, - // since there is another highlight that overlaps it. 
In this - // case, the overlap is only at the beginning, but it's still - // better to show the beginning more clearly. - - let file_text = r#" - fn foo() { - let closure = || { - inner - }; - } -} -"#; - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - - let mut snippet = SnippetData::new(cm.clone(), None); - - let closure_span = { - let closure_start_span = cm.span_substr(&foo, file_text, "||", 0); - let closure_end_span = cm.span_substr(&foo, file_text, "}", 0); - splice(closure_start_span, closure_end_span) - }; - - let inner_span = cm.span_substr(&foo, file_text, "inner", 0); - - snippet.push(closure_span, false, Some(format!("foo"))); - snippet.push(inner_span, false, Some(format!("bar"))); - - let lines = snippet.render_lines(); - let text: String = make_string(&lines); - println!("r#\"\n{}\"", text); - assert_eq!(text, &r#" - ::: foo.rs - |> -3 |> let closure = || { - |> - foo -4 |> inner - |> ----- bar -"#[1..]); -} - -#[test] -fn span_empty() { - // In one of the unit tests, we found that the parser sometimes - // gives empty spans, and in particular it supplied an EOF span - // like this one, which points at the very end. We want to - // fallback gracefully in this case. - - let file_text = r#" -fn main() { - struct Foo; - - impl !Sync for Foo {} - - unsafe impl Send for &'static Foo { - // error: cross-crate traits with a default impl, like `core::marker::Send`, - // can only be implemented for a struct/enum type, not - // `&'static Foo` -}"#; - - - let cm = Rc::new(CodeMap::new()); - let foo = cm.new_filemap_and_lines("foo.rs", None, file_text); - - let mut rbrace_span = cm.span_substr(&foo, file_text, "}", 1); - rbrace_span.lo = rbrace_span.hi; - - let mut snippet = SnippetData::new(cm.clone(), Some(rbrace_span)); - snippet.push(rbrace_span, false, None); - let lines = snippet.render_lines(); - let text: String = make_string(&lines); - println!("r#\"\n{}\"", text); - assert_eq!(text, &r#" - --> foo.rs:11:2 - |> -11 |> } - |> - -"#[1..]); -} diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index f2a005573d561..757b039fcac8a 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -13,8 +13,8 @@ pub use self::SyntaxExtension::*; use ast; use ast::{Name, PatKind}; use attr::HasAttrs; -use codemap; -use codemap::{CodeMap, Span, ExpnId, ExpnInfo, NO_EXPANSION}; +use codemap::{self, CodeMap, ExpnInfo}; +use syntax_pos::{Span, ExpnId, NO_EXPANSION}; use errors::DiagnosticBuilder; use ext; use ext::expand; @@ -32,6 +32,7 @@ use fold::Folder; use std::collections::{HashMap, HashSet}; use std::rc::Rc; use std::default::Default; +use tokenstream; #[derive(Debug,Clone)] @@ -168,20 +169,22 @@ pub trait TTMacroExpander { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, - token_tree: &[ast::TokenTree]) + token_tree: &[tokenstream::TokenTree]) -> Box; } pub type MacroExpanderFn = - for<'cx> fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box; + for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) + -> Box; impl TTMacroExpander for F - where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box + where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) + -> Box { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, - token_tree: &[ast::TokenTree]) + token_tree: &[tokenstream::TokenTree]) -> Box { (*self)(ecx, span, token_tree) } @@ -192,22 +195,23 @@ pub trait IdentMacroExpander { cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: Vec ) + 
token_tree: Vec ) -> Box; } pub type IdentMacroExpanderFn = - for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec) -> Box; + for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec) + -> Box; impl IdentMacroExpander for F where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident, - Vec) -> Box + Vec) -> Box { fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: Vec ) + token_tree: Vec ) -> Box { (*self)(cx, sp, ident, token_tree) @@ -217,10 +221,11 @@ impl IdentMacroExpander for F // Use a macro because forwarding to a simple function has type system issues macro_rules! make_stmts_default { ($me:expr) => { - $me.make_expr().map(|e| { - SmallVector::one(codemap::respan( - e.span, ast::StmtKind::Expr(e, ast::DUMMY_NODE_ID))) - }) + $me.make_expr().map(|e| SmallVector::one(ast::Stmt { + id: ast::DUMMY_NODE_ID, + span: e.span, + node: ast::StmtKind::Expr(e), + })) } } @@ -241,6 +246,11 @@ pub trait MacResult { None } + /// Create zero or more trait items. + fn make_trait_items(self: Box) -> Option> { + None + } + /// Create a pattern. fn make_pat(self: Box) -> Option> { None @@ -288,6 +298,7 @@ make_MacEager! { pat: P, items: SmallVector>, impl_items: SmallVector, + trait_items: SmallVector, stmts: SmallVector, ty: P, } @@ -305,6 +316,10 @@ impl MacResult for MacEager { self.impl_items } + fn make_trait_items(self: Box) -> Option> { + self.trait_items + } + fn make_stmts(self: Box) -> Option> { match self.stmts.as_ref().map_or(0, |s| s.len()) { 0 => make_stmts_default!(self), @@ -365,7 +380,7 @@ impl DummyResult { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Lit(P(codemap::respan(sp, ast::LitKind::Bool(false)))), span: sp, - attrs: None, + attrs: ast::ThinVec::new(), }) } @@ -413,11 +428,20 @@ impl MacResult for DummyResult { } } + fn make_trait_items(self: Box) -> Option> { + if self.expr_only { + None + } else { + Some(SmallVector::zero()) + } + } + fn make_stmts(self: Box) -> Option> { - Some(SmallVector::one( - codemap::respan(self.span, - ast::StmtKind::Expr(DummyResult::raw_expr(self.span), - ast::DUMMY_NODE_ID)))) + Some(SmallVector::one(ast::Stmt { + id: ast::DUMMY_NODE_ID, + node: ast::StmtKind::Expr(DummyResult::raw_expr(self.span)), + span: self.span, + })) } } @@ -612,7 +636,7 @@ impl<'a> ExtCtxt<'a> { expand::MacroExpander::new(self) } - pub fn new_parser_from_tts(&self, tts: &[ast::TokenTree]) + pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> { parse::tts_to_parser(self.parse_sess, tts.to_vec(), self.cfg()) } @@ -811,7 +835,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt, expr: P, err_msg: &str) /// done as rarely as possible). pub fn check_zero_tts(cx: &ExtCtxt, sp: Span, - tts: &[ast::TokenTree], + tts: &[tokenstream::TokenTree], name: &str) { if !tts.is_empty() { cx.span_err(sp, &format!("{} takes no arguments", name)); @@ -822,7 +846,7 @@ pub fn check_zero_tts(cx: &ExtCtxt, /// is not a string literal, emit an error and return None. pub fn get_single_str_from_tts(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree], + tts: &[tokenstream::TokenTree], name: &str) -> Option { let mut p = cx.new_parser_from_tts(tts); @@ -843,7 +867,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt, /// parsing error, emit a non-fatal error and return None. 
pub fn get_exprs_from_tts(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) -> Option>> { + tts: &[tokenstream::TokenTree]) -> Option>> { let mut p = cx.new_parser_from_tts(tts); let mut es = Vec::new(); while p.token != token::Eof { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 3a1cdae9bfbd0..435241f426ec6 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -11,7 +11,8 @@ use abi::Abi; use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind}; use attr; -use codemap::{Span, respan, Spanned, DUMMY_SP, Pos}; +use syntax_pos::{Span, DUMMY_SP, Pos}; +use codemap::{respan, Spanned}; use ext::base::ExtCtxt; use parse::token::{self, keywords, InternedString}; use ptr::P; @@ -87,6 +88,7 @@ pub trait AstBuilder { // statements fn stmt_expr(&self, expr: P) -> ast::Stmt; + fn stmt_semi(&self, expr: P) -> ast::Stmt; fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: P) -> ast::Stmt; fn stmt_let_typed(&self, sp: Span, @@ -98,12 +100,8 @@ pub trait AstBuilder { fn stmt_item(&self, sp: Span, item: P) -> ast::Stmt; // blocks - fn block(&self, span: Span, stmts: Vec, - expr: Option>) -> P; + fn block(&self, span: Span, stmts: Vec) -> P; fn block_expr(&self, expr: P) -> P; - fn block_all(&self, span: Span, - stmts: Vec, - expr: Option>) -> P; // expressions fn expr(&self, span: Span, node: ast::ExprKind) -> P; @@ -508,7 +506,19 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn stmt_expr(&self, expr: P) -> ast::Stmt { - respan(expr.span, ast::StmtKind::Semi(expr, ast::DUMMY_NODE_ID)) + ast::Stmt { + id: ast::DUMMY_NODE_ID, + span: expr.span, + node: ast::StmtKind::Expr(expr), + } + } + + fn stmt_semi(&self, expr: P) -> ast::Stmt { + ast::Stmt { + id: ast::DUMMY_NODE_ID, + span: expr.span, + node: ast::StmtKind::Semi(expr), + } } fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, @@ -525,10 +535,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, - attrs: None, + attrs: ast::ThinVec::new(), }); - let decl = respan(sp, ast::DeclKind::Local(local)); - respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID)) + ast::Stmt { + id: ast::DUMMY_NODE_ID, + node: ast::StmtKind::Local(local), + span: sp, + } } fn stmt_let_typed(&self, @@ -550,36 +563,37 @@ impl<'a> AstBuilder for ExtCtxt<'a> { init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, - attrs: None, + attrs: ast::ThinVec::new(), }); - let decl = respan(sp, ast::DeclKind::Local(local)); - P(respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID))) - } - - fn block(&self, span: Span, stmts: Vec, - expr: Option>) -> P { - self.block_all(span, stmts, expr) + P(ast::Stmt { + id: ast::DUMMY_NODE_ID, + node: ast::StmtKind::Local(local), + span: sp, + }) } fn stmt_item(&self, sp: Span, item: P) -> ast::Stmt { - let decl = respan(sp, ast::DeclKind::Item(item)); - respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID)) + ast::Stmt { + id: ast::DUMMY_NODE_ID, + node: ast::StmtKind::Item(item), + span: sp, + } } fn block_expr(&self, expr: P) -> P { - self.block_all(expr.span, Vec::new(), Some(expr)) - } - fn block_all(&self, - span: Span, - stmts: Vec, - expr: Option>) -> P { - P(ast::Block { - stmts: stmts, - expr: expr, - id: ast::DUMMY_NODE_ID, - rules: BlockCheckMode::Default, - span: span, - }) + self.block(expr.span, vec![ast::Stmt { + id: ast::DUMMY_NODE_ID, + span: expr.span, + node: ast::StmtKind::Expr(expr), + }]) + } + fn block(&self, span: Span, stmts: Vec) -> P { + P(ast::Block { + stmts: stmts, + id: 
ast::DUMMY_NODE_ID, + rules: BlockCheckMode::Default, + span: span, + }) } fn expr(&self, span: Span, node: ast::ExprKind) -> P { @@ -587,7 +601,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { id: ast::DUMMY_NODE_ID, node: node, span: span, - attrs: None, + attrs: ast::ThinVec::new(), }) } @@ -830,7 +844,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec>) -> P { let pat = if subpats.is_empty() { - PatKind::Path(path) + PatKind::Path(None, path) } else { PatKind::TupleStruct(path, subpats, None) }; @@ -948,14 +962,14 @@ impl<'a> AstBuilder for ExtCtxt<'a> { ids: Vec, stmts: Vec) -> P { - self.lambda(span, ids, self.block(span, stmts, None)) + self.lambda(span, ids, self.block(span, stmts)) } fn lambda_stmts_0(&self, span: Span, stmts: Vec) -> P { - self.lambda0(span, self.block(span, stmts, None)) + self.lambda0(span, self.block(span, stmts)) } fn lambda_stmts_1(&self, span: Span, stmts: Vec, ident: ast::Ident) -> P { - self.lambda1(span, self.block(span, stmts, None), ident) + self.lambda1(span, self.block(span, stmts), ident) } fn arg(&self, span: Span, ident: ast::Ident, ty: P) -> ast::Arg { diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 5beb49372077e..3036a88430a2b 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,18 +8,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{Block, Crate, DeclKind, PatKind}; +use ast::{Block, Crate, PatKind}; use ast::{Local, Ident, Mac_, Name, SpannedIdent}; use ast::{MacStmtStyle, Mrk, Stmt, StmtKind, ItemKind}; -use ast::TokenTree; use ast; use attr::HasAttrs; use ext::mtwt; -use ext::build::AstBuilder; use attr; -use attr::{AttrMetaMethods, WithAttrs, ThinAttributesExt}; -use codemap; -use codemap::{Span, Spanned, ExpnInfo, ExpnId, NameAndSpan, MacroBang, MacroAttribute}; +use attr::AttrMetaMethods; +use codemap::{Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; +use syntax_pos::{self, Span, ExpnId}; use config::StripUnconfigured; use ext::base::*; use feature_gate::{self, Features}; @@ -28,6 +26,7 @@ use fold::*; use util::move_map::MoveMap; use parse::token::{fresh_mark, fresh_name, intern, keywords}; use ptr::P; +use tokenstream::TokenTree; use util::small_vector::SmallVector; use visit; use visit::Visitor; @@ -42,7 +41,7 @@ trait MacroGenerable: Sized { // Fold this node or list of nodes using the given folder. fn fold_with(self, folder: &mut F) -> Self; - fn visit_with<'v, V: Visitor<'v>>(&'v self, visitor: &mut V); + fn visit_with(&self, visitor: &mut V); // Return a placeholder expansion to allow compilation to continue after an erroring expansion. fn dummy(span: Span) -> Self; @@ -63,7 +62,7 @@ macro_rules! impl_macro_generable { $( folder.$fold(self) )* $( self.into_iter().flat_map(|item| folder. $fold_elt (item)).collect() )* } - fn visit_with<'v, V: Visitor<'v>>(&'v self, visitor: &mut V) { + fn visit_with(&self, visitor: &mut V) { $( visitor.$visit(self) )* $( for item in self.as_slice() { visitor. $visit_elt (item) } )* } @@ -81,8 +80,11 @@ impl_macro_generable! 
{ "statement", .make_stmts, lift .fold_stmt, lift .visit_stmt, |_span| SmallVector::zero(); SmallVector>: "item", .make_items, lift .fold_item, lift .visit_item, |_span| SmallVector::zero(); + SmallVector: + "trait item", .make_trait_items, lift .fold_trait_item, lift .visit_trait_item, + |_span| SmallVector::zero(); SmallVector: - "impl item", .make_impl_items, lift .fold_impl_item, lift .visit_impl_item, + "impl item", .make_impl_items, lift .fold_impl_item, lift .visit_impl_item, |_span| SmallVector::zero(); } @@ -95,24 +97,23 @@ impl MacroGenerable for Option> { fn fold_with(self, folder: &mut F) -> Self { self.and_then(|expr| folder.fold_opt_expr(expr)) } - fn visit_with<'v, V: Visitor<'v>>(&'v self, visitor: &mut V) { + fn visit_with(&self, visitor: &mut V) { self.as_ref().map(|expr| visitor.visit_expr(expr)); } } -pub fn expand_expr(expr: ast::Expr, fld: &mut MacroExpander) -> P { +pub fn expand_expr(mut expr: ast::Expr, fld: &mut MacroExpander) -> P { match expr.node { // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. ast::ExprKind::Mac(mac) => { - expand_mac_invoc(mac, None, expr.attrs.into_attr_vec(), expr.span, fld) + return expand_mac_invoc(mac, None, expr.attrs.into(), expr.span, fld); } ast::ExprKind::While(cond, body, opt_ident) => { let cond = fld.fold_expr(cond); let (body, opt_ident) = expand_loop_block(body, opt_ident, fld); - fld.cx.expr(expr.span, ast::ExprKind::While(cond, body, opt_ident)) - .with_attrs(fold_thin_attrs(expr.attrs, fld)) + expr.node = ast::ExprKind::While(cond, body, opt_ident); } ast::ExprKind::WhileLet(pat, cond, body, opt_ident) => { @@ -129,14 +130,12 @@ pub fn expand_expr(expr: ast::Expr, fld: &mut MacroExpander) -> P { }); assert!(rewritten_pats.len() == 1); - let wl = ast::ExprKind::WhileLet(rewritten_pats.remove(0), cond, body, opt_ident); - fld.cx.expr(expr.span, wl).with_attrs(fold_thin_attrs(expr.attrs, fld)) + expr.node = ast::ExprKind::WhileLet(rewritten_pats.remove(0), cond, body, opt_ident); } ast::ExprKind::Loop(loop_block, opt_ident) => { let (loop_block, opt_ident) = expand_loop_block(loop_block, opt_ident, fld); - fld.cx.expr(expr.span, ast::ExprKind::Loop(loop_block, opt_ident)) - .with_attrs(fold_thin_attrs(expr.attrs, fld)) + expr.node = ast::ExprKind::Loop(loop_block, opt_ident); } ast::ExprKind::ForLoop(pat, head, body, opt_ident) => { @@ -153,8 +152,7 @@ pub fn expand_expr(expr: ast::Expr, fld: &mut MacroExpander) -> P { assert!(rewritten_pats.len() == 1); let head = fld.fold_expr(head); - let fl = ast::ExprKind::ForLoop(rewritten_pats.remove(0), head, body, opt_ident); - fld.cx.expr(expr.span, fl).with_attrs(fold_thin_attrs(expr.attrs, fld)) + expr.node = ast::ExprKind::ForLoop(rewritten_pats.remove(0), head, body, opt_ident); } ast::ExprKind::IfLet(pat, sub_expr, body, else_opt) => { @@ -172,25 +170,21 @@ pub fn expand_expr(expr: ast::Expr, fld: &mut MacroExpander) -> P { let else_opt = else_opt.map(|else_opt| fld.fold_expr(else_opt)); let sub_expr = fld.fold_expr(sub_expr); - let il = ast::ExprKind::IfLet(rewritten_pats.remove(0), sub_expr, body, else_opt); - fld.cx.expr(expr.span, il).with_attrs(fold_thin_attrs(expr.attrs, fld)) + expr.node = ast::ExprKind::IfLet(rewritten_pats.remove(0), sub_expr, body, else_opt); } ast::ExprKind::Closure(capture_clause, fn_decl, block, fn_decl_span) => { let (rewritten_fn_decl, rewritten_block) = expand_and_rename_fn_decl_and_block(fn_decl, block, fld); - let new_node = ast::ExprKind::Closure(capture_clause, - rewritten_fn_decl, - 
rewritten_block, - fn_decl_span); - P(ast::Expr{ id: expr.id, - node: new_node, - span: expr.span, - attrs: fold_thin_attrs(expr.attrs, fld) }) + expr.node = ast::ExprKind::Closure(capture_clause, + rewritten_fn_decl, + rewritten_block, + fn_decl_span); } - _ => P(noop_fold_expr(expr, fld)), - } + _ => expr = noop_fold_expr(expr, fld), + }; + P(expr) } /// Expand a macro invocation. Returns the result of expansion. @@ -249,7 +243,7 @@ fn expand_mac_invoc(mac: ast::Mac, ident: Option, attrs: Vec(mac: ast::Mac, ident: Option, attrs: Vec SmallVector { }; let (mac, style, attrs) = match stmt.node { - StmtKind::Mac(mac, style, attrs) => (mac, style, attrs), + StmtKind::Mac(mac) => mac.unwrap(), _ => return expand_non_macro_stmt(stmt, fld) }; let mut fully_expanded: SmallVector = - expand_mac_invoc(mac.unwrap(), None, attrs.into_attr_vec(), stmt.span, fld); + expand_mac_invoc(mac, None, attrs.into(), stmt.span, fld); // If this is a macro invocation with a semicolon, then apply that // semicolon to the final statement produced by expansion. if style == MacStmtStyle::Semicolon { if let Some(stmt) = fully_expanded.pop() { - let new_stmt = Spanned { + fully_expanded.push(Stmt { + id: stmt.id, node: match stmt.node { - StmtKind::Expr(e, stmt_id) => StmtKind::Semi(e, stmt_id), + StmtKind::Expr(expr) => StmtKind::Semi(expr), _ => stmt.node /* might already have a semi */ }, - span: stmt.span - }; - fully_expanded.push(new_stmt); + span: stmt.span, + }); } } @@ -472,73 +466,53 @@ fn expand_stmt(stmt: Stmt, fld: &mut MacroExpander) -> SmallVector { // expand a non-macro stmt. this is essentially the fallthrough for // expand_stmt, above. -fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroExpander) +fn expand_non_macro_stmt(stmt: Stmt, fld: &mut MacroExpander) -> SmallVector { // is it a let? - match node { - StmtKind::Decl(decl, node_id) => decl.and_then(|Spanned {node: decl, span}| match decl { - DeclKind::Local(local) => { - // take it apart: - let rewritten_local = local.map(|Local {id, pat, ty, init, span, attrs}| { - // expand the ty since TyKind::FixedLengthVec contains an Expr - // and thus may have a macro use - let expanded_ty = ty.map(|t| fld.fold_ty(t)); - // expand the pat (it might contain macro uses): - let expanded_pat = fld.fold_pat(pat); - // find the PatIdents in the pattern: - // oh dear heaven... this is going to include the enum - // names, as well... but that should be okay, as long as - // the new names are gensyms for the old ones. 
- // generate fresh names, push them to a new pending list - let idents = pattern_bindings(&expanded_pat); - let mut new_pending_renames = - idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect(); - // rewrite the pattern using the new names (the old - // ones have already been applied): - let rewritten_pat = { - // nested binding to allow borrow to expire: - let mut rename_fld = IdentRenamer{renames: &mut new_pending_renames}; - rename_fld.fold_pat(expanded_pat) - }; - // add them to the existing pending renames: - fld.cx.syntax_env.info().pending_renames - .extend(new_pending_renames); - Local { - id: id, - ty: expanded_ty, - pat: rewritten_pat, - // also, don't forget to expand the init: - init: init.map(|e| fld.fold_expr(e)), - span: span, - attrs: fold::fold_thin_attrs(attrs, fld), - } - }); - SmallVector::one(Spanned { - node: StmtKind::Decl(P(Spanned { - node: DeclKind::Local(rewritten_local), - span: span - }), - node_id), - span: stmt_span - }) - } - _ => { - noop_fold_stmt(Spanned { - node: StmtKind::Decl(P(Spanned { - node: decl, - span: span - }), - node_id), - span: stmt_span - }, fld) - } - }), - _ => { - noop_fold_stmt(Spanned { - node: node, - span: stmt_span - }, fld) + match stmt.node { + StmtKind::Local(local) => { + // take it apart: + let rewritten_local = local.map(|Local {id, pat, ty, init, span, attrs}| { + // expand the ty since TyKind::FixedLengthVec contains an Expr + // and thus may have a macro use + let expanded_ty = ty.map(|t| fld.fold_ty(t)); + // expand the pat (it might contain macro uses): + let expanded_pat = fld.fold_pat(pat); + // find the PatIdents in the pattern: + // oh dear heaven... this is going to include the enum + // names, as well... but that should be okay, as long as + // the new names are gensyms for the old ones. + // generate fresh names, push them to a new pending list + let idents = pattern_bindings(&expanded_pat); + let mut new_pending_renames = + idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect(); + // rewrite the pattern using the new names (the old + // ones have already been applied): + let rewritten_pat = { + // nested binding to allow borrow to expire: + let mut rename_fld = IdentRenamer{renames: &mut new_pending_renames}; + rename_fld.fold_pat(expanded_pat) + }; + // add them to the existing pending renames: + fld.cx.syntax_env.info().pending_renames + .extend(new_pending_renames); + Local { + id: id, + ty: expanded_ty, + pat: rewritten_pat, + // also, don't forget to expand the init: + init: init.map(|e| fld.fold_expr(e)), + span: span, + attrs: fold::fold_thin_attrs(attrs, fld), + } + }); + SmallVector::one(Stmt { + id: stmt.id, + node: StmtKind::Local(rewritten_local), + span: stmt.span, + }) } + _ => noop_fold_stmt(stmt, fld), } } @@ -596,7 +570,7 @@ struct PatIdentFinder { ident_accumulator: Vec } -impl<'v> Visitor<'v> for PatIdentFinder { +impl Visitor for PatIdentFinder { fn visit_pat(&mut self, pattern: &ast::Pat) { match *pattern { ast::Pat { id: _, node: PatKind::Ident(_, ref path1, ref inner), span: _ } => { @@ -637,23 +611,14 @@ pub fn expand_block(blk: P, fld: &mut MacroExpander) -> P { // expand the elements of a block. 
pub fn expand_block_elts(b: P, fld: &mut MacroExpander) -> P { - b.map(|Block {id, stmts, expr, rules, span}| { + b.map(|Block {id, stmts, rules, span}| { let new_stmts = stmts.into_iter().flat_map(|x| { // perform pending renames and expand macros in the statement fld.fold_stmt(x).into_iter() }).collect(); - let new_expr = expr.map(|x| { - let expr = { - let pending_renames = &mut fld.cx.syntax_env.info().pending_renames; - let mut rename_fld = IdentRenamer{renames:pending_renames}; - rename_fld.fold_expr(x) - }; - fld.fold_expr(expr) - }); Block { id: fld.new_id(id), stmts: new_stmts, - expr: new_expr, rules: rules, span: span } @@ -754,25 +719,10 @@ fn expand_multi_modified(a: Annotatable, fld: &mut MacroExpander) -> SmallVector _ => noop_fold_item(it, fld), }.into_iter().map(|i| Annotatable::Item(i)).collect(), - Annotatable::TraitItem(it) => match it.node { - ast::TraitItemKind::Method(_, Some(_)) => { - let ti = it.unwrap(); - SmallVector::one(ast::TraitItem { - id: ti.id, - ident: ti.ident, - attrs: ti.attrs, - node: match ti.node { - ast::TraitItemKind::Method(sig, Some(body)) => { - let (sig, body) = expand_and_rename_method(sig, body, fld); - ast::TraitItemKind::Method(sig, Some(body)) - } - _ => unreachable!() - }, - span: ti.span, - }) - } - _ => fold::noop_fold_trait_item(it.unwrap(), fld) - }.into_iter().map(|ti| Annotatable::TraitItem(P(ti))).collect(), + Annotatable::TraitItem(it) => { + expand_trait_item(it.unwrap(), fld).into_iter(). + map(|it| Annotatable::TraitItem(P(it))).collect() + } Annotatable::ImplItem(ii) => { expand_impl_item(ii.unwrap(), fld).into_iter(). @@ -900,6 +850,31 @@ fn expand_impl_item(ii: ast::ImplItem, fld: &mut MacroExpander) } } +fn expand_trait_item(ti: ast::TraitItem, fld: &mut MacroExpander) + -> SmallVector { + match ti.node { + ast::TraitItemKind::Method(_, Some(_)) => { + SmallVector::one(ast::TraitItem { + id: ti.id, + ident: ti.ident, + attrs: ti.attrs, + node: match ti.node { + ast::TraitItemKind::Method(sig, Some(body)) => { + let (sig, body) = expand_and_rename_method(sig, body, fld); + ast::TraitItemKind::Method(sig, Some(body)) + } + _ => unreachable!() + }, + span: ti.span, + }) + } + ast::TraitItemKind::Macro(mac) => { + expand_mac_invoc(mac, None, ti.attrs, ti.span, fld) + } + _ => fold::noop_fold_trait_item(ti, fld) + } +} + /// Given a fn_decl and a block and a MacroExpander, expand the fn_decl, then use the /// PatIdents in its arguments to perform renaming in the FnDecl and /// the block, returning both the new FnDecl and the new Block. @@ -980,9 +955,9 @@ impl<'a, 'b> MacroExpander<'a, 'b> { at_crate_root: bool, } - impl<'a, 'b, 'v> Visitor<'v> for MacroLoadingVisitor<'a, 'b> { - fn visit_mac(&mut self, _: &'v ast::Mac) {} - fn visit_item(&mut self, item: &'v ast::Item) { + impl<'a, 'b> Visitor for MacroLoadingVisitor<'a, 'b> { + fn visit_mac(&mut self, _: &ast::Mac) {} + fn visit_item(&mut self, item: &ast::Item) { if let ast::ItemKind::ExternCrate(..) = item.node { // We need to error on `#[macro_use] extern crate` when it isn't at the // crate root, because `$crate` won't work properly. 
@@ -995,7 +970,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.at_crate_root = at_crate_root; } } - fn visit_block(&mut self, block: &'v ast::Block) { + fn visit_block(&mut self, block: &ast::Block) { let at_crate_root = ::std::mem::replace(&mut self.at_crate_root, false); visit::walk_block(self, block); self.at_crate_root = at_crate_root; @@ -1022,7 +997,7 @@ impl<'a, 'b> Folder for MacroExpander<'a, 'b> { fn fold_opt_expr(&mut self, expr: P) -> Option> { expr.and_then(|expr| match expr.node { ast::ExprKind::Mac(mac) => - expand_mac_invoc(mac, None, expr.attrs.into_attr_vec(), expr.span, self), + expand_mac_invoc(mac, None, expr.attrs.into(), expr.span, self), _ => Some(expand_expr(expr, self)), }) } @@ -1040,7 +1015,7 @@ impl<'a, 'b> Folder for MacroExpander<'a, 'b> { result = expand_item(item, self); self.pop_mod_path(); } else { - let filename = if inner != codemap::DUMMY_SP { + let filename = if inner != syntax_pos::DUMMY_SP { Some(self.cx.parse_sess.codemap().span_to_filename(inner)) } else { None }; let orig_filename = replace(&mut self.cx.filename, filename); @@ -1202,8 +1177,7 @@ impl Folder for Marker { Spanned { node: Mac_ { path: self.fold_path(node.path), - tts: self.fold_tts(&node.tts), - ctxt: mtwt::apply_mark(self.mark, node.ctxt), + tts: self.fold_tts(node.tts), }, span: self.new_span(span), } @@ -1218,7 +1192,7 @@ impl Folder for Marker { } // apply a given mark to the given token trees. Used prior to expansion of a macro. -fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec { +fn mark_tts(tts: Vec, m: Mrk) -> Vec { noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None}) } @@ -1229,7 +1203,7 @@ mod tests { use super::{PatIdentFinder, IdentRenamer, PatIdentRenamer, ExpansionConfig}; use ast; use ast::Name; - use codemap; + use syntax_pos; use ext::base::{ExtCtxt, DummyMacroLoader}; use ext::mtwt; use fold::Folder; @@ -1248,7 +1222,7 @@ mod tests { path_accumulator: Vec , } - impl<'v> Visitor<'v> for PathExprFinderContext { + impl Visitor for PathExprFinderContext { fn visit_expr(&mut self, expr: &ast::Expr) { if let ast::ExprKind::Path(None, ref p) = expr.node { self.path_accumulator.push(p.clone()); @@ -1270,8 +1244,8 @@ mod tests { ident_accumulator: Vec } - impl<'v> Visitor<'v> for IdentFinder { - fn visit_ident(&mut self, _: codemap::Span, id: ast::Ident){ + impl Visitor for IdentFinder { + fn visit_ident(&mut self, _: syntax_pos::Span, id: ast::Ident){ self.ident_accumulator.push(id); } } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 871b0d4b1c023..68527b0797d5b 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, TokenTree, Ty}; -use codemap::Span; +use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty}; +use syntax_pos::Span; use ext::base::ExtCtxt; use ext::base; use ext::build::AstBuilder; @@ -17,6 +17,7 @@ use parse::parser::{Parser, PathStyle}; use parse::token::*; use parse::token; use ptr::P; +use tokenstream::{self, TokenTree}; /// Quasiquoting works via token trees. 
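Several impls in the hunks above drop the visitor's explicit lifetime parameter: for example `impl<'v> Visitor<'v> for PatIdentFinder` becomes `impl Visitor for PatIdentFinder`, and `&'v ast::Block` arguments become plain `&ast::Block`. A small sketch of the two trait shapes with toy types (these are stand-ins, not the real `syntax::visit::Visitor`):

    // Toy AST node.
    struct Item {
        name: String,
    }

    // Old shape: the trait is parameterized over the lifetime of the AST being
    // walked, so an implementation can tie its own state to `'v` and keep
    // borrows of visited nodes around after the call returns.
    #[allow(dead_code)]
    trait OldVisitor<'v> {
        fn visit_item(&mut self, item: &'v Item);
    }

    // New shape: no trait-level lifetime; each method receives a borrow that
    // lives only for the duration of the call.
    trait Visitor {
        fn visit_item(&mut self, item: &Item);
    }

    struct NameCollector {
        names: Vec<String>,
    }

    impl Visitor for NameCollector {
        fn visit_item(&mut self, item: &Item) {
            // Data must be copied out; references to `item` cannot be stored.
            self.names.push(item.name.clone());
        }
    }

    fn main() {
        let items = vec![Item { name: "foo".into() }, Item { name: "bar".into() }];
        let mut v = NameCollector { names: Vec::new() };
        for item in &items {
            v.visit_item(item);
        }
        println!("{:?}", v.names);
    }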
/// @@ -31,12 +32,12 @@ pub mod rt { use ext::base::ExtCtxt; use parse::{self, token, classify}; use ptr::P; - use std::rc::Rc; - use ast::TokenTree; + use tokenstream::{self, TokenTree}; pub use parse::new_parser_from_tts; - pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP}; + pub use syntax_pos::{BytePos, Span, DUMMY_SP}; + pub use codemap::{dummy_spanned}; pub trait ToTokens { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec; @@ -214,12 +215,12 @@ pub mod rt { if self.node.style == ast::AttrStyle::Inner { r.push(TokenTree::Token(self.span, token::Not)); } - r.push(TokenTree::Delimited(self.span, Rc::new(ast::Delimited { + r.push(TokenTree::Delimited(self.span, tokenstream::Delimited { delim: token::Bracket, open_span: self.span, tts: self.node.value.to_tokens(cx), close_span: self.span, - }))); + })); r } } @@ -234,12 +235,12 @@ pub mod rt { impl ToTokens for () { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited { + vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited { delim: token::Paren, open_span: DUMMY_SP, tts: vec![], close_span: DUMMY_SP, - }))] + })] } } @@ -250,7 +251,7 @@ pub mod rt { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Lit(P(self.clone())), span: DUMMY_SP, - attrs: None, + attrs: ast::ThinVec::new(), }).to_tokens(cx) } } @@ -281,7 +282,7 @@ pub mod rt { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Lit(P(dummy_spanned(lit))), span: DUMMY_SP, - attrs: None, + attrs: ast::ThinVec::new(), }); if *self >= 0 { return lit.to_tokens(cx); @@ -290,7 +291,7 @@ pub mod rt { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Unary(ast::UnOp::Neg, lit), span: DUMMY_SP, - attrs: None, + attrs: ast::ThinVec::new(), }).to_tokens(cx) } } @@ -512,10 +513,8 @@ pub fn expand_quote_matcher(cx: &mut ExtCtxt, let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); let mut vector = mk_stmts_let(cx, sp); vector.extend(statements_mk_tts(cx, &tts[..], true)); - let block = cx.expr_block( - cx.block_all(sp, - vector, - Some(cx.expr_ident(sp, id_ext("tt"))))); + vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); + let block = cx.expr_block(cx.block(sp, vector)); let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]); base::MacEager::expr(expanded) @@ -548,7 +547,7 @@ fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { } fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { - let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext("TokenTree"), id_ext(name)); + let idents = vec!(id_ext("syntax"), id_ext("tokenstream"), id_ext("TokenTree"), id_ext(name)); cx.expr_path(cx.path_global(sp, idents)) } @@ -765,19 +764,20 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec cx.expr_some(sp, expr_mk_token(cx, sp, sep)), None => cx.expr_none(sp), }; let e_op = match seq.op { - ast::KleeneOp::ZeroOrMore => "ZeroOrMore", - ast::KleeneOp::OneOrMore => "OneOrMore", + tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore", + tokenstream::KleeneOp::OneOrMore => "OneOrMore", }; let e_op_idents = vec![ id_ext("syntax"), - id_ext("ast"), + id_ext("tokenstream"), id_ext("KleeneOp"), id_ext(e_op), ]; @@ -787,16 +787,13 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec>(); + stmts.push(cx.stmt_expr(expr)); - cx.expr_block(cx.block_all(sp, stmts, Some(expr))) + cx.expr_block(cx.block(sp, stmts)) } fn expand_parse_call(cx: &ExtCtxt, diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index d297188a35c8c..97cb09991ec40 100644 --- 
a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -9,8 +9,7 @@ // except according to those terms. use ast; -use codemap::{Pos, Span}; -use codemap; +use syntax_pos::{self, Pos, Span}; use ext::base::*; use ext::base; use ext::build::AstBuilder; @@ -18,6 +17,7 @@ use parse::token; use parse; use print::pprust; use ptr::P; +use tokenstream; use util::small_vector::SmallVector; use std::fs::File; @@ -30,7 +30,7 @@ use std::rc::Rc; // a given file into the current one. /// line!(): expands to the current line number -pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { base::check_zero_tts(cx, sp, tts, "line!"); @@ -41,7 +41,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } /* column!(): expands to the current column number */ -pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { base::check_zero_tts(cx, sp, tts, "column!"); @@ -54,7 +54,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) /// file!(): expands to the current filename */ /// The filemap (`loc.file`) contains a bunch more information we could spit /// out if we wanted. -pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { base::check_zero_tts(cx, sp, tts, "file!"); @@ -64,14 +64,14 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) base::MacEager::expr(cx.expr_str(topmost, filename)) } -pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); base::MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&s[..]))) } -pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { base::check_zero_tts(cx, sp, tts, "module_path!"); let string = cx.mod_path() @@ -87,7 +87,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) /// include! : parse the given file as an expr /// This is generally a bad idea because it's going to behave /// unhygienically. -pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let file = match get_single_str_from_tts(cx, sp, tts, "include!") { Some(f) => f, @@ -130,7 +130,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree } // include_str! 
: read the given file, insert it as a literal string expr -pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") { Some(f) => f, @@ -167,7 +167,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } -pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") { Some(f) => f, @@ -194,7 +194,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // resolve a file-system path to an absolute file-system path (if it // isn't already) -fn res_rel_file(cx: &mut ExtCtxt, sp: codemap::Span, arg: &Path) -> PathBuf { +fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: &Path) -> PathBuf { // NB: relative paths are resolved relative to the compilation unit if !arg.is_absolute() { let callsite = cx.codemap().source_callsite(sp); diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index ca5eb8f8003bb..813afb935762e 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -79,9 +79,9 @@ pub use self::ParseResult::*; use self::TokenTreeOrTokenTreeVec::*; use ast; -use ast::{TokenTree, Name, Ident}; -use codemap::{BytePos, mk_sp, Span, Spanned}; -use codemap; +use ast::{Name, Ident}; +use syntax_pos::{self, BytePos, mk_sp, Span}; +use codemap::Spanned; use errors::FatalError; use parse::lexer::*; //resolve bug? use parse::ParseSess; @@ -91,6 +91,7 @@ use parse::token::{Token, Nonterminal}; use parse::token; use print::pprust; use ptr::P; +use tokenstream::{self, TokenTree}; use std::mem; use std::rc::Rc; @@ -102,8 +103,8 @@ use std::collections::hash_map::Entry::{Vacant, Occupied}; #[derive(Clone)] enum TokenTreeOrTokenTreeVec { - Tt(ast::TokenTree), - TtSeq(Rc>), + Tt(tokenstream::TokenTree), + TtSeq(Rc>), } impl TokenTreeOrTokenTreeVec { @@ -196,7 +197,7 @@ pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: ByteP /// token tree it was derived from. pub enum NamedMatch { - MatchedSeq(Vec>, codemap::Span), + MatchedSeq(Vec>, syntax_pos::Span), MatchedNonterminal(Nonterminal) } @@ -204,7 +205,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) -> ParseResult>> { fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc], ret_val: &mut HashMap>, idx: &mut usize) - -> Result<(), (codemap::Span, String)> { + -> Result<(), (syntax_pos::Span, String)> { match *m { TokenTree::Sequence(_, ref seq) => { for next_m in &seq.tts { @@ -251,9 +252,9 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) pub enum ParseResult { Success(T), /// Arm failed to match - Failure(codemap::Span, String), + Failure(syntax_pos::Span, String), /// Fatal error (malformed macro?). Abort compilation. 
- Error(codemap::Span, String) + Error(syntax_pos::Span, String) } pub type NamedParseResult = ParseResult>>; @@ -374,7 +375,7 @@ pub fn parse(sess: &ParseSess, match ei.top_elts.get_tt(idx) { /* need to descend into sequence */ TokenTree::Sequence(sp, seq) => { - if seq.op == ast::KleeneOp::ZeroOrMore { + if seq.op == tokenstream::KleeneOp::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; new_ei.idx += 1; diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index bbe989b0f40ab..23f0b1fff0ae7 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{self, TokenTree}; -use codemap::{Span, DUMMY_SP}; +use ast; +use syntax_pos::{Span, DUMMY_SP}; use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; use ext::base::{NormalTT, TTMacroExpander}; use ext::tt::macro_parser::{Success, Error, Failure}; @@ -21,13 +21,13 @@ use parse::token::{self, gensym_ident, NtTT, Token}; use parse::token::Token::*; use print; use ptr::P; +use tokenstream::{self, TokenTree}; use util::small_vector::SmallVector; use std::cell::RefCell; use std::collections::{HashMap}; use std::collections::hash_map::{Entry}; -use std::rc::Rc; struct ParserAnyMacro<'a> { parser: RefCell>, @@ -100,6 +100,21 @@ impl<'a> MacResult for ParserAnyMacro<'a> { Some(ret) } + fn make_trait_items(self: Box>) + -> Option> { + let mut ret = SmallVector::zero(); + loop { + let mut parser = self.parser.borrow_mut(); + match parser.token { + token::Eof => break, + _ => ret.push(panictry!(parser.parse_trait_item())) + } + } + self.ensure_complete_parse(false, "item"); + Some(ret) + } + + fn make_stmts(self: Box>) -> Option> { let mut ret = SmallVector::zero(); @@ -246,27 +261,25 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, // These spans won't matter, anyways let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt")); let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt")); - let argument_gram = vec!( - TokenTree::Sequence(DUMMY_SP, - Rc::new(ast::SequenceRepetition { - tts: vec![ - TokenTree::Token(DUMMY_SP, match_lhs_tok), - TokenTree::Token(DUMMY_SP, token::FatArrow), - TokenTree::Token(DUMMY_SP, match_rhs_tok)], - separator: Some(token::Semi), - op: ast::KleeneOp::OneOrMore, - num_captures: 2 - })), - //to phase into semicolon-termination instead of - //semicolon-separation - TokenTree::Sequence(DUMMY_SP, - Rc::new(ast::SequenceRepetition { - tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], - separator: None, - op: ast::KleeneOp::ZeroOrMore, - num_captures: 0 - }))); - + let argument_gram = vec![ + TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition { + tts: vec![ + TokenTree::Token(DUMMY_SP, match_lhs_tok), + TokenTree::Token(DUMMY_SP, token::FatArrow), + TokenTree::Token(DUMMY_SP, match_rhs_tok), + ], + separator: Some(token::Semi), + op: tokenstream::KleeneOp::OneOrMore, + num_captures: 2, + }), + // to phase into semicolon-termination instead of semicolon-separation + TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition { + tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], + separator: None, + op: tokenstream::KleeneOp::ZeroOrMore, + num_captures: 0 + }), + ]; // Parse the macro_rules! 
invocation (`none` is for no interpolations): let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic, @@ -427,7 +440,7 @@ impl FirstSets { } // Reverse scan: Sequence comes before `first`. - if subfirst.maybe_empty || seq_rep.op == ast::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { // If sequence is potentially empty, then // union them (preserving first emptiness). first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); @@ -474,7 +487,8 @@ impl FirstSets { assert!(first.maybe_empty); first.add_all(subfirst); - if subfirst.maybe_empty || seq_rep.op == ast::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || + seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { // continue scanning for more first // tokens, but also make sure we // restore empty-tracking state diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 6b3b5ce9de914..58328eb424675 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -9,15 +9,15 @@ // except according to those terms. use self::LockstepIterSize::*; -use ast; -use ast::{TokenTree, Ident, Name}; -use codemap::{Span, DUMMY_SP}; +use ast::{Ident, Name}; +use syntax_pos::{Span, DUMMY_SP}; use errors::{Handler, DiagnosticBuilder}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use parse::token::{DocComment, MatchNt, SubstNt}; use parse::token::{Token, NtIdent, SpecialMacroVar}; use parse::token; use parse::lexer::TokenAndSpan; +use tokenstream::{self, TokenTree}; use std::rc::Rc; use std::ops::Add; @@ -59,7 +59,7 @@ pub struct TtReader<'a> { pub fn new_tt_reader(sp_diag: &Handler, interp: Option>>, imported_from: Option, - src: Vec) + src: Vec) -> TtReader { new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false) } @@ -73,17 +73,17 @@ pub fn new_tt_reader(sp_diag: &Handler, pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler, interp: Option>>, imported_from: Option, - src: Vec, + src: Vec, desugar_doc_comments: bool) -> TtReader { let mut r = TtReader { sp_diag: sp_diag, stack: vec!(TtFrame { - forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { + forest: TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition { tts: src, // doesn't matter. This merely holds the root unzipping. - separator: None, op: ast::KleeneOp::ZeroOrMore, num_captures: 0 - })), + separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0 + }), idx: 0, dotdotdoted: false, sep: None, @@ -259,7 +259,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisConstraint(len, _) => { if len == 0 { - if seq.op == ast::KleeneOp::OneOrMore { + if seq.op == tokenstream::KleeneOp::OneOrMore { // FIXME #2887 blame invoker panic!(r.sp_diag.span_fatal(sp.clone(), "this must repeat at least once")); diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 550eb0a56d98c..d6476fdb2f015 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -30,10 +30,10 @@ use ast::{NodeId, PatKind}; use ast; use attr; use attr::AttrMetaMethods; -use codemap::{CodeMap, Span}; +use codemap::CodeMap; +use syntax_pos::Span; use errors::Handler; -use visit; -use visit::{FnKind, Visitor}; +use visit::{self, FnKind, Visitor}; use parse::ParseSess; use parse::token::InternedString; @@ -800,7 +800,7 @@ macro_rules! 
gate_feature_post { }} } -impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { +impl<'a> Visitor for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { if !self.context.cm.span_allows_unstable(attr.span) { self.context.check_attribute(attr, false); @@ -996,9 +996,9 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } fn visit_fn(&mut self, - fn_kind: FnKind<'v>, - fn_decl: &'v ast::FnDecl, - block: &'v ast::Block, + fn_kind: FnKind, + fn_decl: &ast::FnDecl, + block: &ast::Block, span: Span, _node_id: NodeId) { // check for const fn declarations @@ -1037,7 +1037,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { visit::walk_fn(self, fn_kind, fn_decl, block, span); } - fn visit_trait_item(&mut self, ti: &'v ast::TraitItem) { + fn visit_trait_item(&mut self, ti: &ast::TraitItem) { match ti.node { ast::TraitItemKind::Const(..) => { gate_feature_post!(&self, associated_consts, @@ -1058,7 +1058,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { visit::walk_trait_item(self, ti); } - fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) { + fn visit_impl_item(&mut self, ii: &ast::ImplItem) { if ii.defaultness == ast::Defaultness::Default { gate_feature_post!(&self, specialization, ii.span, @@ -1081,7 +1081,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { visit::walk_impl_item(self, ii); } - fn visit_vis(&mut self, vis: &'v ast::Visibility) { + fn visit_vis(&mut self, vis: &ast::Visibility) { let span = match *vis { ast::Visibility::Crate(span) => span, ast::Visibility::Restricted { ref path, .. } => path.span, diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index edf418e33325b..6789e7be058bf 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -20,15 +20,14 @@ use ast::*; use ast; -use attr::{ThinAttributes, ThinAttributesExt}; -use codemap::{respan, Span, Spanned}; +use syntax_pos::Span; +use codemap::{Spanned, respan}; use parse::token::{self, keywords}; use ptr::P; +use tokenstream::*; use util::small_vector::SmallVector; use util::move_map::MoveMap; -use std::rc::Rc; - pub trait Folder : Sized { // Any additions to this trait should happen in form // of a call to a public `noop_*` function that only calls @@ -102,10 +101,6 @@ pub trait Folder : Sized { noop_fold_pat(p, self) } - fn fold_decl(&mut self, d: P) -> SmallVector> { - noop_fold_decl(d, self) - } - fn fold_expr(&mut self, e: P) -> P { e.map(|e| noop_fold_expr(e, self)) } @@ -227,11 +222,11 @@ pub trait Folder : Sized { noop_fold_ty_params(tps, self) } - fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree { + fn fold_tt(&mut self, tt: TokenTree) -> TokenTree { noop_fold_tt(tt, self) } - fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec { + fn fold_tts(&mut self, tts: Vec) -> Vec { noop_fold_tts(tts, self) } @@ -336,8 +331,8 @@ pub fn fold_attrs(attrs: Vec, fld: &mut T) -> Vec(attrs: ThinAttributes, fld: &mut T) -> ThinAttributes { - attrs.map_thin_attrs(|v| fold_attrs(v, fld)) +pub fn fold_thin_attrs(attrs: ThinVec, fld: &mut T) -> ThinVec { + fold_attrs(attrs.into(), fld).into() } pub fn noop_fold_arm(Arm {attrs, pats, guard, body}: Arm, fld: &mut T) -> Arm { @@ -349,19 +344,6 @@ pub fn noop_fold_arm(Arm {attrs, pats, guard, body}: Arm, fld: &mut T } } -pub fn noop_fold_decl(d: P, fld: &mut T) -> SmallVector> { - d.and_then(|Spanned {node, span}| match node { - DeclKind::Local(l) => SmallVector::one(P(Spanned { - node: DeclKind::Local(fld.fold_local(l)), - span: fld.new_span(span) - })), - DeclKind::Item(it) => 
fld.fold_item(it).into_iter().map(|i| P(Spanned { - node: DeclKind::Item(i), - span: fld.new_span(span) - })).collect() - }) -} - pub fn noop_fold_ty_binding(b: TypeBinding, fld: &mut T) -> TypeBinding { TypeBinding { id: fld.new_id(b.id), @@ -498,7 +480,7 @@ pub fn noop_fold_local(l: P, fld: &mut T) -> P { pat: fld.fold_pat(pat), init: init.map(|e| fld.fold_expr(e)), span: fld.new_span(span), - attrs: attrs.map_thin_attrs(|v| fold_attrs(v, fld)), + attrs: fold_attrs(attrs.into(), fld).into(), }) } @@ -519,8 +501,7 @@ pub fn noop_fold_mac(Spanned {node, span}: Mac, fld: &mut T) -> Mac { Spanned { node: Mac_ { path: fld.fold_path(node.path), - tts: fld.fold_tts(&node.tts), - ctxt: node.ctxt, + tts: fld.fold_tts(node.tts), }, span: fld.new_span(span) } @@ -547,34 +528,26 @@ pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { } } -pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { - match *tt { +pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree { + match tt { TokenTree::Token(span, ref tok) => TokenTree::Token(span, fld.fold_token(tok.clone())), - TokenTree::Delimited(span, ref delimed) => { - TokenTree::Delimited(span, Rc::new( - Delimited { - delim: delimed.delim, - open_span: delimed.open_span, - tts: fld.fold_tts(&delimed.tts), - close_span: delimed.close_span, - } - )) - }, - TokenTree::Sequence(span, ref seq) => - TokenTree::Sequence(span, - Rc::new(SequenceRepetition { - tts: fld.fold_tts(&seq.tts), - separator: seq.separator.clone().map(|tok| fld.fold_token(tok)), - ..**seq - })), + TokenTree::Delimited(span, delimed) => TokenTree::Delimited(span, Delimited { + delim: delimed.delim, + open_span: delimed.open_span, + tts: fld.fold_tts(delimed.tts), + close_span: delimed.close_span, + }), + TokenTree::Sequence(span, seq) => TokenTree::Sequence(span, SequenceRepetition { + tts: fld.fold_tts(seq.tts), + separator: seq.separator.clone().map(|tok| fld.fold_token(tok)), + ..seq + }), } } -pub fn noop_fold_tts(tts: &[TokenTree], fld: &mut T) -> Vec { - // FIXME: Does this have to take a tts slice? - // Could use move_map otherwise... 
- tts.iter().map(|tt| fld.fold_tt(tt)).collect() +pub fn noop_fold_tts(tts: Vec, fld: &mut T) -> Vec { + tts.move_map(|tt| fld.fold_tt(tt)) } // apply ident folder if it's an ident, apply other folds to interpolated nodes @@ -632,7 +605,7 @@ pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T) token::NtIdent(Box::new(Spanned::{node: fld.fold_ident(id.node), ..*id})), token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))), - token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))), + token::NtTT(tt) => token::NtTT(tt.map(|tt| fld.fold_tt(tt))), token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)), token::NtImplItem(arm) => token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm) @@ -845,10 +818,9 @@ fn noop_fold_bounds(bounds: TyParamBounds, folder: &mut T) } pub fn noop_fold_block(b: P, folder: &mut T) -> P { - b.map(|Block {id, stmts, expr, rules, span}| Block { + b.map(|Block {id, stmts, rules, span}| Block { id: folder.new_id(id), stmts: stmts.move_flat_map(|s| folder.fold_stmt(s).into_iter()), - expr: expr.and_then(|x| folder.fold_opt_expr(x)), rules: rules, span: folder.new_span(span), }) @@ -945,6 +917,9 @@ pub fn noop_fold_trait_item(i: TraitItem, folder: &mut T) TraitItemKind::Type(folder.fold_bounds(bounds), default.map(|x| folder.fold_ty(x))) } + ast::TraitItemKind::Macro(mac) => { + TraitItemKind::Macro(folder.fold_mac(mac)) + } }, span: folder.new_span(i.span) }) @@ -1088,12 +1063,11 @@ pub fn noop_fold_pat(p: P, folder: &mut T) -> P { PatKind::TupleStruct(folder.fold_path(pth), pats.move_map(|x| folder.fold_pat(x)), ddpos) } - PatKind::Path(pth) => { - PatKind::Path(folder.fold_path(pth)) - } - PatKind::QPath(qself, pth) => { - let qself = QSelf {ty: folder.fold_ty(qself.ty), .. 
qself}; - PatKind::QPath(qself, folder.fold_path(pth)) + PatKind::Path(opt_qself, pth) => { + let opt_qself = opt_qself.map(|qself| { + QSelf { ty: folder.fold_ty(qself.ty), position: qself.position } + }); + PatKind::Path(opt_qself, folder.fold_path(pth)) } PatKind::Struct(pth, fields, etc) => { let pth = folder.fold_path(pth); @@ -1255,7 +1229,7 @@ pub fn noop_fold_expr(Expr {id, node, span, attrs}: Expr, folder: &mu respan(folder.new_span(label.span), folder.fold_ident(label.node))) ), - ExprKind::Again(opt_ident) => ExprKind::Again(opt_ident.map(|label| + ExprKind::Continue(opt_ident) => ExprKind::Continue(opt_ident.map(|label| respan(folder.new_span(label.span), folder.fold_ident(label.node))) ), @@ -1300,7 +1274,7 @@ pub fn noop_fold_expr(Expr {id, node, span, attrs}: Expr, folder: &mu ExprKind::Try(ex) => ExprKind::Try(folder.fold_expr(ex)), }, span: folder.new_span(span), - attrs: attrs.map_thin_attrs(|v| fold_attrs(v, folder)), + attrs: fold_attrs(attrs.into(), folder).into(), } } @@ -1312,44 +1286,50 @@ pub fn noop_fold_exprs(es: Vec>, folder: &mut T) -> Vec(Spanned {node, span}: Stmt, folder: &mut T) +pub fn noop_fold_stmt(Stmt {node, span, id}: Stmt, folder: &mut T) -> SmallVector { + let id = folder.new_id(id); let span = folder.new_span(span); + match node { - StmtKind::Decl(d, id) => { - let id = folder.new_id(id); - folder.fold_decl(d).into_iter().map(|d| Spanned { - node: StmtKind::Decl(d, id), - span: span - }).collect() - } - StmtKind::Expr(e, id) => { - let id = folder.new_id(id); - if let Some(e) = folder.fold_opt_expr(e) { - SmallVector::one(Spanned { - node: StmtKind::Expr(e, id), - span: span + StmtKind::Local(local) => SmallVector::one(Stmt { + id: id, + node: StmtKind::Local(folder.fold_local(local)), + span: span, + }), + StmtKind::Item(item) => folder.fold_item(item).into_iter().map(|item| Stmt { + id: id, + node: StmtKind::Item(item), + span: span, + }).collect(), + StmtKind::Expr(expr) => { + if let Some(expr) = folder.fold_opt_expr(expr) { + SmallVector::one(Stmt { + id: id, + node: StmtKind::Expr(expr), + span: span, }) } else { SmallVector::zero() } } - StmtKind::Semi(e, id) => { - let id = folder.new_id(id); - if let Some(e) = folder.fold_opt_expr(e) { - SmallVector::one(Spanned { - node: StmtKind::Semi(e, id), - span: span + StmtKind::Semi(expr) => { + if let Some(expr) = folder.fold_opt_expr(expr) { + SmallVector::one(Stmt { + id: id, + node: StmtKind::Semi(expr), + span: span, }) } else { SmallVector::zero() } } - StmtKind::Mac(mac, semi, attrs) => SmallVector::one(Spanned { - node: StmtKind::Mac(mac.map(|m| folder.fold_mac(m)), - semi, - attrs.map_thin_attrs(|v| fold_attrs(v, folder))), - span: span + StmtKind::Mac(mac) => SmallVector::one(Stmt { + id: id, + node: StmtKind::Mac(mac.map(|(mac, semi, attrs)| { + (folder.fold_mac(mac), semi, fold_attrs(attrs.into(), folder).into()) + })), + span: span, }) } } diff --git a/src/libsyntax/errors/json.rs b/src/libsyntax/json.rs similarity index 97% rename from src/libsyntax/errors/json.rs rename to src/libsyntax/json.rs index 93c6268ccaea1..dc9a5ee46645f 100644 --- a/src/libsyntax/errors/json.rs +++ b/src/libsyntax/json.rs @@ -19,10 +19,10 @@ // FIXME spec the JSON output properly. 
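The rewritten `noop_fold_stmt` above reflects the flattened statement type: the old `Stmt` was a `Spanned` wrapper whose `StmtKind::Decl`, `Expr`, and `Semi` variants each carried their own `NodeId`, with locals and items buried one level deeper inside a `Decl`, while the new `Stmt { id, node, span }` holds the id and span directly and `StmtKind::Local`, `StmtKind::Item`, and so on hold their payloads with no intermediate layer. A compact sketch of folding under the flat shape, using simplified stand-in types rather than the real `ast` definitions:

    // Simplified stand-ins for the flattened statement types.
    #[derive(Debug)]
    enum StmtKind {
        Local(String), // `let` binding (just a name, for the sketch)
        Expr(String),  // trailing expression
        Semi(String),  // expression statement ending in `;`
    }

    #[derive(Debug)]
    struct Stmt {
        id: u32,
        node: StmtKind,
        span: (u32, u32),
    }

    // Folding becomes one match over `stmt.node`: id and span are rewritten
    // once up front, and each arm rebuilds its node in place. Returning a Vec
    // mirrors the SmallVector in the real code, since a statement can expand
    // to zero or more statements.
    fn fold_stmt(stmt: Stmt, rename: &dyn Fn(String) -> String) -> Vec<Stmt> {
        let Stmt { id, node, span } = stmt;
        let node = match node {
            StmtKind::Local(name) => StmtKind::Local(rename(name)),
            StmtKind::Expr(e) => StmtKind::Expr(rename(e)),
            StmtKind::Semi(e) => StmtKind::Semi(rename(e)),
        };
        vec![Stmt { id, node, span }]
    }

    fn main() {
        let s = Stmt { id: 0, node: StmtKind::Local("x".into()), span: (0, 10) };
        let folded = fold_stmt(s, &|name: String| format!("{}_folded", name));
        println!("{:?}", folded);
    }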
- -use codemap::{self, MacroBacktrace, Span, SpanLabel, MultiSpan, CodeMap}; -use diagnostics::registry::Registry; -use errors::{Level, DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion}; +use codemap::CodeMap; +use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan}; +use errors::registry::Registry; +use errors::{Level, DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper}; use errors::emitter::Emitter; use std::rc::Rc; @@ -34,7 +34,7 @@ use rustc_serialize::json::as_json; pub struct JsonEmitter { dst: Box, registry: Option, - cm: Rc, + cm: Rc, } impl JsonEmitter { @@ -303,7 +303,7 @@ impl DiagnosticSpan { } impl DiagnosticSpanLine { - fn line_from_filemap(fm: &codemap::FileMap, + fn line_from_filemap(fm: &syntax_pos::FileMap, index: usize, h_start: usize, h_end: usize) @@ -354,12 +354,14 @@ impl DiagnosticCode { impl JsonEmitter { fn render(&self, render_span: &RenderSpan) -> Option { + use std::borrow::Borrow; + match *render_span { RenderSpan::FullSpan(_) => { None } RenderSpan::Suggestion(ref suggestion) => { - Some(suggestion.splice_lines(&self.cm)) + Some(suggestion.splice_lines(self.cm.borrow())) } } } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 420a41e03b914..652cf68db0759 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -33,7 +33,6 @@ #![feature(str_escape)] #![feature(unicode)] #![feature(question_mark)] -#![feature(range_contains)] extern crate serialize; extern crate term; @@ -41,9 +40,12 @@ extern crate libc; #[macro_use] extern crate log; #[macro_use] #[no_link] extern crate rustc_bitflags; extern crate rustc_unicode; +pub extern crate rustc_errors as errors; +extern crate syntax_pos; extern crate serialize as rustc_serialize; // used by deriving + // A variant of 'try!' that panics on an Err. This is used as a crutch on the // way towards a non-panic!-prone parser. It should be used for fatal parsing // errors; eventually we plan to convert all code using panictry to just use @@ -53,7 +55,7 @@ extern crate serialize as rustc_serialize; // used by deriving macro_rules! panictry { ($e:expr) => ({ use std::result::Result::{Ok, Err}; - use $crate::errors::FatalError; + use errors::FatalError; match $e { Ok(e) => e, Err(mut e) => { @@ -73,16 +75,18 @@ pub mod util { pub mod parser_testing; pub mod small_vector; pub mod move_map; + + mod thin_vec; + pub use self::thin_vec::ThinVec; } pub mod diagnostics { pub mod macros; pub mod plugin; - pub mod registry; pub mod metadata; } -pub mod errors; +pub mod json; pub mod syntax { pub use ext; @@ -104,6 +108,7 @@ pub mod show_span; pub mod std_inject; pub mod str; pub mod test; +pub mod tokenstream; pub mod visit; pub mod print { diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index db643eb0df07a..f6e94b7caeabe 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -10,7 +10,8 @@ use attr; use ast; -use codemap::{spanned, Spanned, mk_sp, Span}; +use syntax_pos::{mk_sp, Span}; +use codemap::{spanned, Spanned}; use parse::common::SeqSep; use parse::PResult; use parse::token; diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs index 89110f3160fc9..4fe4ec7e4c0ed 100644 --- a/src/libsyntax/parse/classify.rs +++ b/src/libsyntax/parse/classify.rs @@ -47,13 +47,9 @@ pub fn expr_is_simple_block(e: &ast::Expr) -> bool { /// seen the semicolon, and thus don't need another. 
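The `panictry!` macro, whose `use $crate::errors::FatalError` line changes above to the re-exported `errors::FatalError`, is a `try!`-style helper that turns an `Err` into an immediate panic instead of returning it to the caller; the compiler version emits the diagnostic it was handed before panicking. A freestanding approximation of the pattern with a made-up error type, leaving out the diagnostic machinery:

    // Minimal try-or-panic helper in the spirit of `panictry!`; the real macro
    // additionally emits the diagnostic before aborting.
    macro_rules! panictry {
        ($e:expr) => {
            match $e {
                Ok(v) => v,
                Err(e) => panic!("fatal parse error: {:?}", e),
            }
        };
    }

    #[derive(Debug)]
    struct ParseError(String);

    fn parse_digit(s: &str) -> Result<u32, ParseError> {
        s.chars()
            .next()
            .and_then(|c| c.to_digit(10))
            .ok_or_else(|| ParseError(format!("not a digit: {:?}", s)))
    }

    fn main() {
        let d = panictry!(parse_digit("7"));
        println!("parsed {}", d);
        // `panictry!(parse_digit("x"))` would panic here rather than hand a
        // Result back to the caller.
    }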
pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool { match *stmt { - ast::StmtKind::Decl(ref d, _) => { - match d.node { - ast::DeclKind::Local(_) => true, - ast::DeclKind::Item(_) => false, - } - } - ast::StmtKind::Expr(ref e, _) => expr_requires_semi_to_be_stmt(e), + ast::StmtKind::Local(_) => true, + ast::StmtKind::Item(_) => false, + ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e), ast::StmtKind::Semi(..) => false, ast::StmtKind::Mac(..) => false, } diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 06d255d5c0f92..5eb5605ea71a0 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -11,7 +11,8 @@ pub use self::CommentStyle::*; use ast; -use codemap::{BytePos, CharPos, CodeMap, Pos}; +use codemap::CodeMap; +use syntax_pos::{BytePos, CharPos, Pos}; use errors; use parse::lexer::is_block_doc_comment; use parse::lexer::{StringReader, TokenAndSpan}; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d78a81dec83db..809f4daa3616d 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -9,8 +9,8 @@ // except according to those terms. use ast; -use codemap::{BytePos, CharPos, CodeMap, Pos, Span}; -use codemap; +use syntax_pos::{self, BytePos, CharPos, Pos, Span}; +use codemap::CodeMap; use errors::{FatalError, Handler, DiagnosticBuilder}; use ext::tt::transcribe::tt_next_token; use parse::token::{self, keywords, str_to_ident}; @@ -84,7 +84,7 @@ pub struct StringReader<'a> { pub col: CharPos, /// The last character to be read pub curr: Option, - pub filemap: Rc, + pub filemap: Rc, // cached: pub peek_tok: token::Token, pub peek_span: Span, @@ -162,7 +162,7 @@ impl<'a> Reader for TtReader<'a> { impl<'a> StringReader<'a> { /// For comments.rs, which hackily pokes into pos and curr pub fn new_raw<'b>(span_diagnostic: &'b Handler, - filemap: Rc) + filemap: Rc) -> StringReader<'b> { if filemap.src.is_none() { span_diagnostic.bug(&format!("Cannot lex filemap \ @@ -181,7 +181,7 @@ impl<'a> StringReader<'a> { filemap: filemap, // dummy values; not read peek_tok: token::Eof, - peek_span: codemap::DUMMY_SP, + peek_span: syntax_pos::DUMMY_SP, source_text: source_text, fatal_errs: Vec::new(), }; @@ -190,7 +190,7 @@ impl<'a> StringReader<'a> { } pub fn new<'b>(span_diagnostic: &'b Handler, - filemap: Rc) + filemap: Rc) -> StringReader<'b> { let mut sr = StringReader::new_raw(span_diagnostic, filemap); if let Err(_) = sr.advance_token() { @@ -217,12 +217,12 @@ impl<'a> StringReader<'a> { /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { - self.fatal_span(codemap::mk_sp(from_pos, to_pos), m) + self.fatal_span(syntax_pos::mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). 
fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(codemap::mk_sp(from_pos, to_pos), m) + self.err_span(syntax_pos::mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -246,7 +246,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.span_diagnostic.struct_span_fatal(codemap::mk_sp(from_pos, to_pos), &m[..]) + self.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -270,7 +270,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.span_diagnostic.struct_span_err(codemap::mk_sp(from_pos, to_pos), &m[..]) + self.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -294,11 +294,11 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; - self.peek_span = codemap::mk_sp(self.filemap.end_pos, self.filemap.end_pos); + self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.last_pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = codemap::mk_sp(start_bytepos, self.last_pos); + self.peek_span = syntax_pos::mk_sp(start_bytepos, self.last_pos); }; } } @@ -473,7 +473,7 @@ impl<'a> StringReader<'a> { match self.curr { Some(c) => { if c.is_whitespace() { - self.span_diagnostic.span_err(codemap::mk_sp(self.last_pos, self.last_pos), + self.span_diagnostic.span_err(syntax_pos::mk_sp(self.last_pos, self.last_pos), "called consume_any_line_comment, but there \ was whitespace"); } @@ -524,13 +524,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: codemap::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.last_pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: codemap::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.last_pos), }) }; } @@ -563,7 +563,7 @@ impl<'a> StringReader<'a> { } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: codemap::mk_sp(start, self.last_pos), + sp: syntax_pos::mk_sp(start, self.last_pos), }); } } @@ -591,7 +591,7 @@ impl<'a> StringReader<'a> { } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: codemap::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.last_pos), }); debug!("scanning whitespace: {:?}", c); c @@ -653,7 +653,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: codemap::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.last_pos), }) }) } @@ -850,7 +850,7 @@ impl<'a> StringReader<'a> { let valid = if self.curr_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = codemap::mk_sp(start, self.last_pos); + let span = syntax_pos::mk_sp(start, self.last_pos); self.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -888,13 +888,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(codemap::mk_sp(escaped_pos, last_pos), + err.span_help(syntax_pos::mk_sp(escaped_pos, last_pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - err.span_help(codemap::mk_sp(escaped_pos, last_pos), + err.span_help(syntax_pos::mk_sp(escaped_pos, last_pos), "if used in a formatting 
string, curly braces \ are escaped with `{{` and `}}`"); } @@ -1677,7 +1677,8 @@ fn ident_continue(c: Option) -> bool { mod tests { use super::*; - use codemap::{BytePos, CodeMap, Span, NO_EXPANSION}; + use syntax_pos::{BytePos, Span, NO_EXPANSION}; + use codemap::CodeMap; use errors; use parse::token; use parse::token::str_to_ident; @@ -1686,7 +1687,10 @@ mod tests { fn mk_sh(cm: Rc) -> errors::Handler { // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. - let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), None, cm); + let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), + None, + cm, + errors::snippet::FormatMode::EnvironmentSelected); errors::Handler::with_emitter(true, false, Box::new(emitter)) } @@ -1889,7 +1893,7 @@ mod tests { let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); assert_eq!(comment.tok, token::Comment); - assert_eq!(comment.sp, ::codemap::mk_sp(BytePos(0), BytePos(7))); + assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, token::DocComment(token::intern("/// test"))); diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index d337c78bee8b5..dab97d1d5a6ff 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -11,7 +11,7 @@ // Characters and their corresponding confusables were collected from // http://www.unicode.org/Public/security/revision-06/confusables.txt -use codemap::mk_sp as make_span; +use syntax_pos::mk_sp as make_span; use errors::DiagnosticBuilder; use super::StringReader; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 2e4d46bc98318..bbcc044d43c6b 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -11,12 +11,14 @@ //! The main parser interface use ast; -use codemap::{self, Span, CodeMap, FileMap}; +use codemap::CodeMap; +use syntax_pos::{self, Span, FileMap}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use parse::parser::Parser; use parse::token::InternedString; use ptr::P; use str::char_at; +use tokenstream; use std::cell::RefCell; use std::iter; @@ -160,7 +162,7 @@ pub fn parse_tts_from_source_str<'a>(name: String, source: String, cfg: ast::CrateConfig, sess: &'a ParseSess) - -> PResult<'a, Vec> { + -> PResult<'a, Vec> { let mut p = new_parser_from_source_str( sess, cfg, @@ -211,8 +213,8 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, let end_pos = filemap.end_pos; let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg); - if parser.token == token::Eof && parser.span == codemap::DUMMY_SP { - parser.span = codemap::mk_sp(end_pos, end_pos); + if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { + parser.span = syntax_pos::mk_sp(end_pos, end_pos); } parser @@ -222,7 +224,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, // compiler expands into it pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, - tts: Vec) -> Parser<'a> { + tts: Vec) -> Parser<'a> { tts_to_parser(sess, tts, cfg) } @@ -247,7 +249,7 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) /// Given a filemap, produce a sequence of token-trees pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc) - -> Vec { + -> Vec { // it appears to me that the cfg doesn't matter here... 
indeed, // parsing tt's probably shouldn't require a parser at all. let cfg = Vec::new(); @@ -258,7 +260,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc) /// Given tts and cfg, produce a parser pub fn tts_to_parser<'a>(sess: &'a ParseSess, - tts: Vec, + tts: Vec, cfg: ast::CrateConfig) -> Parser<'a> { let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts); let mut p = Parser::new(sess, cfg, Box::new(trdr)); @@ -660,9 +662,9 @@ pub fn integer_lit(s: &str, #[cfg(test)] mod tests { use super::*; - use std::rc::Rc; - use codemap::{Span, BytePos, Pos, Spanned, NO_EXPANSION}; - use ast::{self, TokenTree, PatKind}; + use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION}; + use codemap::Spanned; + use ast::{self, PatKind}; use abi::Abi; use attr::{first_attr_value_str_by_name, AttrMetaMethods}; use parse; @@ -670,10 +672,12 @@ mod tests { use parse::token::{str_to_ident}; use print::pprust::item_to_string; use ptr::P; + use tokenstream::{self, TokenTree}; use util::parser_testing::{string_to_tts, string_to_parser}; use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt}; + use util::ThinVec; - // produce a codemap::span + // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION} } @@ -693,7 +697,7 @@ mod tests { ), }), span: sp(0, 1), - attrs: None, + attrs: ThinVec::new(), })) } @@ -716,7 +720,7 @@ mod tests { ) }), span: sp(0, 6), - attrs: None, + attrs: ThinVec::new(), })) } @@ -729,7 +733,7 @@ mod tests { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = &tts[..]; + let tts: &[tokenstream::TokenTree] = &tts[..]; match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( @@ -759,7 +763,7 @@ mod tests { ) if first_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, - _ => panic!("value 3: {:?}", **first_delimed), + _ => panic!("value 3: {:?}", *first_delimed), } let tts = &second_delimed.tts[..]; match (tts.len(), tts.get(0), tts.get(1)) { @@ -770,10 +774,10 @@ mod tests { ) if second_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, - _ => panic!("value 4: {:?}", **second_delimed), + _ => panic!("value 4: {:?}", *second_delimed), } }, - _ => panic!("value 2: {:?}", **macro_delimed), + _ => panic!("value 2: {:?}", *macro_delimed), } }, _ => panic!("value: {:?}",tts), @@ -789,7 +793,7 @@ mod tests { TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))), TokenTree::Delimited( sp(5, 14), - Rc::new(ast::Delimited { + tokenstream::Delimited { delim: token::DelimToken::Paren, open_span: sp(5, 6), tts: vec![ @@ -798,10 +802,10 @@ mod tests { TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))), ], close_span: sp(13, 14), - })), + }), TokenTree::Delimited( sp(15, 21), - Rc::new(ast::Delimited { + tokenstream::Delimited { delim: token::DelimToken::Brace, open_span: sp(15, 16), tts: vec![ @@ -809,7 +813,7 @@ mod tests { TokenTree::Token(sp(18, 19), token::Semi), ], close_span: sp(20, 21), - })) + }) ]; assert_eq!(tts, expected); @@ -832,16 +836,16 @@ mod tests { ), }), span:sp(7,8), - attrs: None, + attrs: ThinVec::new(), }))), span:sp(0,8), - attrs: None, + attrs: ThinVec::new(), })) } #[test] fn parse_stmt_1 () { assert!(string_to_stmt("b;".to_string()) == - Some(Spanned{ + Some(ast::Stmt { node: ast::StmtKind::Expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Path(None, ast::Path { @@ -855,8 +859,8 @@ mod tests { ), }), span: sp(0,1), - 
attrs: None}), - ast::DUMMY_NODE_ID), + attrs: ThinVec::new()})), + id: ast::DUMMY_NODE_ID, span: sp(0,1)})) } @@ -932,7 +936,7 @@ mod tests { } }, P(ast::Block { - stmts: vec!(Spanned{ + stmts: vec!(ast::Stmt { node: ast::StmtKind::Semi(P(ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Path(None, @@ -950,10 +954,9 @@ mod tests { ), }), span: sp(17,18), - attrs: None,}), - ast::DUMMY_NODE_ID), + attrs: ThinVec::new()})), + id: ast::DUMMY_NODE_ID, span: sp(17,19)}), - expr: None, id: ast::DUMMY_NODE_ID, rules: ast::BlockCheckMode::Default, // no idea span: sp(15,21), @@ -992,8 +995,8 @@ mod tests { struct PatIdentVisitor { spans: Vec } - impl<'v> ::visit::Visitor<'v> for PatIdentVisitor { - fn visit_pat(&mut self, p: &'v ast::Pat) { + impl ::visit::Visitor for PatIdentVisitor { + fn visit_pat(&mut self, p: &ast::Pat) { match p.node { PatKind::Ident(_ , ref spannedident, _) => { self.spans.push(spannedident.span.clone()); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 75f1ac49c9acc..a1d7ddcdf4bdf 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -13,7 +13,7 @@ //! //! Obsolete syntax that becomes too hard to parse can be removed. -use codemap::Span; +use syntax_pos::Span; use parse::parser; /// The specific types of unsupported syntax diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 341b076e7cf30..813d90103b887 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -16,8 +16,8 @@ use ast::{Mod, Arg, Arm, Attribute, BindingMode, TraitItemKind}; use ast::Block; use ast::{BlockCheckMode, CaptureBy}; use ast::{Constness, Crate, CrateConfig}; -use ast::{Decl, DeclKind, Defaultness}; -use ast::{EMPTY_CTXT, EnumDef}; +use ast::Defaultness; +use ast::EnumDef; use ast::{Expr, ExprKind, RangeLimits}; use ast::{Field, FnDecl}; use ast::{ForeignItem, ForeignItemKind, FunctionRetTy}; @@ -33,14 +33,14 @@ use ast::{Stmt, StmtKind}; use ast::{VariantData, StructField}; use ast::StrStyle; use ast::SelfKind; -use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef}; +use ast::{TraitItem, TraitRef}; use ast::{Ty, TyKind, TypeBinding, TyParam, TyParamBounds}; use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; use ast::{Visibility, WhereClause}; -use attr::{ThinAttributes, ThinAttributesExt, AttributesExt}; use ast::{BinOpKind, UnOp}; use ast; -use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp, CodeMap}; +use codemap::{self, CodeMap, Spanned, spanned}; +use syntax_pos::{self, Span, BytePos, mk_sp}; use errors::{self, DiagnosticBuilder}; use ext::tt::macro_parser; use parse; @@ -55,6 +55,8 @@ use util::parser::{AssocOp, Fixity}; use print::pprust; use ptr::P; use parse::PResult; +use tokenstream::{self, Delimited, SequenceRepetition, TokenTree}; +use util::ThinVec; use std::collections::HashSet; use std::mem; @@ -120,7 +122,7 @@ macro_rules! maybe_whole_expr { _ => unreachable!() }; let span = $p.span; - Some($p.mk_expr(span.lo, span.hi, ExprKind::Path(None, pt), None)) + Some($p.mk_expr(span.lo, span.hi, ExprKind::Path(None, pt), ThinVec::new())) } token::Interpolated(token::NtBlock(_)) => { // FIXME: The following avoids an issue with lexical borrowck scopes, @@ -130,7 +132,7 @@ macro_rules! 
maybe_whole_expr { _ => unreachable!() }; let span = $p.span; - Some($p.mk_expr(span.lo, span.hi, ExprKind::Block(b), None)) + Some($p.mk_expr(span.lo, span.hi, ExprKind::Block(b), ThinVec::new())) } _ => None }; @@ -316,12 +318,12 @@ pub struct ModulePathError { pub enum LhsExpr { NotYetParsed, - AttributesParsed(ThinAttributes), + AttributesParsed(ThinVec), AlreadyParsed(P), } -impl From> for LhsExpr { - fn from(o: Option) -> Self { +impl From>> for LhsExpr { + fn from(o: Option>) -> Self { if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { @@ -344,7 +346,7 @@ impl<'a> Parser<'a> { { let tok0 = rdr.real_token(); let span = tok0.sp; - let filename = if span != codemap::DUMMY_SP { + let filename = if span != syntax_pos::DUMMY_SP { Some(sess.codemap().span_to_filename(span)) } else { None }; let placeholder = TokenAndSpan { @@ -1232,55 +1234,70 @@ impl<'a> Parser<'a> { } /// Parse the items in a trait declaration - pub fn parse_trait_items(&mut self) -> PResult<'a, Vec> { - self.parse_unspanned_seq( - &token::OpenDelim(token::Brace), - &token::CloseDelim(token::Brace), - SeqSep::none(), - |p| -> PResult<'a, TraitItem> { - maybe_whole!(no_clone_from_p p, NtTraitItem); - let mut attrs = p.parse_outer_attributes()?; - let lo = p.span.lo; - - let (name, node) = if p.eat_keyword(keywords::Type) { - let TyParam {ident, bounds, default, ..} = p.parse_ty_param()?; - p.expect(&token::Semi)?; - (ident, TraitItemKind::Type(bounds, default)) - } else if p.is_const_item() { - p.expect_keyword(keywords::Const)?; - let ident = p.parse_ident()?; - p.expect(&token::Colon)?; - let ty = p.parse_ty_sum()?; - let default = if p.check(&token::Eq) { - p.bump(); - let expr = p.parse_expr()?; - p.commit_expr_expecting(&expr, token::Semi)?; - Some(expr) - } else { - p.expect(&token::Semi)?; - None - }; - (ident, TraitItemKind::Const(ty, default)) + pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> { + maybe_whole!(no_clone_from_p self, NtTraitItem); + let mut attrs = self.parse_outer_attributes()?; + let lo = self.span.lo; + + let (name, node) = if self.eat_keyword(keywords::Type) { + let TyParam {ident, bounds, default, ..} = self.parse_ty_param()?; + self.expect(&token::Semi)?; + (ident, TraitItemKind::Type(bounds, default)) + } else if self.is_const_item() { + self.expect_keyword(keywords::Const)?; + let ident = self.parse_ident()?; + self.expect(&token::Colon)?; + let ty = self.parse_ty_sum()?; + let default = if self.check(&token::Eq) { + self.bump(); + let expr = self.parse_expr()?; + self.commit_expr_expecting(&expr, token::Semi)?; + Some(expr) + } else { + self.expect(&token::Semi)?; + None + }; + (ident, TraitItemKind::Const(ty, default)) + } else if !self.token.is_any_keyword() + && self.look_ahead(1, |t| *t == token::Not) + && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren)) + || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) { + // trait item macro. + // code copied from parse_macro_use_or_failure... abstraction! + let lo = self.span.lo; + let pth = self.parse_ident_into_path()?; + self.expect(&token::Not)?; + + // eat a matched-delimiter token tree: + let delim = self.expect_open_delim()?; + let tts = self.parse_seq_to_end(&token::CloseDelim(delim), + SeqSep::none(), + |pp| pp.parse_token_tree())?; + let m_ = Mac_ { path: pth, tts: tts }; + let m: ast::Mac = codemap::Spanned { node: m_, + span: mk_sp(lo, + self.last_span.hi) }; + if delim != token::Brace { + self.expect(&token::Semi)? 
+ } + (keywords::Invalid.ident(), ast::TraitItemKind::Macro(m)) } else { - let (constness, unsafety, abi) = match p.parse_fn_front_matter() { + let (constness, unsafety, abi) = match self.parse_fn_front_matter() { Ok(cua) => cua, Err(e) => { loop { - match p.token { + match self.token { token::Eof => break, - token::CloseDelim(token::Brace) | token::Semi => { - p.bump(); + self.bump(); break; } - token::OpenDelim(token::Brace) => { - p.parse_token_tree()?; + self.parse_token_tree()?; break; } - - _ => p.bump() + _ => self.bump() } } @@ -1288,17 +1305,17 @@ impl<'a> Parser<'a> { } }; - let ident = p.parse_ident()?; - let mut generics = p.parse_generics()?; + let ident = self.parse_ident()?; + let mut generics = self.parse_generics()?; - let d = p.parse_fn_decl_with_self(|p: &mut Parser<'a>|{ + let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>|{ // This is somewhat dubious; We don't want to allow // argument names to be left off if there is a // definition... p.parse_arg_general(false) })?; - generics.where_clause = p.parse_where_clause()?; + generics.where_clause = self.parse_where_clause()?; let sig = ast::MethodSig { unsafety: unsafety, constness: constness, @@ -1307,37 +1324,47 @@ impl<'a> Parser<'a> { abi: abi, }; - let body = match p.token { - token::Semi => { - p.bump(); - debug!("parse_trait_methods(): parsing required method"); - None - } - token::OpenDelim(token::Brace) => { - debug!("parse_trait_methods(): parsing provided method"); - let (inner_attrs, body) = - p.parse_inner_attrs_and_block()?; - attrs.extend(inner_attrs.iter().cloned()); - Some(body) - } + let body = match self.token { + token::Semi => { + self.bump(); + debug!("parse_trait_methods(): parsing required method"); + None + } + token::OpenDelim(token::Brace) => { + debug!("parse_trait_methods(): parsing provided method"); + let (inner_attrs, body) = + self.parse_inner_attrs_and_block()?; + attrs.extend(inner_attrs.iter().cloned()); + Some(body) + } - _ => { - let token_str = p.this_token_to_string(); - return Err(p.fatal(&format!("expected `;` or `{{`, found `{}`", - token_str)[..])) - } + _ => { + let token_str = self.this_token_to_string(); + return Err(self.fatal(&format!("expected `;` or `{{`, found `{}`", + token_str)[..])) + } }; (ident, ast::TraitItemKind::Method(sig, body)) }; + Ok(TraitItem { + id: ast::DUMMY_NODE_ID, + ident: name, + attrs: attrs, + node: node, + span: mk_sp(lo, self.last_span.hi), + }) + } - Ok(TraitItem { - id: ast::DUMMY_NODE_ID, - ident: name, - attrs: attrs, - node: node, - span: mk_sp(lo, p.last_span.hi), + + /// Parse the items in a trait declaration + pub fn parse_trait_items(&mut self) -> PResult<'a, Vec> { + self.parse_unspanned_seq( + &token::OpenDelim(token::Brace), + &token::CloseDelim(token::Brace), + SeqSep::none(), + |p| -> PResult<'a, TraitItem> { + p.parse_trait_item() }) - }) } /// Parse a possibly mutable type @@ -1467,7 +1494,7 @@ impl<'a> Parser<'a> { SeqSep::none(), |p| p.parse_token_tree())?; let hi = self.span.hi; - TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts, ctxt: EMPTY_CTXT })) + TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts })) } else { // NAMED TYPE TyKind::Path(None, path) @@ -1676,12 +1703,12 @@ impl<'a> Parser<'a> { let lo = self.span.lo; let literal = P(self.parse_lit()?); let hi = self.last_span.hi; - let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), None); + let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), ThinVec::new()); if minus_present { let minus_hi = self.last_span.hi; let unary = self.mk_unary(UnOp::Neg, 
expr); - Ok(self.mk_expr(minus_lo, minus_hi, unary, None)) + Ok(self.mk_expr(minus_lo, minus_hi, unary, ThinVec::new())) } else { Ok(expr) } @@ -2039,13 +2066,13 @@ impl<'a> Parser<'a> { }) } - pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, - node: ExprKind, attrs: ThinAttributes) -> P { + pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: ExprKind, attrs: ThinVec) + -> P { P(Expr { id: ast::DUMMY_NODE_ID, node: node, span: mk_sp(lo, hi), - attrs: attrs, + attrs: attrs.into(), }) } @@ -2102,7 +2129,7 @@ impl<'a> Parser<'a> { } pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos, - m: Mac_, attrs: ThinAttributes) -> P { + m: Mac_, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, node: ExprKind::Mac(codemap::Spanned {node: m, span: mk_sp(lo, hi)}), @@ -2111,7 +2138,7 @@ impl<'a> Parser<'a> { }) } - pub fn mk_lit_u32(&mut self, i: u32, attrs: ThinAttributes) -> P { + pub fn mk_lit_u32(&mut self, i: u32, attrs: ThinVec) -> P { let span = &self.span; let lv_lit = P(codemap::Spanned { node: LitKind::Int(i as u64, ast::LitIntType::Unsigned(UintTy::U32)), @@ -2152,7 +2179,7 @@ impl<'a> Parser<'a> { // // Therefore, prevent sub-parser from parsing // attributes by giving them a empty "already parsed" list. - let mut attrs = None; + let mut attrs = ThinVec::new(); let lo = self.span.lo; let mut hi = self.span.hi; @@ -2164,9 +2191,7 @@ impl<'a> Parser<'a> { token::OpenDelim(token::Paren) => { self.bump(); - let attrs = self.parse_inner_attributes()? - .into_thin_attrs() - .prepend(attrs); + attrs.extend(self.parse_inner_attributes()?); // (e) is parenthesized e // (e,) is a tuple with only one field, e @@ -2204,9 +2229,7 @@ impl<'a> Parser<'a> { token::OpenDelim(token::Bracket) => { self.bump(); - let inner_attrs = self.parse_inner_attributes()? - .into_thin_attrs(); - attrs.update(|attrs| attrs.append(inner_attrs)); + attrs.extend(self.parse_inner_attributes()?); if self.check(&token::CloseDelim(token::Bracket)) { // Empty vector. @@ -2285,14 +2308,14 @@ impl<'a> Parser<'a> { } if self.eat_keyword(keywords::Continue) { let ex = if self.token.is_lifetime() { - let ex = ExprKind::Again(Some(Spanned{ + let ex = ExprKind::Continue(Some(Spanned{ node: self.get_lifetime(), span: self.span })); self.bump(); ex } else { - ExprKind::Again(None) + ExprKind::Continue(None) }; let hi = self.last_span.hi; return Ok(self.mk_expr(lo, hi, ex, attrs)); @@ -2348,7 +2371,7 @@ impl<'a> Parser<'a> { return Ok(self.mk_mac_expr(lo, hi, - Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT }, + Mac_ { path: pth, tts: tts }, attrs)); } if self.check(&token::OpenDelim(token::Brace)) { @@ -2363,9 +2386,7 @@ impl<'a> Parser<'a> { let mut fields = Vec::new(); let mut base = None; - let attrs = attrs.append( - self.parse_inner_attributes()? 
- .into_thin_attrs()); + attrs.extend(self.parse_inner_attributes()?); while self.token != token::CloseDelim(token::Brace) { if self.eat(&token::DotDot) { @@ -2432,25 +2453,24 @@ impl<'a> Parser<'a> { } fn parse_or_use_outer_attributes(&mut self, - already_parsed_attrs: Option) - -> PResult<'a, ThinAttributes> { + already_parsed_attrs: Option>) + -> PResult<'a, ThinVec> { if let Some(attrs) = already_parsed_attrs { Ok(attrs) } else { - self.parse_outer_attributes().map(|a| a.into_thin_attrs()) + self.parse_outer_attributes().map(|a| a.into()) } } /// Parse a block or unsafe block pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode, - attrs: ThinAttributes) + outer_attrs: ThinVec) -> PResult<'a, P> { - let outer_attrs = attrs; self.expect(&token::OpenDelim(token::Brace))?; - let inner_attrs = self.parse_inner_attributes()?.into_thin_attrs(); - let attrs = outer_attrs.append(inner_attrs); + let mut attrs = outer_attrs; + attrs.extend(self.parse_inner_attributes()?); let blk = self.parse_block_tail(lo, blk_mode)?; return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), attrs)); @@ -2458,7 +2478,7 @@ impl<'a> Parser<'a> { /// parse a.b or a(13) or a[4] or just a pub fn parse_dot_or_call_expr(&mut self, - already_parsed_attrs: Option) + already_parsed_attrs: Option>) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; @@ -2470,7 +2490,7 @@ impl<'a> Parser<'a> { pub fn parse_dot_or_call_expr_with(&mut self, e0: P, lo: BytePos, - attrs: ThinAttributes) + mut attrs: ThinVec) -> PResult<'a, P> { // Stitch the list of outer attributes onto the return value. // A little bit ugly, but the best way given the current code @@ -2478,12 +2498,13 @@ impl<'a> Parser<'a> { self.parse_dot_or_call_expr_with_(e0, lo) .map(|expr| expr.map(|mut expr| { - expr.attrs.update(|a| a.prepend(attrs)); + attrs.extend::>(expr.attrs.into()); + expr.attrs = attrs; match expr.node { ExprKind::If(..) | ExprKind::IfLet(..) => { - if !expr.attrs.as_attr_slice().is_empty() { + if !expr.attrs.is_empty() { // Just point to the first attribute in there... - let span = expr.attrs.as_attr_slice()[0].span; + let span = expr.attrs[0].span; self.span_err(span, "attributes are not yet allowed on `if` \ @@ -2531,7 +2552,7 @@ impl<'a> Parser<'a> { es.insert(0, self_value); let id = spanned(ident_span.lo, ident_span.hi, ident); let nd = self.mk_method_call(id, tys, es); - self.mk_expr(lo, hi, nd, None) + self.mk_expr(lo, hi, nd, ThinVec::new()) } // Field access. _ => { @@ -2544,7 +2565,7 @@ impl<'a> Parser<'a> { let id = spanned(ident_span.lo, ident_span.hi, ident); let field = self.mk_field(self_value, id); - self.mk_expr(lo, ident_span.hi, field, None) + self.mk_expr(lo, ident_span.hi, field, ThinVec::new()) } }) } @@ -2556,7 +2577,7 @@ impl<'a> Parser<'a> { // expr? while self.eat(&token::Question) { let hi = self.last_span.hi; - e = self.mk_expr(lo, hi, ExprKind::Try(e), None); + e = self.mk_expr(lo, hi, ExprKind::Try(e), ThinVec::new()); } // expr.f @@ -2584,7 +2605,7 @@ impl<'a> Parser<'a> { Some(n) => { let id = spanned(dot, hi, n); let field = self.mk_tup_field(e, id); - e = self.mk_expr(lo, hi, field, None); + e = self.mk_expr(lo, hi, field, ThinVec::new()); } None => { let last_span = self.last_span; @@ -2636,7 +2657,7 @@ impl<'a> Parser<'a> { hi = self.last_span.hi; let nd = self.mk_call(e, es); - e = self.mk_expr(lo, hi, nd, None); + e = self.mk_expr(lo, hi, nd, ThinVec::new()); } // expr[...] 
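Illustrative aside (not part of the diff): the mechanical rewrites in the hunks above, `None` becoming `ThinVec::new()` and `attrs.append(inner.into_thin_attrs())` becoming `attrs.extend(inner)`, all follow from replacing the Option-based `ThinAttributes` with the new `ThinVec<Attribute>`. A minimal standalone sketch of that pattern, with simplified names and no rustc-internal types, might look like this:

// Stand-in for the ThinVec introduced later in this patch: an empty
// vector is represented by a single null pointer, a non-empty one by a
// boxed Vec, so attribute-less AST nodes stay one word in size.
struct ThinVec<T>(Option<Box<Vec<T>>>);

impl<T> ThinVec<T> {
    fn new() -> Self {
        ThinVec(None)
    }

    fn is_empty(&self) -> bool {
        self.0.as_ref().map_or(true, |v| v.is_empty())
    }
}

impl<T> Extend<T> for ThinVec<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Allocate the inner Vec only when something is actually pushed.
        self.0.get_or_insert_with(|| Box::new(Vec::new())).extend(iter);
    }
}

fn main() {
    // Outer attributes were parsed first; inner attributes found later
    // are appended in place instead of rebuilding an Option chain.
    let mut attrs: ThinVec<&str> = ThinVec::new();
    attrs.extend(vec!["#[inline]", "#![allow(dead_code)]"]);
    assert!(!attrs.is_empty());
}

The real type (added in src/libsyntax/util/thin_vec.rs further down) also implements From<Vec<T>>, Into<Vec<T>>, and Deref to a slice, which is what the `attrs.into()` conversions and the `expr.attrs[0].span` access above rely on.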
@@ -2647,7 +2668,7 @@ impl<'a> Parser<'a> { hi = self.span.hi; self.commit_expr_expecting(&ix, token::CloseDelim(token::Bracket))?; let index = self.mk_index(e, ix); - e = self.mk_expr(lo, hi, index, None) + e = self.mk_expr(lo, hi, index, ThinVec::new()) } _ => return Ok(e) } @@ -2671,13 +2692,12 @@ impl<'a> Parser<'a> { )?; let (sep, repeat) = self.parse_sep_and_kleene_op()?; let name_num = macro_parser::count_names(&seq); - return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), - Rc::new(SequenceRepetition { - tts: seq, - separator: sep, - op: repeat, - num_captures: name_num - }))); + return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), SequenceRepetition { + tts: seq, + separator: sep, + op: repeat, + num_captures: name_num + })); } else if self.token.is_keyword(keywords::Crate) { self.bump(); return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); @@ -2720,16 +2740,17 @@ impl<'a> Parser<'a> { /// Parse an optional separator followed by a Kleene-style /// repetition token (+ or *). pub fn parse_sep_and_kleene_op(&mut self) - -> PResult<'a, (Option, ast::KleeneOp)> { - fn parse_kleene_op<'a>(parser: &mut Parser<'a>) -> PResult<'a, Option> { + -> PResult<'a, (Option, tokenstream::KleeneOp)> { + fn parse_kleene_op<'a>(parser: &mut Parser<'a>) -> + PResult<'a, Option> { match parser.token { token::BinOp(token::Star) => { parser.bump(); - Ok(Some(ast::KleeneOp::ZeroOrMore)) + Ok(Some(tokenstream::KleeneOp::ZeroOrMore)) }, token::BinOp(token::Plus) => { parser.bump(); - Ok(Some(ast::KleeneOp::OneOrMore)) + Ok(Some(tokenstream::KleeneOp::OneOrMore)) }, _ => Ok(None) } @@ -2832,12 +2853,12 @@ impl<'a> Parser<'a> { _ => {} } - Ok(TokenTree::Delimited(span, Rc::new(Delimited { + Ok(TokenTree::Delimited(span, Delimited { delim: delim, open_span: open_span, tts: tts, close_span: close_span, - }))) + })) }, _ => { // invariants: the current token is not a left-delimiter, @@ -2878,7 +2899,7 @@ impl<'a> Parser<'a> { /// Parse a prefix-unary-operator expr pub fn parse_prefix_expr(&mut self, - already_parsed_attrs: Option) + already_parsed_attrs: Option>) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let lo = self.span.lo; @@ -2923,8 +2944,7 @@ impl<'a> Parser<'a> { let blk = self.parse_block()?; let span = blk.span; hi = span.hi; - let blk_expr = self.mk_expr(span.lo, span.hi, ExprKind::Block(blk), - None); + let blk_expr = self.mk_expr(span.lo, hi, ExprKind::Block(blk), ThinVec::new()); ExprKind::InPlace(place, blk_expr) } token::Ident(..) if self.token.is_keyword(keywords::Box) => { @@ -2944,7 +2964,7 @@ impl<'a> Parser<'a> { /// This parses an expression accounting for associativity and precedence of the operators in /// the expression. 
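Illustrative aside (not part of the diff): the doc comment above describes precedence- and associativity-aware expression parsing; the `parse_assoc_expr`/`parse_assoc_expr_with` pair that follows implements it over `AssocOp` and the real token stream. A rough, self-contained sketch of the same precedence-climbing idea, using toy token and operator types rather than the rustc ones:

// Precedence climbing over a toy token stream. Left associativity
// comes from parsing the right-hand side at `prec + 1`.
#[derive(Clone, Copy, Debug)]
enum Op { Add, Mul }

impl Op {
    fn precedence(self) -> u8 {
        match self {
            Op::Add => 1,
            Op::Mul => 2,
        }
    }
}

enum Tok { Num(i64), Op(Op) }

#[derive(Debug)]
enum Expr {
    Num(i64),
    Binary(Op, Box<Expr>, Box<Expr>),
}

fn parse_primary(tokens: &[Tok], pos: &mut usize) -> Expr {
    match tokens[*pos] {
        Tok::Num(n) => { *pos += 1; Expr::Num(n) }
        _ => panic!("expected a literal"),
    }
}

fn parse_expr(tokens: &[Tok], pos: &mut usize, min_prec: u8) -> Expr {
    let mut lhs = parse_primary(tokens, pos);
    // Keep folding operators whose precedence is at least `min_prec`.
    while let Some(&Tok::Op(op)) = tokens.get(*pos) {
        if op.precedence() < min_prec { break; }
        *pos += 1;
        let rhs = parse_expr(tokens, pos, op.precedence() + 1);
        lhs = Expr::Binary(op, Box::new(lhs), Box::new(rhs));
    }
    lhs
}

fn main() {
    // 1 + 2 * 3 parses as Binary(Add, 1, Binary(Mul, 2, 3)).
    let tokens = [Tok::Num(1), Tok::Op(Op::Add), Tok::Num(2),
                  Tok::Op(Op::Mul), Tok::Num(3)];
    let mut pos = 0;
    println!("{:?}", parse_expr(&tokens, &mut pos, 0));
}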
pub fn parse_assoc_expr(&mut self, - already_parsed_attrs: Option) + already_parsed_attrs: Option>) -> PResult<'a, P> { self.parse_assoc_expr_with(0, already_parsed_attrs.into()) } @@ -2997,13 +3017,13 @@ impl<'a> Parser<'a> { // Special cases: if op == AssocOp::As { let rhs = self.parse_ty()?; - lhs = self.mk_expr(lhs_span.lo, rhs.span.hi, - ExprKind::Cast(lhs, rhs), None); + let (lo, hi) = (lhs_span.lo, rhs.span.hi); + lhs = self.mk_expr(lo, hi, ExprKind::Cast(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::Colon { let rhs = self.parse_ty()?; - lhs = self.mk_expr(lhs_span.lo, rhs.span.hi, - ExprKind::Type(lhs, rhs), None); + let (lo, hi) = (lhs_span.lo, rhs.span.hi); + lhs = self.mk_expr(lo, hi, ExprKind::Type(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::DotDot || op == AssocOp::DotDotDot { // If we didn’t have to handle `x..`/`x...`, it would be pretty easy to @@ -3029,7 +3049,7 @@ impl<'a> Parser<'a> { }; let r = try!(self.mk_range(Some(lhs), rhs, limits)); - lhs = self.mk_expr(lhs_span.lo, rhs_span.hi, r, None); + lhs = self.mk_expr(lhs_span.lo, rhs_span.hi, r, ThinVec::new()); break } @@ -3056,6 +3076,7 @@ impl<'a> Parser<'a> { }), }?; + let (lo, hi) = (lhs_span.lo, rhs.span.hi); lhs = match op { AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | @@ -3063,14 +3084,13 @@ impl<'a> Parser<'a> { AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual | AssocOp::Greater | AssocOp::GreaterEqual => { let ast_op = op.to_ast_binop().unwrap(); - let (lhs_span, rhs_span) = (lhs_span, rhs.span); let binary = self.mk_binary(codemap::respan(cur_op_span, ast_op), lhs, rhs); - self.mk_expr(lhs_span.lo, rhs_span.hi, binary, None) + self.mk_expr(lo, hi, binary, ThinVec::new()) } AssocOp::Assign => - self.mk_expr(lhs_span.lo, rhs.span.hi, ExprKind::Assign(lhs, rhs), None), + self.mk_expr(lo, hi, ExprKind::Assign(lhs, rhs), ThinVec::new()), AssocOp::Inplace => - self.mk_expr(lhs_span.lo, rhs.span.hi, ExprKind::InPlace(lhs, rhs), None), + self.mk_expr(lo, hi, ExprKind::InPlace(lhs, rhs), ThinVec::new()), AssocOp::AssignOp(k) => { let aop = match k { token::Plus => BinOpKind::Add, @@ -3084,9 +3104,8 @@ impl<'a> Parser<'a> { token::Shl => BinOpKind::Shl, token::Shr => BinOpKind::Shr, }; - let (lhs_span, rhs_span) = (lhs_span, rhs.span); let aopexpr = self.mk_assign_op(codemap::respan(cur_op_span, aop), lhs, rhs); - self.mk_expr(lhs_span.lo, rhs_span.hi, aopexpr, None) + self.mk_expr(lo, hi, aopexpr, ThinVec::new()) } AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotDot => { self.bug("As, Colon, DotDot or DotDotDot branch reached") @@ -3121,7 +3140,7 @@ impl<'a> Parser<'a> { /// Parse prefix-forms of range notation: `..expr`, `..`, `...expr` fn parse_prefix_range_expr(&mut self, - already_parsed_attrs: Option) + already_parsed_attrs: Option>) -> PResult<'a, P> { debug_assert!(self.token == token::DotDot || self.token == token::DotDotDot); let tok = self.token.clone(); @@ -3166,7 +3185,7 @@ impl<'a> Parser<'a> { } /// Parse an 'if' or 'if let' expression ('if' token already eaten) - pub fn parse_if_expr(&mut self, attrs: ThinAttributes) -> PResult<'a, P> { + pub fn parse_if_expr(&mut self, attrs: ThinVec) -> PResult<'a, P> { if self.check_keyword(keywords::Let) { return self.parse_if_let_expr(attrs); } @@ -3184,7 +3203,7 @@ impl<'a> Parser<'a> { } /// Parse an 'if let' expression ('if' token already eaten) - pub fn parse_if_let_expr(&mut self, attrs: 
ThinAttributes) + pub fn parse_if_let_expr(&mut self, attrs: ThinVec) -> PResult<'a, P> { let lo = self.last_span.lo; self.expect_keyword(keywords::Let)?; @@ -3205,7 +3224,7 @@ impl<'a> Parser<'a> { pub fn parse_lambda_expr(&mut self, lo: BytePos, capture_clause: CaptureBy, - attrs: ThinAttributes) + attrs: ThinVec) -> PResult<'a, P> { let decl = self.parse_fn_block_decl()?; @@ -3217,9 +3236,12 @@ impl<'a> Parser<'a> { let body_expr = self.parse_expr()?; P(ast::Block { id: ast::DUMMY_NODE_ID, - stmts: vec![], span: body_expr.span, - expr: Some(body_expr), + stmts: vec![Stmt { + span: body_expr.span, + node: StmtKind::Expr(body_expr), + id: ast::DUMMY_NODE_ID, + }], rules: BlockCheckMode::Default, }) } @@ -3240,24 +3262,24 @@ impl<'a> Parser<'a> { // `else` token already eaten pub fn parse_else_expr(&mut self) -> PResult<'a, P> { if self.eat_keyword(keywords::If) { - return self.parse_if_expr(None); + return self.parse_if_expr(ThinVec::new()); } else { let blk = self.parse_block()?; - return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), None)); + return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), ThinVec::new())); } } /// Parse a 'for' .. 'in' expression ('for' token already eaten) pub fn parse_for_expr(&mut self, opt_ident: Option, span_lo: BytePos, - attrs: ThinAttributes) -> PResult<'a, P> { + mut attrs: ThinVec) -> PResult<'a, P> { // Parse: `for in ` let pat = self.parse_pat()?; self.expect_keyword(keywords::In)?; let expr = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?; - let attrs = attrs.append(iattrs.into_thin_attrs()); + attrs.extend(iattrs); let hi = self.last_span.hi; @@ -3269,13 +3291,13 @@ impl<'a> Parser<'a> { /// Parse a 'while' or 'while let' expression ('while' token already eaten) pub fn parse_while_expr(&mut self, opt_ident: Option, span_lo: BytePos, - attrs: ThinAttributes) -> PResult<'a, P> { + mut attrs: ThinVec) -> PResult<'a, P> { if self.token.is_keyword(keywords::Let) { return self.parse_while_let_expr(opt_ident, span_lo, attrs); } let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; - let attrs = attrs.append(iattrs.into_thin_attrs()); + attrs.extend(iattrs); let hi = body.span.hi; return Ok(self.mk_expr(span_lo, hi, ExprKind::While(cond, body, opt_ident), attrs)); @@ -3284,13 +3306,13 @@ impl<'a> Parser<'a> { /// Parse a 'while let' expression ('while' token already eaten) pub fn parse_while_let_expr(&mut self, opt_ident: Option, span_lo: BytePos, - attrs: ThinAttributes) -> PResult<'a, P> { + mut attrs: ThinVec) -> PResult<'a, P> { self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; self.expect(&token::Eq)?; let expr = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; - let attrs = attrs.append(iattrs.into_thin_attrs()); + attrs.extend(iattrs); let hi = body.span.hi; return Ok(self.mk_expr(span_lo, hi, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); } @@ -3298,15 +3320,15 @@ impl<'a> Parser<'a> { // parse `loop {...}`, `loop` token already eaten pub fn parse_loop_expr(&mut self, opt_ident: Option, span_lo: BytePos, - attrs: ThinAttributes) -> PResult<'a, P> { + mut attrs: ThinVec) -> PResult<'a, P> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; - let attrs = attrs.append(iattrs.into_thin_attrs()); + attrs.extend(iattrs); let hi = 
body.span.hi; Ok(self.mk_expr(span_lo, hi, ExprKind::Loop(body, opt_ident), attrs)) } // `match` token already eaten - fn parse_match_expr(&mut self, attrs: ThinAttributes) -> PResult<'a, P> { + fn parse_match_expr(&mut self, mut attrs: ThinVec) -> PResult<'a, P> { let match_span = self.last_span; let lo = self.last_span.lo; let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, @@ -3318,8 +3340,8 @@ impl<'a> Parser<'a> { } return Err(e) } - let attrs = attrs.append( - self.parse_inner_attributes()?.into_thin_attrs()); + attrs.extend(self.parse_inner_attributes()?); + let mut arms: Vec = Vec::new(); while self.token != token::CloseDelim(token::Brace) { match self.parse_arm() { @@ -3392,7 +3414,7 @@ impl<'a> Parser<'a> { /// Parse an expression, subject to the given restrictions pub fn parse_expr_res(&mut self, r: Restrictions, - already_parsed_attrs: Option) + already_parsed_attrs: Option>) -> PResult<'a, P> { self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs)) } @@ -3500,7 +3522,7 @@ impl<'a> Parser<'a> { } /// Parse the fields of a struct-like pattern - fn parse_pat_fields(&mut self) -> PResult<'a, (Vec> , bool)> { + fn parse_pat_fields(&mut self) -> PResult<'a, (Vec>, bool)> { let mut fields = Vec::new(); let mut etc = false; let mut first = true; @@ -3570,9 +3592,9 @@ impl<'a> Parser<'a> { }; fields.push(codemap::Spanned { span: mk_sp(lo, hi), - node: ast::FieldPat { ident: fieldname, - pat: subpat, - is_shorthand: is_shorthand }}); + node: ast::FieldPat { ident: fieldname, + pat: subpat, + is_shorthand: is_shorthand }}); } return Ok((fields, etc)); } @@ -3590,7 +3612,7 @@ impl<'a> Parser<'a> { (None, self.parse_path(PathStyle::Expr)?) }; let hi = self.last_span.hi; - Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), None)) + Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new())) } else { self.parse_pat_literal_maybe_minus() } @@ -3661,9 +3683,9 @@ impl<'a> Parser<'a> { let tts = self.parse_seq_to_end( &token::CloseDelim(delim), SeqSep::none(), |p| p.parse_token_tree())?; - let mac = Mac_ { path: path, tts: tts, ctxt: EMPTY_CTXT }; + let mac = Mac_ { path: path, tts: tts }; pat = PatKind::Mac(codemap::Spanned {node: mac, - span: mk_sp(lo, self.last_span.hi)}); + span: mk_sp(lo, self.last_span.hi)}); } else { // Parse ident @ pat // This can give false positives and parse nullary enums, @@ -3685,7 +3707,8 @@ impl<'a> Parser<'a> { token::DotDotDot => { // Parse range let hi = self.last_span.hi; - let begin = self.mk_expr(lo, hi, ExprKind::Path(qself, path), None); + let begin = + self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new()); self.bump(); let end = self.parse_pat_range_end()?; pat = PatKind::Range(begin, end); @@ -3715,12 +3738,7 @@ impl<'a> Parser<'a> { pat = PatKind::TupleStruct(path, fields, ddpos) } _ => { - pat = match qself { - // Parse qualified path - Some(qself) => PatKind::QPath(qself, path), - // Parse nullary enum - None => PatKind::Path(path) - }; + pat = PatKind::Path(qself, path); } } } @@ -3785,7 +3803,7 @@ impl<'a> Parser<'a> { } /// Parse a local variable declaration - fn parse_local(&mut self, attrs: ThinAttributes) -> PResult<'a, P> { + fn parse_local(&mut self, attrs: ThinVec) -> PResult<'a, P> { let lo = self.span.lo; let pat = self.parse_pat()?; @@ -3804,13 +3822,6 @@ impl<'a> Parser<'a> { })) } - /// Parse a "let" stmt - fn parse_let(&mut self, attrs: ThinAttributes) -> PResult<'a, P> { - let lo = self.span.lo; - let local = self.parse_local(attrs)?; - Ok(P(spanned(lo, 
self.last_span.hi, DeclKind::Local(local)))) - } - /// Parse a structure field fn parse_name_and_ty(&mut self, pr: Visibility, attrs: Vec ) -> PResult<'a, StructField> { @@ -3923,12 +3934,12 @@ impl<'a> Parser<'a> { let attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - Ok(Some(if self.check_keyword(keywords::Let) { - self.expect_keyword(keywords::Let)?; - let decl = self.parse_let(attrs.into_thin_attrs())?; - let hi = decl.span.hi; - let stmt = StmtKind::Decl(decl, ast::DUMMY_NODE_ID); - spanned(lo, hi, stmt) + Ok(Some(if self.eat_keyword(keywords::Let) { + Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Local(self.parse_local(attrs.into())?), + span: mk_sp(lo, self.last_span.hi), + } } else if self.token.is_ident() && !self.token.is_any_keyword() && self.look_ahead(1, |t| *t == token::Not) { @@ -3979,9 +3990,12 @@ impl<'a> Parser<'a> { }; if id.name == keywords::Invalid.name() { - let mac = P(spanned(lo, hi, Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT })); - let stmt = StmtKind::Mac(mac, style, attrs.into_thin_attrs()); - spanned(lo, hi, stmt) + let mac = spanned(lo, hi, Mac_ { path: pth, tts: tts }); + Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Mac(P((mac, style, attrs.into()))), + span: mk_sp(lo, hi), + } } else { // if it has a special ident, it's definitely an item // @@ -3995,25 +4009,28 @@ impl<'a> Parser<'a> { followed by a semicolon"); } } - spanned(lo, hi, StmtKind::Decl( - P(spanned(lo, hi, DeclKind::Item( + Stmt { + id: ast::DUMMY_NODE_ID, + span: mk_sp(lo, hi), + node: StmtKind::Item({ self.mk_item( lo, hi, id /*id is good here*/, - ItemKind::Mac(spanned(lo, hi, - Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT })), - Visibility::Inherited, attrs)))), - ast::DUMMY_NODE_ID)) + ItemKind::Mac(spanned(lo, hi, Mac_ { path: pth, tts: tts })), + Visibility::Inherited, + attrs) + }), + } } } else { // FIXME: Bad copy of attrs let restrictions = self.restrictions | Restrictions::NO_NONINLINE_MOD; match self.with_res(restrictions, |this| this.parse_item_(attrs.clone(), false, true))? { - Some(i) => { - let hi = i.span.hi; - let decl = P(spanned(lo, hi, DeclKind::Item(i))); - spanned(lo, hi, StmtKind::Decl(decl, ast::DUMMY_NODE_ID)) - } + Some(i) => Stmt { + id: ast::DUMMY_NODE_ID, + span: mk_sp(lo, i.span.hi), + node: StmtKind::Item(i), + }, None => { let unused_attrs = |attrs: &[_], s: &mut Self| { if attrs.len() > 0 { @@ -4036,10 +4053,12 @@ impl<'a> Parser<'a> { // Remainder are line-expr stmts. let e = self.parse_expr_res( - Restrictions::RESTRICTION_STMT_EXPR, Some(attrs.into_thin_attrs()))?; - let hi = e.span.hi; - let stmt = StmtKind::Expr(e, ast::DUMMY_NODE_ID); - spanned(lo, hi, stmt) + Restrictions::RESTRICTION_STMT_EXPR, Some(attrs.into()))?; + Stmt { + id: ast::DUMMY_NODE_ID, + span: mk_sp(lo, e.span.hi), + node: StmtKind::Expr(e), + } } } })) @@ -4082,10 +4101,9 @@ impl<'a> Parser<'a> { /// Precondition: already parsed the '{'. fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P> { let mut stmts = vec![]; - let mut expr = None; while !self.eat(&token::CloseDelim(token::Brace)) { - let Spanned {node, span} = if let Some(s) = self.parse_stmt_() { + let Stmt {node, span, ..} = if let Some(s) = self.parse_stmt_() { s } else if self.token == token::Eof { break; @@ -4093,60 +4111,13 @@ impl<'a> Parser<'a> { // Found only `;` or `}`. 
continue; }; + match node { - StmtKind::Expr(e, _) => { - self.handle_expression_like_statement(e, span, &mut stmts, &mut expr)?; + StmtKind::Expr(e) => { + self.handle_expression_like_statement(e, span, &mut stmts)?; } - StmtKind::Mac(mac, MacStmtStyle::NoBraces, attrs) => { - // statement macro without braces; might be an - // expr depending on whether a semicolon follows - match self.token { - token::Semi => { - stmts.push(Spanned { - node: StmtKind::Mac(mac, MacStmtStyle::Semicolon, attrs), - span: mk_sp(span.lo, self.span.hi), - }); - self.bump(); - } - _ => { - let e = self.mk_mac_expr(span.lo, span.hi, - mac.and_then(|m| m.node), - None); - let lo = e.span.lo; - let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?; - let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?; - self.handle_expression_like_statement( - e, - span, - &mut stmts, - &mut expr)?; - } - } - } - StmtKind::Mac(m, style, attrs) => { - // statement macro; might be an expr - match self.token { - token::Semi => { - stmts.push(Spanned { - node: StmtKind::Mac(m, MacStmtStyle::Semicolon, attrs), - span: mk_sp(span.lo, self.span.hi), - }); - self.bump(); - } - token::CloseDelim(token::Brace) => { - // if a block ends in `m!(arg)` without - // a `;`, it must be an expr - expr = Some(self.mk_mac_expr(span.lo, span.hi, - m.and_then(|x| x.node), - attrs)); - } - _ => { - stmts.push(Spanned { - node: StmtKind::Mac(m, style, attrs), - span: span - }); - } - } + StmtKind::Mac(mac) => { + self.handle_macro_in_block(mac.unwrap(), span, &mut stmts)?; } _ => { // all other kinds of statements: let mut hi = span.hi; @@ -4155,7 +4126,8 @@ impl<'a> Parser<'a> { hi = self.last_span.hi; } - stmts.push(Spanned { + stmts.push(Stmt { + id: ast::DUMMY_NODE_ID, node: node, span: mk_sp(span.lo, hi) }); @@ -4165,18 +4137,64 @@ impl<'a> Parser<'a> { Ok(P(ast::Block { stmts: stmts, - expr: expr, id: ast::DUMMY_NODE_ID, rules: s, span: mk_sp(lo, self.last_span.hi), })) } + fn handle_macro_in_block(&mut self, + (mac, style, attrs): (ast::Mac, MacStmtStyle, ThinVec), + span: Span, + stmts: &mut Vec) + -> PResult<'a, ()> { + if style == MacStmtStyle::NoBraces { + // statement macro without braces; might be an + // expr depending on whether a semicolon follows + match self.token { + token::Semi => { + stmts.push(Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Mac(P((mac, MacStmtStyle::Semicolon, attrs))), + span: mk_sp(span.lo, self.span.hi), + }); + self.bump(); + } + _ => { + let e = self.mk_mac_expr(span.lo, span.hi, mac.node, ThinVec::new()); + let lo = e.span.lo; + let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?; + let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?; + self.handle_expression_like_statement(e, span, stmts)?; + } + } + } else { + // statement macro; might be an expr + match self.token { + token::Semi => { + stmts.push(Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Mac(P((mac, MacStmtStyle::Semicolon, attrs))), + span: mk_sp(span.lo, self.span.hi), + }); + self.bump(); + } + _ => { + stmts.push(Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Mac(P((mac, style, attrs))), + span: span + }); + } + } + } + Ok(()) + } + fn handle_expression_like_statement(&mut self, e: P, span: Span, - stmts: &mut Vec, - last_block_expr: &mut Option>) + stmts: &mut Vec) -> PResult<'a, ()> { // expression without semicolon if classify::expr_requires_semi_to_be_stmt(&e) { @@ -4197,15 +4215,16 @@ impl<'a> Parser<'a> { hi: self.last_span.hi, expn_id: span.expn_id, }; - stmts.push(Spanned { - node: 
StmtKind::Semi(e, ast::DUMMY_NODE_ID), + stmts.push(Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Semi(e), span: span_with_semi, }); } - token::CloseDelim(token::Brace) => *last_block_expr = Some(e), _ => { - stmts.push(Spanned { - node: StmtKind::Expr(e, ast::DUMMY_NODE_ID), + stmts.push(Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Expr(e), span: span }); } @@ -4913,10 +4932,10 @@ impl<'a> Parser<'a> { let tts = self.parse_seq_to_end(&token::CloseDelim(delim), SeqSep::none(), |p| p.parse_token_tree())?; - let m_ = Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT }; + let m_ = Mac_ { path: pth, tts: tts }; let m: ast::Mac = codemap::Spanned { node: m_, - span: mk_sp(lo, - self.last_span.hi) }; + span: mk_sp(lo, + self.last_span.hi) }; if delim != token::Brace { self.expect(&token::Semi)? } @@ -4940,7 +4959,6 @@ impl<'a> Parser<'a> { /// Parse trait Foo { ... } fn parse_item_trait(&mut self, unsafety: Unsafety) -> PResult<'a, ItemInfo> { - let ident = self.parse_ident()?; let mut tps = self.parse_generics()?; @@ -5255,7 +5273,7 @@ impl<'a> Parser<'a> { return Err(self.fatal(&format!("expected item, found `{}`", token_str))); } - let hi = if self.span == codemap::DUMMY_SP { + let hi = if self.span == syntax_pos::DUMMY_SP { inner_lo } else { self.last_span.hi @@ -5999,10 +6017,10 @@ impl<'a> Parser<'a> { SeqSep::none(), |p| p.parse_token_tree())?; // single-variant-enum... : - let m = Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT }; + let m = Mac_ { path: pth, tts: tts }; let m: ast::Mac = codemap::Spanned { node: m, - span: mk_sp(mac_lo, - self.last_span.hi) }; + span: mk_sp(mac_lo, + self.last_span.hi) }; if delim != token::Brace { if !self.eat(&token::Semi) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 47de32ed7d00f..8376d28164dee 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -19,6 +19,7 @@ use ext::mtwt; use ptr::P; use util::interner::{RcStr, StrInterner}; use util::interner; +use tokenstream; use serialize::{Decodable, Decoder, Encodable, Encoder}; use std::fmt; @@ -338,7 +339,7 @@ pub enum Nonterminal { /// Stuff inside brackets for attributes NtMeta(P), NtPath(Box), - NtTT(P), // needs P'ed to break a circularity + NtTT(P), // needs P'ed to break a circularity // These are not exposed to macros, but are used by quasiquote. 
NtArm(ast::Arm), NtImplItem(P), diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index a2ee5bf609053..b56cec72a956e 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -11,14 +11,14 @@ pub use self::AnnNode::*; use abi::{self, Abi}; -use ast::{self, TokenTree, BlockCheckMode, PatKind}; +use ast::{self, BlockCheckMode, PatKind}; use ast::{SelfKind, RegionTyParamBound, TraitTyParamBound, TraitBoundModifier}; use ast::Attribute; -use attr::ThinAttributesExt; use util::parser::AssocOp; use attr; use attr::{AttrMetaMethods, AttributeMethods}; -use codemap::{self, CodeMap, BytePos}; +use codemap::{self, CodeMap}; +use syntax_pos::{self, BytePos}; use errors; use parse::token::{self, keywords, BinOpToken, Token, InternedString}; use parse::lexer::comments; @@ -28,6 +28,7 @@ use print::pp::{Breaks, eof}; use print::pp::Breaks::{Consistent, Inconsistent}; use ptr::P; use std_inject; +use tokenstream::{self, TokenTree}; use std::ascii; use std::io::{self, Write, Read}; @@ -330,11 +331,11 @@ pub fn lifetime_to_string(e: &ast::Lifetime) -> String { to_string(|s| s.print_lifetime(e)) } -pub fn tt_to_string(tt: &ast::TokenTree) -> String { +pub fn tt_to_string(tt: &tokenstream::TokenTree) -> String { to_string(|s| s.print_tt(tt)) } -pub fn tts_to_string(tts: &[ast::TokenTree]) -> String { +pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String { to_string(|s| s.print_tts(tts)) } @@ -842,11 +843,11 @@ impl<'a> State<'a> { self.end() // close the head-box } - pub fn bclose_(&mut self, span: codemap::Span, + pub fn bclose_(&mut self, span: syntax_pos::Span, indented: usize) -> io::Result<()> { self.bclose_maybe_open(span, indented, true) } - pub fn bclose_maybe_open(&mut self, span: codemap::Span, + pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span, indented: usize, close_box: bool) -> io::Result<()> { try!(self.maybe_print_comment(span.hi)); try!(self.break_offset_if_not_bol(1, -(indented as isize))); @@ -856,7 +857,7 @@ impl<'a> State<'a> { } Ok(()) } - pub fn bclose(&mut self, span: codemap::Span) -> io::Result<()> { + pub fn bclose(&mut self, span: syntax_pos::Span) -> io::Result<()> { self.bclose_(span, INDENT_UNIT) } @@ -900,7 +901,7 @@ impl<'a> State<'a> { mut op: F, mut get_span: G) -> io::Result<()> where F: FnMut(&mut State, &T) -> io::Result<()>, - G: FnMut(&T) -> codemap::Span, + G: FnMut(&T) -> syntax_pos::Span, { try!(self.rbox(0, b)); let len = elts.len(); @@ -1352,7 +1353,7 @@ impl<'a> State<'a> { pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef, generics: &ast::Generics, ident: ast::Ident, - span: codemap::Span, + span: syntax_pos::Span, visibility: &ast::Visibility) -> io::Result<()> { try!(self.head(&visibility_qualified(visibility, "enum"))); try!(self.print_ident(ident)); @@ -1364,7 +1365,7 @@ impl<'a> State<'a> { pub fn print_variants(&mut self, variants: &[ast::Variant], - span: codemap::Span) -> io::Result<()> { + span: syntax_pos::Span) -> io::Result<()> { try!(self.bopen()); for v in variants { try!(self.space_if_not_bol()); @@ -1393,7 +1394,7 @@ impl<'a> State<'a> { struct_def: &ast::VariantData, generics: &ast::Generics, ident: ast::Ident, - span: codemap::Span, + span: syntax_pos::Span, print_finalizer: bool) -> io::Result<()> { try!(self.print_ident(ident)); try!(self.print_generics(generics)); @@ -1445,7 +1446,7 @@ impl<'a> State<'a> { /// appropriate macro, transcribe back into the grammar we just parsed from, /// and then pretty-print the resulting AST nodes (so, e.g., we print /// 
expression arguments as expressions). It can be done! I think. - pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> { + pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> { match *tt { TokenTree::Token(_, ref tk) => { try!(word(&mut self.s, &token_to_string(tk))); @@ -1476,14 +1477,14 @@ impl<'a> State<'a> { None => {}, } match seq.op { - ast::KleeneOp::ZeroOrMore => word(&mut self.s, "*"), - ast::KleeneOp::OneOrMore => word(&mut self.s, "+"), + tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"), + tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"), } } } } - pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> { + pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> { try!(self.ibox(0)); for (i, tt) in tts.iter().enumerate() { if i != 0 { @@ -1550,6 +1551,17 @@ impl<'a> State<'a> { try!(self.print_associated_type(ti.ident, Some(bounds), default.as_ref().map(|ty| &**ty))); } + ast::TraitItemKind::Macro(codemap::Spanned { ref node, .. }) => { + // code copied from ItemKind::Mac: + self.print_path(&node.path, false, 0)?; + word(&mut self.s, "! ")?; + self.cbox(INDENT_UNIT)?; + self.popen()?; + self.print_tts(&node.tts[..])?; + self.pclose()?; + word(&mut self.s, ";")?; + self.end()? + } } self.ann.post(self, NodeSubItem(ti.id)) } @@ -1593,21 +1605,40 @@ impl<'a> State<'a> { pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> { try!(self.maybe_print_comment(st.span.lo)); match st.node { - ast::StmtKind::Decl(ref decl, _) => { - try!(self.print_decl(&decl)); + ast::StmtKind::Local(ref loc) => { + try!(self.print_outer_attributes(&loc.attrs)); + try!(self.space_if_not_bol()); + try!(self.ibox(INDENT_UNIT)); + try!(self.word_nbsp("let")); + + try!(self.ibox(INDENT_UNIT)); + try!(self.print_local_decl(&loc)); + try!(self.end()); + if let Some(ref init) = loc.init { + try!(self.nbsp()); + try!(self.word_space("=")); + try!(self.print_expr(&init)); + } + try!(word(&mut self.s, ";")); + self.end()?; } - ast::StmtKind::Expr(ref expr, _) => { + ast::StmtKind::Item(ref item) => self.print_item(&item)?, + ast::StmtKind::Expr(ref expr) => { try!(self.space_if_not_bol()); try!(self.print_expr_outer_attr_style(&expr, false)); + if parse::classify::expr_requires_semi_to_be_stmt(expr) { + try!(word(&mut self.s, ";")); + } } - ast::StmtKind::Semi(ref expr, _) => { + ast::StmtKind::Semi(ref expr) => { try!(self.space_if_not_bol()); try!(self.print_expr_outer_attr_style(&expr, false)); try!(word(&mut self.s, ";")); } - ast::StmtKind::Mac(ref mac, style, ref attrs) => { + ast::StmtKind::Mac(ref mac) => { + let (ref mac, style, ref attrs) = **mac; try!(self.space_if_not_bol()); - try!(self.print_outer_attributes(attrs.as_attr_slice())); + try!(self.print_outer_attributes(&attrs)); let delim = match style { ast::MacStmtStyle::Braces => token::Brace, _ => token::Paren @@ -1619,9 +1650,6 @@ impl<'a> State<'a> { } } } - if parse::classify::stmt_ends_with_semi(&st.node) { - try!(word(&mut self.s, ";")); - } self.maybe_print_trailing_comment(st.span, None) } @@ -1665,17 +1693,17 @@ impl<'a> State<'a> { try!(self.print_inner_attributes(attrs)); - for st in &blk.stmts { - try!(self.print_stmt(st)); - } - match blk.expr { - Some(ref expr) => { - try!(self.space_if_not_bol()); - try!(self.print_expr_outer_attr_style(&expr, false)); - try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))); + for (i, st) in blk.stmts.iter().enumerate() { + match st.node { + ast::StmtKind::Expr(ref expr) if i == 
blk.stmts.len() - 1 => { + try!(self.space_if_not_bol()); + try!(self.print_expr_outer_attr_style(&expr, false)); + try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))); + } + _ => try!(self.print_stmt(st)), } - _ => () } + try!(self.bclose_maybe_open(blk.span, indented, close_box)); self.ann.post(self, NodeBlock(blk)) } @@ -1947,7 +1975,7 @@ impl<'a> State<'a> { is_inline: bool) -> io::Result<()> { try!(self.maybe_print_comment(expr.span.lo)); - let attrs = expr.attrs.as_attr_slice(); + let attrs = &expr.attrs; if is_inline { try!(self.print_outer_attributes_inline(attrs)); } else { @@ -2084,24 +2112,21 @@ impl<'a> State<'a> { _ => false }; - if !default_return || !body.stmts.is_empty() || body.expr.is_none() { - try!(self.print_block_unclosed(&body)); - } else { - // we extract the block, so as not to create another set of boxes - let i_expr = body.expr.as_ref().unwrap(); - match i_expr.node { - ast::ExprKind::Block(ref blk) => { - try!(self.print_block_unclosed_with_attrs( - &blk, - i_expr.attrs.as_attr_slice())); - } - _ => { + match body.stmts.last().map(|stmt| &stmt.node) { + Some(&ast::StmtKind::Expr(ref i_expr)) if default_return && + body.stmts.len() == 1 => { + // we extract the block, so as not to create another set of boxes + if let ast::ExprKind::Block(ref blk) = i_expr.node { + try!(self.print_block_unclosed_with_attrs(&blk, &i_expr.attrs)); + } else { // this is a bare expression try!(self.print_expr(&i_expr)); try!(self.end()); // need to close a box } } + _ => try!(self.print_block_unclosed(&body)), } + // a box will be closed by print_expr, but we didn't want an overall // wrapper so we closed the corresponding opening. so create an // empty box to satisfy the close. @@ -2170,7 +2195,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); } } - ast::ExprKind::Again(opt_ident) => { + ast::ExprKind::Continue(opt_ident) => { try!(word(&mut self.s, "continue")); try!(space(&mut self.s)); if let Some(ident) = opt_ident { @@ -2278,29 +2303,6 @@ impl<'a> State<'a> { Ok(()) } - pub fn print_decl(&mut self, decl: &ast::Decl) -> io::Result<()> { - try!(self.maybe_print_comment(decl.span.lo)); - match decl.node { - ast::DeclKind::Local(ref loc) => { - try!(self.print_outer_attributes(loc.attrs.as_attr_slice())); - try!(self.space_if_not_bol()); - try!(self.ibox(INDENT_UNIT)); - try!(self.word_nbsp("let")); - - try!(self.ibox(INDENT_UNIT)); - try!(self.print_local_decl(&loc)); - try!(self.end()); - if let Some(ref init) = loc.init { - try!(self.nbsp()); - try!(self.word_space("=")); - try!(self.print_expr(&init)); - } - self.end() - } - ast::DeclKind::Item(ref item) => self.print_item(&item) - } - } - pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> { try!(word(&mut self.s, &ident.name.as_str())); self.ann.post(self, NodeIdent(&ident)) @@ -2483,10 +2485,10 @@ impl<'a> State<'a> { } try!(self.pclose()); } - PatKind::Path(ref path) => { + PatKind::Path(None, ref path) => { try!(self.print_path(path, true, 0)); } - PatKind::QPath(ref qself, ref path) => { + PatKind::Path(Some(ref qself), ref path) => { try!(self.print_qpath(path, qself, false)); } PatKind::Struct(ref path, ref fields, etc) => { @@ -2999,7 +3001,7 @@ impl<'a> State<'a> { self.end() } - pub fn maybe_print_trailing_comment(&mut self, span: codemap::Span, + pub fn maybe_print_trailing_comment(&mut self, span: syntax_pos::Span, next_pos: Option) -> io::Result<()> { let cm = match self.cm { @@ -3104,6 +3106,7 @@ mod tests { use ast; use codemap; use parse::token; + use syntax_pos; #[test] fn 
test_fun_to_string() { @@ -3111,7 +3114,7 @@ mod tests { let decl = ast::FnDecl { inputs: Vec::new(), - output: ast::FunctionRetTy::Default(codemap::DUMMY_SP), + output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP), variadic: false }; let generics = ast::Generics::default(); @@ -3125,7 +3128,7 @@ mod tests { fn test_variant_to_string() { let ident = token::str_to_ident("principal_skinner"); - let var = codemap::respan(codemap::DUMMY_SP, ast::Variant_ { + let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { name: ident, attrs: Vec::new(), // making this up as I go.... ? diff --git a/src/libsyntax/show_span.rs b/src/libsyntax/show_span.rs index 5e3cd0773aa45..928ffb202d0b3 100644 --- a/src/libsyntax/show_span.rs +++ b/src/libsyntax/show_span.rs @@ -44,7 +44,7 @@ struct ShowSpanVisitor<'a> { mode: Mode, } -impl<'a, 'v> Visitor<'v> for ShowSpanVisitor<'a> { +impl<'a> Visitor for ShowSpanVisitor<'a> { fn visit_expr(&mut self, e: &ast::Expr) { if let Mode::Expression = self.mode { self.span_diagnostic.span_warn(e.span, "expression"); diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 8834c026067c8..d1454ab06cbc8 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -10,8 +10,8 @@ use ast; use attr; -use codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute}; -use codemap; +use syntax_pos::{DUMMY_SP, Span}; +use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; use parse::token::{intern, InternedString, keywords}; use parse::{token, ParseSess}; use ptr::P; diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index ca6ed76d5499b..0a60b7fd430c4 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -12,6 +12,7 @@ #![allow(dead_code)] #![allow(unused_imports)] + use self::HasTestSignature::*; use std::iter; @@ -20,9 +21,12 @@ use std::mem; use std::vec; use attr::AttrMetaMethods; use attr; -use codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute}; -use codemap; +use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, FileMap, BytePos}; +use std::rc::Rc; + +use codemap::{self, CodeMap, ExpnInfo, NameAndSpan, MacroAttribute}; use errors; +use errors::snippet::{RenderedLine, SnippetData}; use config; use entry::{self, EntryPointType}; use ext::base::{ExtCtxt, DummyMacroLoader}; @@ -474,7 +478,7 @@ fn mk_main(cx: &mut TestCtxt) -> P { let main_attr = ecx.attribute(sp, main_meta); // pub fn main() { ... } let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); - let main_body = ecx.block_all(sp, vec![call_test_main], None); + let main_body = ecx.block(sp, vec![call_test_main]); let main = ast::ItemKind::Fn(ecx.fn_decl(vec![], main_ret_ty), ast::Unsafety::Normal, ast::Constness::NotConst, @@ -604,10 +608,10 @@ fn mk_test_descs(cx: &TestCtxt) -> P { mk_test_desc_and_fn_rec(cx, test) }).collect()), span: DUMMY_SP, - attrs: None, + attrs: ast::ThinVec::new(), })), span: DUMMY_SP, - attrs: None, + attrs: ast::ThinVec::new(), }) } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs new file mode 100644 index 0000000000000..35377d14bab7c --- /dev/null +++ b/src/libsyntax/tokenstream.rs @@ -0,0 +1,210 @@ +// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Token Trees +//! 
TokenTrees are syntactic forms for dealing with tokens. The description below is +//! more complete; in short a TokenTree is a single token, a delimited sequence of token +//! trees, or a sequence with repetition for list splicing as part of macro expansion. + +use ast::{AttrStyle}; +use codemap::{Span}; +use ext::base; +use ext::tt::macro_parser; +use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; +use parse::lexer; +use parse::token; + +/// A delimited sequence of token trees +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct Delimited { + /// The type of delimiter + pub delim: token::DelimToken, + /// The span covering the opening delimiter + pub open_span: Span, + /// The delimited sequence of token trees + pub tts: Vec, + /// The span covering the closing delimiter + pub close_span: Span, +} + +impl Delimited { + /// Returns the opening delimiter as a token. + pub fn open_token(&self) -> token::Token { + token::OpenDelim(self.delim) + } + + /// Returns the closing delimiter as a token. + pub fn close_token(&self) -> token::Token { + token::CloseDelim(self.delim) + } + + /// Returns the opening delimiter as a token tree. + pub fn open_tt(&self) -> TokenTree { + TokenTree::Token(self.open_span, self.open_token()) + } + + /// Returns the closing delimiter as a token tree. + pub fn close_tt(&self) -> TokenTree { + TokenTree::Token(self.close_span, self.close_token()) + } +} + +/// A sequence of token trees +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct SequenceRepetition { + /// The sequence of token trees + pub tts: Vec, + /// The optional separator + pub separator: Option, + /// Whether the sequence can be repeated zero (*), or one or more times (+) + pub op: KleeneOp, + /// The number of `MatchNt`s that appear in the sequence (and subsequences) + pub num_captures: usize, +} + +/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) +/// for token sequences. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +pub enum KleeneOp { + ZeroOrMore, + OneOrMore, +} + +/// When the main rust parser encounters a syntax-extension invocation, it +/// parses the arguments to the invocation as a token-tree. This is a very +/// loose structure, such that all sorts of different AST-fragments can +/// be passed to syntax extensions using a uniform type. +/// +/// If the syntax extension is an MBE macro, it will attempt to match its +/// LHS token tree against the provided token tree, and if it finds a +/// match, will transcribe the RHS token tree, splicing in any captured +/// macro_parser::matched_nonterminals into the `SubstNt`s it finds. +/// +/// The RHS of an MBE macro is the only place `SubstNt`s are substituted. +/// Nothing special happens to misnamed or misplaced `SubstNt`s. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub enum TokenTree { + /// A single token + Token(Span, token::Token), + /// A delimited sequence of token trees + Delimited(Span, Delimited), + + // This only makes sense in MBE macros. + + /// A kleene-style repetition sequence with a span + // FIXME(eddyb) #12938 Use DST. 
+ Sequence(Span, SequenceRepetition), +} + +impl TokenTree { + pub fn len(&self) -> usize { + match *self { + TokenTree::Token(_, token::DocComment(name)) => { + match doc_comment_style(&name.as_str()) { + AttrStyle::Outer => 2, + AttrStyle::Inner => 3 + } + } + TokenTree::Token(_, token::SpecialVarNt(..)) => 2, + TokenTree::Token(_, token::MatchNt(..)) => 3, + TokenTree::Delimited(_, ref delimed) => { + delimed.tts.len() + 2 + } + TokenTree::Sequence(_, ref seq) => { + seq.tts.len() + } + TokenTree::Token(..) => 0 + } + } + + pub fn get_tt(&self, index: usize) -> TokenTree { + match (self, index) { + (&TokenTree::Token(sp, token::DocComment(_)), 0) => { + TokenTree::Token(sp, token::Pound) + } + (&TokenTree::Token(sp, token::DocComment(name)), 1) + if doc_comment_style(&name.as_str()) == AttrStyle::Inner => { + TokenTree::Token(sp, token::Not) + } + (&TokenTree::Token(sp, token::DocComment(name)), _) => { + let stripped = strip_doc_comment_decoration(&name.as_str()); + + // Searches for the occurrences of `"#*` and returns the minimum number of `#`s + // required to wrap the text. + let num_of_hashes = stripped.chars().scan(0, |cnt, x| { + *cnt = if x == '"' { + 1 + } else if *cnt != 0 && x == '#' { + *cnt + 1 + } else { + 0 + }; + Some(*cnt) + }).max().unwrap_or(0); + + TokenTree::Delimited(sp, Delimited { + delim: token::Bracket, + open_span: sp, + tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))), + TokenTree::Token(sp, token::Eq), + TokenTree::Token(sp, token::Literal( + token::StrRaw(token::intern(&stripped), num_of_hashes), None))], + close_span: sp, + }) + } + (&TokenTree::Delimited(_, ref delimed), _) => { + if index == 0 { + return delimed.open_tt(); + } + if index == delimed.tts.len() + 1 { + return delimed.close_tt(); + } + delimed.tts[index - 1].clone() + } + (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => { + let v = [TokenTree::Token(sp, token::Dollar), + TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))]; + v[index].clone() + } + (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => { + let v = [TokenTree::Token(sp, token::SubstNt(name)), + TokenTree::Token(sp, token::Colon), + TokenTree::Token(sp, token::Ident(kind))]; + v[index].clone() + } + (&TokenTree::Sequence(_, ref seq), _) => { + seq.tts[index].clone() + } + _ => panic!("Cannot expand a token tree") + } + } + + /// Returns the `Span` corresponding to this token tree. + pub fn get_span(&self) -> Span { + match *self { + TokenTree::Token(span, _) => span, + TokenTree::Delimited(span, _) => span, + TokenTree::Sequence(span, _) => span, + } + } + + /// Use this token tree as a matcher to parse given tts. 
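Illustrative aside (not part of the diff): `len` and `get_tt` above treat a `Delimited` tree as if its opening and closing delimiters were extra children, which is what lets the macro matcher walk doc comments, delimiters, and `$var`-style tokens uniformly. A stripped-down, self-contained mirror of that idea, with simplified token and tree types (no spans, not the rustc definitions):

// Simplified mirror of the new tokenstream structure, showing how a
// delimited tree exposes its delimiters as "virtual" children.
#[derive(Clone, Debug, PartialEq)]
enum Token { OpenParen, CloseParen, Ident(&'static str), Comma }

#[derive(Clone, Debug)]
enum TokenTree {
    Token(Token),
    Delimited(Vec<TokenTree>), // children between ( and )
}

impl TokenTree {
    fn len(&self) -> usize {
        match *self {
            TokenTree::Token(_) => 0,
            // the children plus the opening and closing delimiter
            TokenTree::Delimited(ref tts) => tts.len() + 2,
        }
    }

    fn get_tt(&self, index: usize) -> TokenTree {
        match *self {
            TokenTree::Delimited(ref tts) => {
                if index == 0 {
                    TokenTree::Token(Token::OpenParen)
                } else if index == tts.len() + 1 {
                    TokenTree::Token(Token::CloseParen)
                } else {
                    tts[index - 1].clone()
                }
            }
            _ => panic!("cannot expand a single token"),
        }
    }
}

fn main() {
    // Roughly the argument list `(a, b)` of a macro invocation.
    let tt = TokenTree::Delimited(vec![
        TokenTree::Token(Token::Ident("a")),
        TokenTree::Token(Token::Comma),
        TokenTree::Token(Token::Ident("b")),
    ]);
    assert_eq!(tt.len(), 5);
    println!("{:?}", tt.get_tt(0)); // Token(OpenParen)
}

The real `get_tt` additionally expands doc comments into `#[doc = ...]` attribute trees and `$crate`/`$name:kind` tokens into their constituent pieces, as the match arms above show.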
+ pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree]) + -> macro_parser::NamedParseResult { + // `None` is because we're not interpolating + let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic, + None, + None, + tts.iter().cloned().collect(), + true); + macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch) + } +} + diff --git a/src/libsyntax/util/node_count.rs b/src/libsyntax/util/node_count.rs index 919dd84b11799..14244bbdddf28 100644 --- a/src/libsyntax/util/node_count.rs +++ b/src/libsyntax/util/node_count.rs @@ -12,7 +12,7 @@ use visit::*; use ast::*; -use codemap::Span; +use syntax_pos::Span; pub struct NodeCounter { pub count: usize, @@ -26,133 +26,129 @@ impl NodeCounter { } } -impl<'v> Visitor<'v> for NodeCounter { +impl Visitor for NodeCounter { fn visit_ident(&mut self, span: Span, ident: Ident) { self.count += 1; walk_ident(self, span, ident); } - fn visit_mod(&mut self, m: &'v Mod, _s: Span, _n: NodeId) { + fn visit_mod(&mut self, m: &Mod, _s: Span, _n: NodeId) { self.count += 1; walk_mod(self, m) } - fn visit_foreign_item(&mut self, i: &'v ForeignItem) { + fn visit_foreign_item(&mut self, i: &ForeignItem) { self.count += 1; walk_foreign_item(self, i) } - fn visit_item(&mut self, i: &'v Item) { + fn visit_item(&mut self, i: &Item) { self.count += 1; walk_item(self, i) } - fn visit_local(&mut self, l: &'v Local) { + fn visit_local(&mut self, l: &Local) { self.count += 1; walk_local(self, l) } - fn visit_block(&mut self, b: &'v Block) { + fn visit_block(&mut self, b: &Block) { self.count += 1; walk_block(self, b) } - fn visit_stmt(&mut self, s: &'v Stmt) { + fn visit_stmt(&mut self, s: &Stmt) { self.count += 1; walk_stmt(self, s) } - fn visit_arm(&mut self, a: &'v Arm) { + fn visit_arm(&mut self, a: &Arm) { self.count += 1; walk_arm(self, a) } - fn visit_pat(&mut self, p: &'v Pat) { + fn visit_pat(&mut self, p: &Pat) { self.count += 1; walk_pat(self, p) } - fn visit_decl(&mut self, d: &'v Decl) { - self.count += 1; - walk_decl(self, d) - } - fn visit_expr(&mut self, ex: &'v Expr) { + fn visit_expr(&mut self, ex: &Expr) { self.count += 1; walk_expr(self, ex) } - fn visit_ty(&mut self, t: &'v Ty) { + fn visit_ty(&mut self, t: &Ty) { self.count += 1; walk_ty(self, t) } - fn visit_generics(&mut self, g: &'v Generics) { + fn visit_generics(&mut self, g: &Generics) { self.count += 1; walk_generics(self, g) } - fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Block, s: Span, _: NodeId) { + fn visit_fn(&mut self, fk: FnKind, fd: &FnDecl, b: &Block, s: Span, _: NodeId) { self.count += 1; walk_fn(self, fk, fd, b, s) } - fn visit_trait_item(&mut self, ti: &'v TraitItem) { + fn visit_trait_item(&mut self, ti: &TraitItem) { self.count += 1; walk_trait_item(self, ti) } - fn visit_impl_item(&mut self, ii: &'v ImplItem) { + fn visit_impl_item(&mut self, ii: &ImplItem) { self.count += 1; walk_impl_item(self, ii) } - fn visit_trait_ref(&mut self, t: &'v TraitRef) { + fn visit_trait_ref(&mut self, t: &TraitRef) { self.count += 1; walk_trait_ref(self, t) } - fn visit_ty_param_bound(&mut self, bounds: &'v TyParamBound) { + fn visit_ty_param_bound(&mut self, bounds: &TyParamBound) { self.count += 1; walk_ty_param_bound(self, bounds) } - fn visit_poly_trait_ref(&mut self, t: &'v PolyTraitRef, m: &'v TraitBoundModifier) { + fn visit_poly_trait_ref(&mut self, t: &PolyTraitRef, m: &TraitBoundModifier) { self.count += 1; walk_poly_trait_ref(self, t, m) } - fn visit_variant_data(&mut self, s: &'v VariantData, _: Ident, - _: &'v Generics, 
_: NodeId, _: Span) { + fn visit_variant_data(&mut self, s: &VariantData, _: Ident, + _: &Generics, _: NodeId, _: Span) { self.count += 1; walk_struct_def(self, s) } - fn visit_struct_field(&mut self, s: &'v StructField) { + fn visit_struct_field(&mut self, s: &StructField) { self.count += 1; walk_struct_field(self, s) } - fn visit_enum_def(&mut self, enum_definition: &'v EnumDef, - generics: &'v Generics, item_id: NodeId, _: Span) { + fn visit_enum_def(&mut self, enum_definition: &EnumDef, + generics: &Generics, item_id: NodeId, _: Span) { self.count += 1; walk_enum_def(self, enum_definition, generics, item_id) } - fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) { + fn visit_variant(&mut self, v: &Variant, g: &Generics, item_id: NodeId) { self.count += 1; walk_variant(self, v, g, item_id) } - fn visit_lifetime(&mut self, lifetime: &'v Lifetime) { + fn visit_lifetime(&mut self, lifetime: &Lifetime) { self.count += 1; walk_lifetime(self, lifetime) } - fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) { + fn visit_lifetime_def(&mut self, lifetime: &LifetimeDef) { self.count += 1; walk_lifetime_def(self, lifetime) } - fn visit_mac(&mut self, _mac: &'v Mac) { + fn visit_mac(&mut self, _mac: &Mac) { self.count += 1; walk_mac(self, _mac) } - fn visit_path(&mut self, path: &'v Path, _id: NodeId) { + fn visit_path(&mut self, path: &Path, _id: NodeId) { self.count += 1; walk_path(self, path) } - fn visit_path_list_item(&mut self, prefix: &'v Path, item: &'v PathListItem) { + fn visit_path_list_item(&mut self, prefix: &Path, item: &PathListItem) { self.count += 1; walk_path_list_item(self, prefix, item) } - fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &'v PathParameters) { + fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &PathParameters) { self.count += 1; walk_path_parameters(self, path_span, path_parameters) } - fn visit_assoc_type_binding(&mut self, type_binding: &'v TypeBinding) { + fn visit_assoc_type_binding(&mut self, type_binding: &TypeBinding) { self.count += 1; walk_assoc_type_binding(self, type_binding) } - fn visit_attribute(&mut self, _attr: &'v Attribute) { + fn visit_attribute(&mut self, _attr: &Attribute) { self.count += 1; } - fn visit_macro_def(&mut self, macro_def: &'v MacroDef) { + fn visit_macro_def(&mut self, macro_def: &MacroDef) { self.count += 1; walk_macro_def(self, macro_def) } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 06264196d9e7a..f59428bf536cf 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -14,10 +14,11 @@ use parse::{lexer, new_parser_from_source_str}; use parse::parser::Parser; use parse::token; use ptr::P; +use tokenstream; use std::iter::Peekable; /// Map a string to tts, using a made-up filename: -pub fn string_to_tts(source_str: String) -> Vec { +pub fn string_to_tts(source_str: String) -> Vec { let ps = ParseSess::new(); filemap_to_tts(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str)) } diff --git a/src/libsyntax/util/thin_vec.rs b/src/libsyntax/util/thin_vec.rs new file mode 100644 index 0000000000000..546686b46b8db --- /dev/null +++ b/src/libsyntax/util/thin_vec.rs @@ -0,0 +1,59 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +/// A vector type optimized for cases where this size is usually 0 (c.f. `SmallVector`). +/// The `Option>` wrapping allows us to represent a zero sized vector with `None`, +/// which uses only a single (null) pointer. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct ThinVec(Option>>); + +impl ThinVec { + pub fn new() -> Self { + ThinVec(None) + } +} + +impl From> for ThinVec { + fn from(vec: Vec) -> Self { + if vec.is_empty() { + ThinVec(None) + } else { + ThinVec(Some(Box::new(vec))) + } + } +} + +impl Into> for ThinVec { + fn into(self) -> Vec { + match self { + ThinVec(None) => Vec::new(), + ThinVec(Some(vec)) => *vec, + } + } +} + +impl ::std::ops::Deref for ThinVec { + type Target = [T]; + fn deref(&self) -> &[T] { + match *self { + ThinVec(None) => &[], + ThinVec(Some(ref vec)) => vec, + } + } +} + +impl Extend for ThinVec { + fn extend>(&mut self, iter: I) { + match *self { + ThinVec(Some(ref mut vec)) => vec.extend(iter), + ThinVec(None) => *self = iter.into_iter().collect::>().into(), + } + } +} diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 07a6317706b84..1fc4e54d21807 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -25,8 +25,8 @@ use abi::Abi; use ast::*; -use attr::ThinAttributesExt; -use codemap::{Span, Spanned}; +use syntax_pos::Span; +use codemap::Spanned; #[derive(Copy, Clone, PartialEq, Eq)] pub enum FnKind<'a> { @@ -49,57 +49,56 @@ pub enum FnKind<'a> { /// explicitly, you need to override each method. (And you also need /// to monitor future changes to `Visitor` in case a new method with a /// new default implementation gets introduced.) -pub trait Visitor<'v> : Sized { +pub trait Visitor: Sized { fn visit_name(&mut self, _span: Span, _name: Name) { // Nothing to do. 
} fn visit_ident(&mut self, span: Span, ident: Ident) { walk_ident(self, span, ident); } - fn visit_mod(&mut self, m: &'v Mod, _s: Span, _n: NodeId) { walk_mod(self, m) } - fn visit_foreign_item(&mut self, i: &'v ForeignItem) { walk_foreign_item(self, i) } - fn visit_item(&mut self, i: &'v Item) { walk_item(self, i) } - fn visit_local(&mut self, l: &'v Local) { walk_local(self, l) } - fn visit_block(&mut self, b: &'v Block) { walk_block(self, b) } - fn visit_stmt(&mut self, s: &'v Stmt) { walk_stmt(self, s) } - fn visit_arm(&mut self, a: &'v Arm) { walk_arm(self, a) } - fn visit_pat(&mut self, p: &'v Pat) { walk_pat(self, p) } - fn visit_decl(&mut self, d: &'v Decl) { walk_decl(self, d) } - fn visit_expr(&mut self, ex: &'v Expr) { walk_expr(self, ex) } - fn visit_expr_post(&mut self, _ex: &'v Expr) { } - fn visit_ty(&mut self, t: &'v Ty) { walk_ty(self, t) } - fn visit_generics(&mut self, g: &'v Generics) { walk_generics(self, g) } - fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Block, s: Span, _: NodeId) { + fn visit_mod(&mut self, m: &Mod, _s: Span, _n: NodeId) { walk_mod(self, m) } + fn visit_foreign_item(&mut self, i: &ForeignItem) { walk_foreign_item(self, i) } + fn visit_item(&mut self, i: &Item) { walk_item(self, i) } + fn visit_local(&mut self, l: &Local) { walk_local(self, l) } + fn visit_block(&mut self, b: &Block) { walk_block(self, b) } + fn visit_stmt(&mut self, s: &Stmt) { walk_stmt(self, s) } + fn visit_arm(&mut self, a: &Arm) { walk_arm(self, a) } + fn visit_pat(&mut self, p: &Pat) { walk_pat(self, p) } + fn visit_expr(&mut self, ex: &Expr) { walk_expr(self, ex) } + fn visit_expr_post(&mut self, _ex: &Expr) { } + fn visit_ty(&mut self, t: &Ty) { walk_ty(self, t) } + fn visit_generics(&mut self, g: &Generics) { walk_generics(self, g) } + fn visit_fn(&mut self, fk: FnKind, fd: &FnDecl, b: &Block, s: Span, _: NodeId) { walk_fn(self, fk, fd, b, s) } - fn visit_trait_item(&mut self, ti: &'v TraitItem) { walk_trait_item(self, ti) } - fn visit_impl_item(&mut self, ii: &'v ImplItem) { walk_impl_item(self, ii) } - fn visit_trait_ref(&mut self, t: &'v TraitRef) { walk_trait_ref(self, t) } - fn visit_ty_param_bound(&mut self, bounds: &'v TyParamBound) { + fn visit_trait_item(&mut self, ti: &TraitItem) { walk_trait_item(self, ti) } + fn visit_impl_item(&mut self, ii: &ImplItem) { walk_impl_item(self, ii) } + fn visit_trait_ref(&mut self, t: &TraitRef) { walk_trait_ref(self, t) } + fn visit_ty_param_bound(&mut self, bounds: &TyParamBound) { walk_ty_param_bound(self, bounds) } - fn visit_poly_trait_ref(&mut self, t: &'v PolyTraitRef, m: &'v TraitBoundModifier) { + fn visit_poly_trait_ref(&mut self, t: &PolyTraitRef, m: &TraitBoundModifier) { walk_poly_trait_ref(self, t, m) } - fn visit_variant_data(&mut self, s: &'v VariantData, _: Ident, - _: &'v Generics, _: NodeId, _: Span) { + fn visit_variant_data(&mut self, s: &VariantData, _: Ident, + _: &Generics, _: NodeId, _: Span) { walk_struct_def(self, s) } - fn visit_struct_field(&mut self, s: &'v StructField) { walk_struct_field(self, s) } - fn visit_enum_def(&mut self, enum_definition: &'v EnumDef, - generics: &'v Generics, item_id: NodeId, _: Span) { + fn visit_struct_field(&mut self, s: &StructField) { walk_struct_field(self, s) } + fn visit_enum_def(&mut self, enum_definition: &EnumDef, + generics: &Generics, item_id: NodeId, _: Span) { walk_enum_def(self, enum_definition, generics, item_id) } - fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) { + fn visit_variant(&mut self, v: &Variant, g: 
&Generics, item_id: NodeId) { walk_variant(self, v, g, item_id) } - fn visit_lifetime(&mut self, lifetime: &'v Lifetime) { + fn visit_lifetime(&mut self, lifetime: &Lifetime) { walk_lifetime(self, lifetime) } - fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) { + fn visit_lifetime_def(&mut self, lifetime: &LifetimeDef) { walk_lifetime_def(self, lifetime) } - fn visit_mac(&mut self, _mac: &'v Mac) { + fn visit_mac(&mut self, _mac: &Mac) { panic!("visit_mac disabled by default"); // NB: see note about macros above. // if you really want a visitor that @@ -107,26 +106,26 @@ pub trait Visitor<'v> : Sized { // definition in your trait impl: // visit::walk_mac(self, _mac) } - fn visit_path(&mut self, path: &'v Path, _id: NodeId) { + fn visit_path(&mut self, path: &Path, _id: NodeId) { walk_path(self, path) } - fn visit_path_list_item(&mut self, prefix: &'v Path, item: &'v PathListItem) { + fn visit_path_list_item(&mut self, prefix: &Path, item: &PathListItem) { walk_path_list_item(self, prefix, item) } - fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) { + fn visit_path_segment(&mut self, path_span: Span, path_segment: &PathSegment) { walk_path_segment(self, path_span, path_segment) } - fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &'v PathParameters) { + fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &PathParameters) { walk_path_parameters(self, path_span, path_parameters) } - fn visit_assoc_type_binding(&mut self, type_binding: &'v TypeBinding) { + fn visit_assoc_type_binding(&mut self, type_binding: &TypeBinding) { walk_assoc_type_binding(self, type_binding) } - fn visit_attribute(&mut self, _attr: &'v Attribute) {} - fn visit_macro_def(&mut self, macro_def: &'v MacroDef) { + fn visit_attribute(&mut self, _attr: &Attribute) {} + fn visit_macro_def(&mut self, macro_def: &MacroDef) { walk_macro_def(self, macro_def) } - fn visit_vis(&mut self, vis: &'v Visibility) { + fn visit_vis(&mut self, vis: &Visibility) { walk_vis(self, vis) } } @@ -145,47 +144,46 @@ macro_rules! 
walk_list { } } -pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: Option) { +pub fn walk_opt_name(visitor: &mut V, span: Span, opt_name: Option) { if let Some(name) = opt_name { visitor.visit_name(span, name); } } -pub fn walk_opt_ident<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_ident: Option) { +pub fn walk_opt_ident(visitor: &mut V, span: Span, opt_ident: Option) { if let Some(ident) = opt_ident { visitor.visit_ident(span, ident); } } -pub fn walk_opt_sp_ident<'v, V: Visitor<'v>>(visitor: &mut V, - opt_sp_ident: &Option>) { +pub fn walk_opt_sp_ident(visitor: &mut V, opt_sp_ident: &Option>) { if let Some(ref sp_ident) = *opt_sp_ident { visitor.visit_ident(sp_ident.span, sp_ident.node); } } -pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, ident: Ident) { +pub fn walk_ident(visitor: &mut V, span: Span, ident: Ident) { visitor.visit_name(span, ident.name); } -pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) { +pub fn walk_crate(visitor: &mut V, krate: &Crate) { visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID); walk_list!(visitor, visit_attribute, &krate.attrs); walk_list!(visitor, visit_macro_def, &krate.exported_macros); } -pub fn walk_macro_def<'v, V: Visitor<'v>>(visitor: &mut V, macro_def: &'v MacroDef) { +pub fn walk_macro_def(visitor: &mut V, macro_def: &MacroDef) { visitor.visit_ident(macro_def.span, macro_def.ident); walk_opt_ident(visitor, macro_def.span, macro_def.imported_from); walk_list!(visitor, visit_attribute, ¯o_def.attrs); } -pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod) { +pub fn walk_mod(visitor: &mut V, module: &Mod) { walk_list!(visitor, visit_item, &module.items); } -pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) { - for attr in local.attrs.as_attr_slice() { +pub fn walk_local(visitor: &mut V, local: &Local) { + for attr in local.attrs.iter() { visitor.visit_attribute(attr); } visitor.visit_pat(&local.pat); @@ -193,33 +191,27 @@ pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) { walk_list!(visitor, visit_expr, &local.init); } -pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) { +pub fn walk_lifetime(visitor: &mut V, lifetime: &Lifetime) { visitor.visit_name(lifetime.span, lifetime.name); } -pub fn walk_lifetime_def<'v, V: Visitor<'v>>(visitor: &mut V, - lifetime_def: &'v LifetimeDef) { +pub fn walk_lifetime_def(visitor: &mut V, lifetime_def: &LifetimeDef) { visitor.visit_lifetime(&lifetime_def.lifetime); walk_list!(visitor, visit_lifetime, &lifetime_def.bounds); } -pub fn walk_poly_trait_ref<'v, V>(visitor: &mut V, - trait_ref: &'v PolyTraitRef, - _modifier: &'v TraitBoundModifier) - where V: Visitor<'v> +pub fn walk_poly_trait_ref(visitor: &mut V, trait_ref: &PolyTraitRef, _: &TraitBoundModifier) + where V: Visitor, { walk_list!(visitor, visit_lifetime_def, &trait_ref.bound_lifetimes); visitor.visit_trait_ref(&trait_ref.trait_ref); } -pub fn walk_trait_ref<'v,V>(visitor: &mut V, - trait_ref: &'v TraitRef) - where V: Visitor<'v> -{ +pub fn walk_trait_ref(visitor: &mut V, trait_ref: &TraitRef) { visitor.visit_path(&trait_ref.path, trait_ref.ref_id) } -pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { +pub fn walk_item(visitor: &mut V, item: &Item) { visitor.visit_vis(&item.vis); visitor.visit_ident(item.span, item.ident); match item.node { @@ -298,17 +290,16 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { walk_list!(visitor, 
visit_attribute, &item.attrs); } -pub fn walk_enum_def<'v, V: Visitor<'v>>(visitor: &mut V, - enum_definition: &'v EnumDef, - generics: &'v Generics, - item_id: NodeId) { +pub fn walk_enum_def(visitor: &mut V, + enum_definition: &EnumDef, + generics: &Generics, + item_id: NodeId) { walk_list!(visitor, visit_variant, &enum_definition.variants, generics, item_id); } -pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, - variant: &'v Variant, - generics: &'v Generics, - item_id: NodeId) { +pub fn walk_variant(visitor: &mut V, variant: &Variant, generics: &Generics, item_id: NodeId) + where V: Visitor, +{ visitor.visit_ident(variant.span, variant.node.name); visitor.visit_variant_data(&variant.node.data, variant.node.name, generics, item_id, variant.span); @@ -316,7 +307,7 @@ pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, walk_list!(visitor, visit_attribute, &variant.node.attrs); } -pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { +pub fn walk_ty(visitor: &mut V, typ: &Ty) { match typ.node { TyKind::Vec(ref ty) | TyKind::Paren(ref ty) => { visitor.visit_ty(ty) @@ -362,28 +353,25 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { } } -pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) { +pub fn walk_path(visitor: &mut V, path: &Path) { for segment in &path.segments { visitor.visit_path_segment(path.span, segment); } } -pub fn walk_path_list_item<'v, V: Visitor<'v>>(visitor: &mut V, _prefix: &'v Path, - item: &'v PathListItem) { +pub fn walk_path_list_item(visitor: &mut V, _prefix: &Path, item: &PathListItem) { walk_opt_ident(visitor, item.span, item.node.name()); walk_opt_ident(visitor, item.span, item.node.rename()); } -pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, - path_span: Span, - segment: &'v PathSegment) { +pub fn walk_path_segment(visitor: &mut V, path_span: Span, segment: &PathSegment) { visitor.visit_ident(path_span, segment.identifier); visitor.visit_path_parameters(path_span, &segment.parameters); } -pub fn walk_path_parameters<'v, V: Visitor<'v>>(visitor: &mut V, - _path_span: Span, - path_parameters: &'v PathParameters) { +pub fn walk_path_parameters(visitor: &mut V, _path_span: Span, path_parameters: &PathParameters) + where V: Visitor, +{ match *path_parameters { PathParameters::AngleBracketed(ref data) => { walk_list!(visitor, visit_ty, &data.types); @@ -397,23 +385,21 @@ pub fn walk_path_parameters<'v, V: Visitor<'v>>(visitor: &mut V, } } -pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V, - type_binding: &'v TypeBinding) { +pub fn walk_assoc_type_binding(visitor: &mut V, type_binding: &TypeBinding) { visitor.visit_ident(type_binding.span, type_binding.ident); visitor.visit_ty(&type_binding.ty); } -pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { +pub fn walk_pat(visitor: &mut V, pattern: &Pat) { match pattern.node { PatKind::TupleStruct(ref path, ref children, _) => { visitor.visit_path(path, pattern.id); walk_list!(visitor, visit_pat, children); } - PatKind::Path(ref path) => { - visitor.visit_path(path, pattern.id); - } - PatKind::QPath(ref qself, ref path) => { - visitor.visit_ty(&qself.ty); + PatKind::Path(ref opt_qself, ref path) => { + if let Some(ref qself) = *opt_qself { + visitor.visit_ty(&qself.ty); + } visitor.visit_path(path, pattern.id) } PatKind::Struct(ref path, ref fields, _) => { @@ -449,8 +435,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { } } -pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: 
&mut V, - foreign_item: &'v ForeignItem) { +pub fn walk_foreign_item(visitor: &mut V, foreign_item: &ForeignItem) { visitor.visit_vis(&foreign_item.vis); visitor.visit_ident(foreign_item.span, foreign_item.ident); @@ -465,8 +450,7 @@ pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, walk_list!(visitor, visit_attribute, &foreign_item.attrs); } -pub fn walk_ty_param_bound<'v, V: Visitor<'v>>(visitor: &mut V, - bound: &'v TyParamBound) { +pub fn walk_ty_param_bound(visitor: &mut V, bound: &TyParamBound) { match *bound { TraitTyParamBound(ref typ, ref modifier) => { visitor.visit_poly_trait_ref(typ, modifier); @@ -477,7 +461,7 @@ pub fn walk_ty_param_bound<'v, V: Visitor<'v>>(visitor: &mut V, } } -pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics) { +pub fn walk_generics(visitor: &mut V, generics: &Generics) { for param in &generics.ty_params { visitor.visit_ident(param.span, param.ident); walk_list!(visitor, visit_ty_param_bound, ¶m.bounds); @@ -511,13 +495,13 @@ pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics } } -pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FunctionRetTy) { +pub fn walk_fn_ret_ty(visitor: &mut V, ret_ty: &FunctionRetTy) { if let FunctionRetTy::Ty(ref output_ty) = *ret_ty { visitor.visit_ty(output_ty) } } -pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl) { +pub fn walk_fn_decl(visitor: &mut V, function_declaration: &FnDecl) { for argument in &function_declaration.inputs { visitor.visit_pat(&argument.pat); visitor.visit_ty(&argument.ty) @@ -525,8 +509,7 @@ pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: & walk_fn_ret_ty(visitor, &function_declaration.output) } -pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, - function_kind: FnKind<'v>) { +pub fn walk_fn_kind(visitor: &mut V, function_kind: FnKind) { match function_kind { FnKind::ItemFn(_, generics, _, _, _, _) => { visitor.visit_generics(generics); @@ -538,17 +521,15 @@ pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, } } -pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V, - function_kind: FnKind<'v>, - function_declaration: &'v FnDecl, - function_body: &'v Block, - _span: Span) { - walk_fn_decl(visitor, function_declaration); - walk_fn_kind(visitor, function_kind); - visitor.visit_block(function_body) +pub fn walk_fn(visitor: &mut V, kind: FnKind, declaration: &FnDecl, body: &Block, _span: Span) + where V: Visitor, +{ + walk_fn_decl(visitor, declaration); + walk_fn_kind(visitor, kind); + visitor.visit_block(body) } -pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) { +pub fn walk_trait_item(visitor: &mut V, trait_item: &TraitItem) { visitor.visit_ident(trait_item.span, trait_item.ident); walk_list!(visitor, visit_attribute, &trait_item.attrs); match trait_item.node { @@ -568,10 +549,13 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai walk_list!(visitor, visit_ty_param_bound, bounds); walk_list!(visitor, visit_ty, default); } + TraitItemKind::Macro(ref mac) => { + visitor.visit_mac(mac); + } } } -pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem) { +pub fn walk_impl_item(visitor: &mut V, impl_item: &ImplItem) { visitor.visit_vis(&impl_item.vis); visitor.visit_ident(impl_item.span, impl_item.ident); walk_list!(visitor, visit_attribute, &impl_item.attrs); @@ -593,52 +577,44 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, 
impl_item: &'v ImplIt } } -pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V, - struct_definition: &'v VariantData) { +pub fn walk_struct_def(visitor: &mut V, struct_definition: &VariantData) { walk_list!(visitor, visit_struct_field, struct_definition.fields()); } -pub fn walk_struct_field<'v, V: Visitor<'v>>(visitor: &mut V, - struct_field: &'v StructField) { +pub fn walk_struct_field(visitor: &mut V, struct_field: &StructField) { visitor.visit_vis(&struct_field.vis); walk_opt_ident(visitor, struct_field.span, struct_field.ident); visitor.visit_ty(&struct_field.ty); walk_list!(visitor, visit_attribute, &struct_field.attrs); } -pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block) { +pub fn walk_block(visitor: &mut V, block: &Block) { walk_list!(visitor, visit_stmt, &block.stmts); - walk_list!(visitor, visit_expr, &block.expr); } -pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt) { +pub fn walk_stmt(visitor: &mut V, statement: &Stmt) { match statement.node { - StmtKind::Decl(ref declaration, _) => visitor.visit_decl(declaration), - StmtKind::Expr(ref expression, _) | StmtKind::Semi(ref expression, _) => { + StmtKind::Local(ref local) => visitor.visit_local(local), + StmtKind::Item(ref item) => visitor.visit_item(item), + StmtKind::Expr(ref expression) | StmtKind::Semi(ref expression) => { visitor.visit_expr(expression) } - StmtKind::Mac(ref mac, _, ref attrs) => { + StmtKind::Mac(ref mac) => { + let (ref mac, _, ref attrs) = **mac; visitor.visit_mac(mac); - for attr in attrs.as_attr_slice() { + for attr in attrs.iter() { visitor.visit_attribute(attr); } } } } -pub fn walk_decl<'v, V: Visitor<'v>>(visitor: &mut V, declaration: &'v Decl) { - match declaration.node { - DeclKind::Local(ref local) => visitor.visit_local(local), - DeclKind::Item(ref item) => visitor.visit_item(item), - } -} - -pub fn walk_mac<'v, V: Visitor<'v>>(_: &mut V, _: &'v Mac) { +pub fn walk_mac(_: &mut V, _: &Mac) { // Empty! 
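
`walk_stmt` above is adjusted for the reshaped statement AST: the `Decl`/`walk_decl` indirection is gone, `Local` and `Item` are `StmtKind` variants in their own right, and `Mac` keeps its attributes behind a single pointer. A self-contained toy mirror of the dispatch (hypothetical simplified types, not the real `ast` nodes), just to show how a lifetime-free visitor matches the new shape:

```rust
// Toy AST: only what walk_stmt needs to dispatch on.
struct Local { name: String }
struct Item { name: String }
struct Expr { text: String }

enum StmtKind {
    Local(Local),
    Item(Item),
    Expr(Expr),
    Semi(Expr),
}

// Mirrors the new `pub trait Visitor: Sized` style: no `'v` lifetime parameter.
trait Visitor: Sized {
    fn visit_local(&mut self, _l: &Local) {}
    fn visit_item(&mut self, _i: &Item) {}
    fn visit_expr(&mut self, _e: &Expr) {}
}

fn walk_stmt<V: Visitor>(visitor: &mut V, stmt: &StmtKind) {
    match *stmt {
        StmtKind::Local(ref local) => visitor.visit_local(local),
        StmtKind::Item(ref item) => visitor.visit_item(item),
        StmtKind::Expr(ref e) | StmtKind::Semi(ref e) => visitor.visit_expr(e),
    }
}

struct Counter { count: usize }

impl Visitor for Counter {
    fn visit_local(&mut self, l: &Local) { println!("local `{}`", l.name); self.count += 1; }
    fn visit_item(&mut self, i: &Item) { println!("item `{}`", i.name); self.count += 1; }
    fn visit_expr(&mut self, e: &Expr) { println!("expr `{}`", e.text); self.count += 1; }
}

fn main() {
    let stmts = vec![
        StmtKind::Local(Local { name: "x".into() }),
        StmtKind::Expr(Expr { text: "x + 1".into() }),
        StmtKind::Semi(Expr { text: "drop(x)".into() }),
    ];
    let mut counter = Counter { count: 0 };
    for stmt in &stmts {
        walk_stmt(&mut counter, stmt);
    }
    assert_eq!(counter.count, 3);
}
```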
} -pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { - for attr in expression.attrs.as_attr_slice() { +pub fn walk_expr(visitor: &mut V, expression: &Expr) { + for attr in expression.attrs.iter() { visitor.visit_attribute(attr); } match expression.node { @@ -761,7 +737,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { } visitor.visit_path(path, expression.id) } - ExprKind::Break(ref opt_sp_ident) | ExprKind::Again(ref opt_sp_ident) => { + ExprKind::Break(ref opt_sp_ident) | ExprKind::Continue(ref opt_sp_ident) => { walk_opt_sp_ident(visitor, opt_sp_ident); } ExprKind::Ret(ref optional_expression) => { @@ -787,14 +763,14 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr_post(expression) } -pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) { +pub fn walk_arm(visitor: &mut V, arm: &Arm) { walk_list!(visitor, visit_pat, &arm.pats); walk_list!(visitor, visit_expr, &arm.guard); visitor.visit_expr(&arm.body); walk_list!(visitor, visit_attribute, &arm.attrs); } -pub fn walk_vis<'v, V: Visitor<'v>>(visitor: &mut V, vis: &'v Visibility) { +pub fn walk_vis(visitor: &mut V, vis: &Visibility) { if let Visibility::Restricted { ref path, id } = *vis { visitor.visit_path(path, id); } diff --git a/src/libsyntax_ext/Cargo.toml b/src/libsyntax_ext/Cargo.toml index 671f3e4a7e330..040c6c8ebff26 100644 --- a/src/libsyntax_ext/Cargo.toml +++ b/src/libsyntax_ext/Cargo.toml @@ -12,3 +12,5 @@ crate-type = ["dylib"] fmt_macros = { path = "../libfmt_macros" } log = { path = "../liblog" } syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } +rustc_errors = { path = "../librustc_errors" } \ No newline at end of file diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 50d2b9d31fe01..56a8c28ffedc2 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -15,7 +15,6 @@ use self::State::*; use syntax::ast; use syntax::codemap; -use syntax::codemap::Span; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; @@ -23,6 +22,8 @@ use syntax::parse::token::intern; use syntax::parse::{self, token}; use syntax::ptr::P; use syntax::ast::AsmDialect; +use syntax_pos::Span; +use syntax::tokenstream; enum State { Asm, @@ -48,7 +49,7 @@ impl State { const OPTIONS: &'static [&'static str] = &["volatile", "alignstack", "intel"]; -pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { if !cx.ecfg.enable_asm() { feature_gate::emit_feature_err( @@ -62,8 +63,8 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // parsed as `asm!(z)` with `z = "x": y` which is type ascription. 
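
The comment above is why `expand_asm` scans for the first `:` or `::` token before parsing: otherwise `asm!("x": y)` would be consumed as a single type-ascription expression. A tiny standalone model of that scan (toy token kinds, not the real `tokenstream` type):

```rust
// Stand-ins for the token kinds the real scan cares about.
enum Token { Literal, Ident, Colon, ModSep }

// Mirrors `tts.iter().position(..).unwrap_or(tts.len())` in expand_asm:
// everything before the first `:` / `::` is the asm template.
fn first_colon(tts: &[Token]) -> usize {
    tts.iter()
       .position(|tt| match *tt {
           Token::Colon | Token::ModSep => true,
           _ => false,
       })
       .unwrap_or(tts.len())
}

fn main() {
    // asm!("nop"): no colon at all, so the whole input is the template.
    assert_eq!(first_colon(&[Token::Literal]), 1);

    // asm!("mov $0, $1" : "=r"(out) : "r"(x)): the template stops at index 1.
    let tts = [Token::Literal, Token::Colon, Token::Literal, Token::Ident,
               Token::Colon, Token::Literal, Token::Ident];
    assert_eq!(first_colon(&tts), 1);

    // A `::` path separator ends the template just like a plain colon.
    assert_eq!(first_colon(&[Token::Ident, Token::ModSep, Token::Ident]), 1);
}
```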
let first_colon = tts.iter().position(|tt| { match *tt { - ast::TokenTree::Token(_, token::Colon) | - ast::TokenTree::Token(_, token::ModSep) => true, + tokenstream::TokenTree::Token(_, token::Colon) | + tokenstream::TokenTree::Token(_, token::ModSep) => true, _ => false } }).unwrap_or(tts.len()); @@ -260,6 +261,6 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) expn_id: expn_id, }), span: sp, - attrs: None, + attrs: ast::ThinVec::new(), })) } diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs index 593bf14a0182f..dbf23328f41fe 100644 --- a/src/libsyntax_ext/cfg.rs +++ b/src/libsyntax_ext/cfg.rs @@ -12,17 +12,17 @@ /// a literal `true` or `false` based on whether the given cfg matches the /// current compilation environment. -use syntax::ast; -use syntax::codemap::Span; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; use syntax::attr; +use syntax::tokenstream; use syntax::parse::token; +use syntax_pos::Span; pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[tokenstream::TokenTree]) -> Box { let mut p = cx.new_parser_from_tts(tts); let cfg = panictry!(p.parse_meta_item()); diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs index db731adf7943b..22c4aeefbd169 100644 --- a/src/libsyntax_ext/concat.rs +++ b/src/libsyntax_ext/concat.rs @@ -9,16 +9,17 @@ // except according to those terms. use syntax::ast; -use syntax::codemap; use syntax::ext::base; use syntax::ext::build::AstBuilder; use syntax::parse::token; +use syntax_pos; +use syntax::tokenstream; use std::string::String; pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, - sp: codemap::Span, - tts: &[ast::TokenTree]) + sp: syntax_pos::Span, + tts: &[tokenstream::TokenTree]) -> Box { let es = match base::get_exprs_from_tts(cx, sp, tts) { Some(e) => e, diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index 09c23682cd73f..870413a7f61b0 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -8,14 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use syntax::ast::{self, TokenTree}; -use syntax::codemap::Span; +use syntax::ast; use syntax::ext::base::*; use syntax::ext::base; use syntax::feature_gate; use syntax::parse::token; use syntax::parse::token::str_to_ident; use syntax::ptr::P; +use syntax_pos::Span; +use syntax::tokenstream::TokenTree; pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { @@ -70,7 +71,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Path(None, self.path()), span: self.span, - attrs: None, + attrs: ast::ThinVec::new(), })) } diff --git a/src/libsyntax_ext/deriving/bounds.rs b/src/libsyntax_ext/deriving/bounds.rs index 9bc0e08811071..36818e000b55b 100644 --- a/src/libsyntax_ext/deriving/bounds.rs +++ b/src/libsyntax_ext/deriving/bounds.rs @@ -12,8 +12,8 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::MetaItem; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; +use syntax_pos::Span; pub fn expand_deriving_unsafe_bound(cx: &mut ExtCtxt, span: Span, diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 30fe0f2db8a1c..1e47ebb85837a 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -13,11 +13,11 @@ use deriving::generic::ty::*; use syntax::ast::{Expr, ItemKind, Generics, MetaItem, VariantData}; use syntax::attr; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax_pos::Span; #[derive(PartialEq)] enum Mode { Deep, Shallow } @@ -145,12 +145,10 @@ fn cs_clone( match mode { Mode::Shallow => { - cx.expr_block(cx.block(trait_span, - all_fields.iter() - .map(subcall) - .map(|e| cx.stmt_expr(e)) - .collect(), - Some(cx.expr_deref(trait_span, cx.expr_self(trait_span))))) + let mut stmts: Vec<_> = + all_fields.iter().map(subcall).map(|e| cx.stmt_expr(e)).collect(); + stmts.push(cx.stmt_expr(cx.expr_deref(trait_span, cx.expr_self(trait_span)))); + cx.expr_block(cx.block(trait_span, stmts)) } Mode::Deep => { match *vdata { diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 8bd12c393370d..9c5072eeb3e0b 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -12,11 +12,11 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::{MetaItem, Expr}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax_pos::Span; pub fn expand_deriving_eq(cx: &mut ExtCtxt, span: Span, @@ -30,7 +30,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, // create `a.(); b.(); c.(); ...` // (where method is `assert_receiver_is_total_eq`) let stmts = exprs.into_iter().map(|e| cx.stmt_expr(e)).collect(); - let block = cx.block(span, stmts, None); + let block = cx.block(span, stmts); cx.expr_block(block) }, Box::new(|cx, sp, _, _| { diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs index 6133adb8fc5d1..cbd7ac0eadad0 100644 --- a/src/libsyntax_ext/deriving/cmp/ord.rs +++ b/src/libsyntax_ext/deriving/cmp/ord.rs @@ -12,11 +12,11 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::{MetaItem, Expr, self}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use 
syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax_pos::Span; pub fn expand_deriving_ord(cx: &mut ExtCtxt, span: Span, diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index e5890d7213bed..b5a8167fb5550 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -12,11 +12,11 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::{MetaItem, Expr, BinOpKind}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax_pos::Span; pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, span: Span, diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index cfc6dbe5cd030..26c14ae934f72 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -14,11 +14,11 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::{MetaItem, Expr, BinOpKind, self}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax_pos::Span; pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, span: Span, diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index d86eae820a884..34c872bef11d1 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -13,11 +13,11 @@ use deriving::generic::ty::*; use syntax::ast; use syntax::ast::{MetaItem, Expr}; -use syntax::codemap::{Span, respan, DUMMY_SP}; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::ptr::P; +use syntax_pos::{Span, DUMMY_SP}; pub fn expand_deriving_debug(cx: &mut ExtCtxt, span: Span, @@ -78,7 +78,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let fmt = substr.nonself_args[0].clone(); - let stmts = match *substr.fields { + let mut stmts = match *substr.fields { Struct(_, ref fields) | EnumMatching(_, _, ref fields) => { let mut stmts = vec![]; if !is_struct { @@ -136,7 +136,8 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, token::str_to_ident("finish"), vec![]); - let block = cx.block(span, stmts, Some(expr)); + stmts.push(cx.stmt_expr(expr)); + let block = cx.block(span, stmts); cx.expr_block(block) } @@ -149,8 +150,11 @@ fn stmt_let_undescore(cx: &mut ExtCtxt, init: Some(expr), id: ast::DUMMY_NODE_ID, span: sp, - attrs: None, + attrs: ast::ThinVec::new(), }); - let decl = respan(sp, ast::DeclKind::Local(local)); - respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID)) + ast::Stmt { + id: ast::DUMMY_NODE_ID, + node: ast::StmtKind::Local(local), + span: sp, + } } diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index 04888d046ad2d..488402c48f70a 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -16,12 +16,12 @@ use deriving::generic::ty::*; use syntax::ast; use syntax::ast::{MetaItem, Expr, Mutability}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::parse::token; use syntax::ptr::P; +use syntax_pos::Span; pub fn 
expand_deriving_rustc_decodable(cx: &mut ExtCtxt, span: Span, diff --git a/src/libsyntax_ext/deriving/default.rs b/src/libsyntax_ext/deriving/default.rs index a6a4830fab7f8..2711ccba81914 100644 --- a/src/libsyntax_ext/deriving/default.rs +++ b/src/libsyntax_ext/deriving/default.rs @@ -12,11 +12,11 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::{MetaItem, Expr}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax_pos::Span; pub fn expand_deriving_default(cx: &mut ExtCtxt, span: Span, diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index 66672305829b9..ad3786212475e 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -93,11 +93,11 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::{MetaItem, Expr, ExprKind, Mutability}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt,Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::ptr::P; +use syntax_pos::Span; pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt, span: Span, @@ -285,7 +285,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, cx.expr_str(trait_span, substr.type_ident.name.as_str()), blk )); - cx.expr_block(cx.block(trait_span, vec!(me), Some(ret))) + cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)])) } _ => cx.bug("expected Struct or EnumMatching in derive(Encodable)") diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index d6adec84e8440..647e414a7fd27 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -197,12 +197,12 @@ use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; -use syntax::codemap::{self, respan, DUMMY_SP}; -use syntax::codemap::Span; -use syntax::errors::Handler; +use syntax::codemap::{self, respan}; use syntax::util::move_map::MoveMap; use syntax::parse::token::{keywords, InternedString}; use syntax::ptr::P; +use syntax_pos::{Span, DUMMY_SP}; +use errors::Handler; use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty}; @@ -353,8 +353,8 @@ fn find_type_parameters(ty: &ast::Ty, ty_param_names: &[ast::Name]) -> Vec>, } - impl<'a> visit::Visitor<'a> for Visitor<'a> { - fn visit_ty(&mut self, ty: &'a ast::Ty) { + impl<'a> visit::Visitor for Visitor<'a> { + fn visit_ty(&mut self, ty: &ast::Ty) { match ty.node { ast::TyKind::Path(_, ref path) if !path.global => { match path.segments.first() { @@ -1332,8 +1332,8 @@ impl<'a> MethodDef<'a> { // } let all_match = cx.expr_match(sp, match_arg, match_arms); let arm_expr = cx.expr_if(sp, discriminant_test, all_match, Some(arm_expr)); - cx.expr_block( - cx.block_all(sp, index_let_stmts, Some(arm_expr))) + index_let_stmts.push(cx.stmt_expr(arm_expr)); + cx.expr_block(cx.block(sp, index_let_stmts)) } else if variants.is_empty() { // As an additional wrinkle, For a zero-variant enum A, // currently the compiler diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index b581f5267eaac..626fbaada5cbf 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -18,8 +18,9 @@ use syntax::ast; use syntax::ast::{Expr, Generics, Ident, SelfKind}; use 
syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; -use syntax::codemap::{Span,respan}; +use syntax::codemap::respan; use syntax::ptr::P; +use syntax_pos::Span; /// The types of pointers #[derive(Clone, Eq, PartialEq)] diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs index fd449372cb376..0fad96c84ef3d 100644 --- a/src/libsyntax_ext/deriving/hash.rs +++ b/src/libsyntax_ext/deriving/hash.rs @@ -13,10 +13,10 @@ use deriving::generic::*; use deriving::generic::ty::*; use syntax::ast::{MetaItem, Expr, Mutability}; -use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::ptr::P; +use syntax_pos::Span; pub fn expand_deriving_hash(cx: &mut ExtCtxt, span: Span, @@ -99,5 +99,5 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) stmts.push(call_hash(span, self_.clone())); } - cx.expr_block(cx.block(trait_span, stmts, None)) + cx.expr_block(cx.block(trait_span, stmts)) } diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index 6fb6dee94ed35..169e807366197 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -16,9 +16,10 @@ use syntax::ext::base::{ExtCtxt, SyntaxEnv, Annotatable}; use syntax::ext::base::{MultiDecorator, MultiItemDecorator, MultiModifier}; use syntax::ext::build::AstBuilder; use syntax::feature_gate; -use syntax::codemap::{self, Span}; +use syntax::codemap; use syntax::parse::token::{intern, intern_and_get_ident}; use syntax::ptr::P; +use syntax_pos::Span; macro_rules! pathvec { ($($x:ident)::+) => ( @@ -297,8 +298,7 @@ fn call_intrinsic(cx: &ExtCtxt, let call = cx.expr_call_global(span, path, args); cx.expr_block(P(ast::Block { - stmts: vec![], - expr: Some(call), + stmts: vec![cx.stmt_expr(call)], id: ast::DUMMY_NODE_ID, rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated), span: span })) diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index 63ec9cac07317..546f8eaa69286 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -15,15 +15,16 @@ */ use syntax::ast; -use syntax::codemap::Span; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; use syntax::parse::token; +use syntax_pos::Span; +use syntax::tokenstream; use std::env; -pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") { None => return DummyResult::expr(sp), @@ -56,7 +57,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT MacEager::expr(e) } -pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let mut exprs = match get_exprs_from_tts(cx, sp, tts) { Some(ref exprs) if exprs.is_empty() => { diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index abfa65580646d..f311f16f11b0e 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -14,13 +14,14 @@ use self::Position::*; use fmt_macros as parse; use syntax::ast; -use syntax::codemap::{Span, respan, DUMMY_SP}; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; use syntax::fold::Folder; use syntax::parse::token::{self, keywords}; use syntax::ptr::P; +use 
syntax_pos::{Span, DUMMY_SP}; +use syntax::tokenstream; use std::collections::HashMap; @@ -80,7 +81,7 @@ struct Context<'a, 'b:'a> { /// Some((fmtstr, unnamed arguments, ordering of named arguments, /// named arguments)) /// ``` -fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Option<(P, Vec>, Vec, HashMap>)> { let mut args = Vec::new(); @@ -441,12 +442,14 @@ impl<'a, 'b> Context<'a, 'b> { let name = ecx.ident_of(name); let item = ecx.item(sp, name, vec![], st); - let decl = respan(sp, ast::DeclKind::Item(item)); + let stmt = ast::Stmt { + id: ast::DUMMY_NODE_ID, + node: ast::StmtKind::Item(item), + span: sp, + }; // Wrap the declaration in a block so that it forms a single expression. - ecx.expr_block(ecx.block(sp, - vec![respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID))], - Some(ecx.expr_ident(sp, name)))) + ecx.expr_block(ecx.block(sp, vec![stmt, ecx.stmt_expr(ecx.expr_ident(sp, name))])) } /// Actually builds the expression which the iformat! block will be expanded @@ -606,7 +609,7 @@ impl<'a, 'b> Context<'a, 'b> { } pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) + tts: &[tokenstream::TokenTree]) -> Box { match parse_args(ecx, sp, tts) { diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 8f5362b4d2895..17b200bac58c5 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -26,6 +26,8 @@ extern crate fmt_macros; #[macro_use] extern crate log; #[macro_use] extern crate syntax; +extern crate syntax_pos; +extern crate rustc_errors as errors; use syntax::ext::base::{MacroExpanderFn, NormalTT}; use syntax::ext::base::{SyntaxEnv, SyntaxExtension}; diff --git a/src/libsyntax_ext/log_syntax.rs b/src/libsyntax_ext/log_syntax.rs index ee944abb645dc..9645c5bb42723 100644 --- a/src/libsyntax_ext/log_syntax.rs +++ b/src/libsyntax_ext/log_syntax.rs @@ -8,15 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use syntax::ast; -use syntax::codemap; use syntax::ext::base; use syntax::feature_gate; use syntax::print; +use syntax::tokenstream; +use syntax_pos; pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt, - sp: codemap::Span, - tts: &[ast::TokenTree]) + sp: syntax_pos::Span, + tts: &[tokenstream::TokenTree]) -> Box { if !cx.ecfg.enable_log_syntax() { feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs index 7b1e985442adb..ad396d38de9f5 100644 --- a/src/libsyntax_ext/trace_macros.rs +++ b/src/libsyntax_ext/trace_macros.rs @@ -8,13 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
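
The `format.rs` hunk above shows the other recurring mechanical change in this patch: `ast::Block` no longer keeps a trailing expression in a separate slot, so builders like `ecx.block` take only a statement list and the tail value is pushed as a final expression statement via `ecx.stmt_expr(..)`, exactly as in the deriving changes earlier. A toy model of that reshaping (hypothetical simplified types):

```rust
// Simplified statement/block shapes, after the change: the block's value is
// represented by a trailing `Expr` statement rather than an `Option<Expr>` field.
enum Stmt {
    Semi(String), // a statement terminated with `;`
    Expr(String), // a trailing expression: the value of the whole block
}

struct Block {
    stmts: Vec<Stmt>,
}

// Old call sites looked like `block(stmts, Some(tail))`; the new shape folds
// the tail into the statement list, as the deriving code now does by hand.
fn block_with_tail(mut stmts: Vec<Stmt>, tail: String) -> Block {
    stmts.push(Stmt::Expr(tail));
    Block { stmts }
}

fn main() {
    let block = block_with_tail(
        vec![Stmt::Semi("let x = compute();".into())],
        "x".into(),
    );

    for stmt in &block.stmts {
        match *stmt {
            Stmt::Semi(ref src) => println!("stmt: {}", src),
            Stmt::Expr(ref tail) => println!("tail: {}", tail),
        }
    }

    match block.stmts.last() {
        Some(&Stmt::Expr(ref tail)) => assert_eq!(tail, "x"),
        _ => panic!("expected a trailing expression statement"),
    }
}
```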
-use syntax::ast::TokenTree; -use syntax::codemap::Span; use syntax::ext::base::ExtCtxt; use syntax::ext::base; use syntax::feature_gate; use syntax::parse::token::keywords; - +use syntax_pos::Span; +use syntax::tokenstream::TokenTree; pub fn expand_trace_macros(cx: &mut ExtCtxt, sp: Span, diff --git a/src/libsyntax_pos/Cargo.toml b/src/libsyntax_pos/Cargo.toml new file mode 100644 index 0000000000000..760aaa8a95784 --- /dev/null +++ b/src/libsyntax_pos/Cargo.toml @@ -0,0 +1,12 @@ +[package] +authors = ["The Rust Project Developers"] +name = "syntax_pos" +version = "0.0.0" + +[lib] +name = "syntax_pos" +path = "lib.rs" +crate-type = ["dylib"] + +[dependencies] +serialize = { path = "../libserialize" } diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs new file mode 100644 index 0000000000000..39bb5956312bc --- /dev/null +++ b/src/libsyntax_pos/lib.rs @@ -0,0 +1,667 @@ +// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! The source positions and related helper functions +//! +//! # Note +//! +//! This API is completely unstable and subject to change. + +#![crate_name = "syntax_pos"] +#![unstable(feature = "rustc_private", issue = "27812")] +#![crate_type = "dylib"] +#![crate_type = "rlib"] +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(custom_attribute)] +#![allow(unused_attributes)] +#![feature(rustc_private)] +#![feature(staged_api)] +#![feature(question_mark)] + +use std::cell::{Cell, RefCell}; +use std::ops::{Add, Sub}; +use std::rc::Rc; +use std::cmp; + +use std::fmt; + +use serialize::{Encodable, Decodable, Encoder, Decoder}; + +extern crate serialize; +extern crate serialize as rustc_serialize; // used by deriving + +pub type FileName = String; + +/// Spans represent a region of code, used for error reporting. Positions in spans +/// are *absolute* positions from the beginning of the codemap, not positions +/// relative to FileMaps. Methods on the CodeMap can be used to relate spans back +/// to the original source. +/// You must be careful if the span crosses more than one file - you will not be +/// able to use many of the functions on spans in codemap and you cannot assume +/// that the length of the span = hi - lo; there may be space in the BytePos +/// range between files. +#[derive(Clone, Copy, Hash, PartialEq, Eq)] +pub struct Span { + pub lo: BytePos, + pub hi: BytePos, + /// Information about where the macro came from, if this piece of + /// code was created by a macro expansion. + pub expn_id: ExpnId +} + +/// A collection of spans. Spans have two orthogonal attributes: +/// +/// - they can be *primary spans*. In this case they are the locus of +/// the error, and would be rendered with `^^^`. +/// - they can have a *label*. In this case, the label is written next +/// to the mark in the snippet when we render. 
+#[derive(Clone)] +pub struct MultiSpan { + primary_spans: Vec, + span_labels: Vec<(Span, String)>, +} + +impl Span { + /// Returns a new span representing just the end-point of this span + pub fn end_point(self) -> Span { + let lo = cmp::max(self.hi.0 - 1, self.lo.0); + Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id} + } + + /// Returns `self` if `self` is not the dummy span, and `other` otherwise. + pub fn substitute_dummy(self, other: Span) -> Span { + if self.source_equal(&DUMMY_SP) { other } else { self } + } + + pub fn contains(self, other: Span) -> bool { + self.lo <= other.lo && other.hi <= self.hi + } + + /// Return true if the spans are equal with regards to the source text. + /// + /// Use this instead of `==` when either span could be generated code, + /// and you only care that they point to the same bytes of source text. + pub fn source_equal(&self, other: &Span) -> bool { + self.lo == other.lo && self.hi == other.hi + } + + /// Returns `Some(span)`, a union of `self` and `other`, on overlap. + pub fn merge(self, other: Span) -> Option { + if self.expn_id != other.expn_id { + return None; + } + + if (self.lo <= other.lo && self.hi > other.lo) || + (self.lo >= other.lo && self.lo < other.hi) { + Some(Span { + lo: cmp::min(self.lo, other.lo), + hi: cmp::max(self.hi, other.hi), + expn_id: self.expn_id, + }) + } else { + None + } + } + + /// Returns `Some(span)`, where the start is trimmed by the end of `other` + pub fn trim_start(self, other: Span) -> Option { + if self.hi > other.hi { + Some(Span { lo: cmp::max(self.lo, other.hi), .. self }) + } else { + None + } + } +} + +#[derive(Clone, Debug)] +pub struct SpanLabel { + /// The span we are going to include in the final snippet. + pub span: Span, + + /// Is this a primary span? This is the "locus" of the message, + /// and is indicated with a `^^^^` underline, versus `----`. + pub is_primary: bool, + + /// What label should we attach to this span (if any)? 
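
The `Span` methods defined above (`end_point`, `contains`, `merge`, `trim_start`) are plain interval arithmetic over `BytePos`, with `merge` additionally refusing to join spans from different macro expansions. A standalone sketch of the `contains`/`merge` logic over bare `u32` offsets (toy struct, same field names as the real one):

```rust
// Toy mirror of syntax_pos::Span's interval logic, keeping expn_id around to
// show why `merge` can return None even for overlapping offsets.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Span { lo: u32, hi: u32, expn_id: u32 }

const NO_EXPANSION: u32 = !0;

fn sp(lo: u32, hi: u32) -> Span {
    Span { lo: lo, hi: hi, expn_id: NO_EXPANSION }
}

impl Span {
    fn contains(self, other: Span) -> bool {
        self.lo <= other.lo && other.hi <= self.hi
    }

    // Union of two overlapping spans from the same expansion, as in the diff.
    fn merge(self, other: Span) -> Option<Span> {
        if self.expn_id != other.expn_id {
            return None;
        }
        if (self.lo <= other.lo && self.hi > other.lo) ||
           (self.lo >= other.lo && self.lo < other.hi) {
            Some(Span {
                lo: self.lo.min(other.lo),
                hi: self.hi.max(other.hi),
                expn_id: self.expn_id,
            })
        } else {
            None
        }
    }
}

fn main() {
    let a = sp(10, 20);
    let b = sp(15, 30);
    let c = sp(40, 50);

    assert!(sp(0, 100).contains(a));
    assert_eq!(a.merge(b), Some(sp(10, 30))); // overlapping spans: union
    assert_eq!(a.merge(c), None);             // disjoint spans: no merge

    let from_macro = Span { lo: 12, hi: 18, expn_id: 7 };
    assert_eq!(a.merge(from_macro), None);    // different expansion: no merge
}
```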
+ pub label: Option, +} + +impl Encodable for Span { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_struct("Span", 2, |s| { + s.emit_struct_field("lo", 0, |s| { + self.lo.encode(s) + })?; + + s.emit_struct_field("hi", 1, |s| { + self.hi.encode(s) + }) + }) + } +} + +impl Decodable for Span { + fn decode(d: &mut D) -> Result { + d.read_struct("Span", 2, |d| { + let lo = d.read_struct_field("lo", 0, |d| { + BytePos::decode(d) + })?; + + let hi = d.read_struct_field("hi", 1, |d| { + BytePos::decode(d) + })?; + + Ok(mk_sp(lo, hi)) + }) + } +} + +fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "Span {{ lo: {:?}, hi: {:?}, expn_id: {:?} }}", + span.lo, span.hi, span.expn_id) +} + +impl fmt::Debug for Span { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + SPAN_DEBUG.with(|span_debug| span_debug.get()(*self, f)) + } +} + +pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION }; + +// Generic span to be used for code originating from the command line +pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0), + hi: BytePos(0), + expn_id: COMMAND_LINE_EXPN }; + +impl MultiSpan { + pub fn new() -> MultiSpan { + MultiSpan { + primary_spans: vec![], + span_labels: vec![] + } + } + + pub fn from_span(primary_span: Span) -> MultiSpan { + MultiSpan { + primary_spans: vec![primary_span], + span_labels: vec![] + } + } + + pub fn from_spans(vec: Vec) -> MultiSpan { + MultiSpan { + primary_spans: vec, + span_labels: vec![] + } + } + + pub fn push_span_label(&mut self, span: Span, label: String) { + self.span_labels.push((span, label)); + } + + /// Selects the first primary span (if any) + pub fn primary_span(&self) -> Option { + self.primary_spans.first().cloned() + } + + /// Returns all primary spans. + pub fn primary_spans(&self) -> &[Span] { + &self.primary_spans + } + + /// Returns the strings to highlight. We always ensure that there + /// is an entry for each of the primary spans -- for each primary + /// span P, if there is at least one label with span P, we return + /// those labels (marked as primary). But otherwise we return + /// `SpanLabel` instances with empty labels. + pub fn span_labels(&self) -> Vec { + let is_primary = |span| self.primary_spans.contains(&span); + let mut span_labels = vec![]; + + for &(span, ref label) in &self.span_labels { + span_labels.push(SpanLabel { + span: span, + is_primary: is_primary(span), + label: Some(label.clone()) + }); + } + + for &span in &self.primary_spans { + if !span_labels.iter().any(|sl| sl.span == span) { + span_labels.push(SpanLabel { + span: span, + is_primary: true, + label: None + }); + } + } + + span_labels + } +} + +impl From for MultiSpan { + fn from(span: Span) -> MultiSpan { + MultiSpan::from_span(span) + } +} + +#[derive(PartialEq, Eq, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Copy)] +pub struct ExpnId(pub u32); + +pub const NO_EXPANSION: ExpnId = ExpnId(!0); +// For code appearing from the command line +pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); + +impl ExpnId { + pub fn from_u32(id: u32) -> ExpnId { + ExpnId(id) + } + + pub fn into_u32(self) -> u32 { + self.0 + } +} + +/// Identifies an offset of a multi-byte character in a FileMap +#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)] +pub struct MultiByteChar { + /// The absolute offset of the character in the CodeMap + pub pos: BytePos, + /// The number of bytes, >=2 + pub bytes: usize, +} + +/// A single source in the CodeMap. 
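
`MultiSpan::span_labels` above guarantees one `SpanLabel` entry per primary span: labelled spans keep their label, and any primary span without one still gets an entry marked primary with `label: None`. A short exercise of that API exactly as defined in the new crate; like the fulldeps tests at the end of this patch it only builds on nightly against the in-tree crates, and the label string is just example data:

```rust
#![feature(rustc_private)]

extern crate syntax_pos;

use syntax_pos::{mk_sp, BytePos, MultiSpan};

fn main() {
    let primary = mk_sp(BytePos(4), BytePos(10));
    let secondary = mk_sp(BytePos(20), BytePos(25));

    let mut msp = MultiSpan::from_span(primary);
    msp.push_span_label(secondary, "value moved here".to_string());

    let labels = msp.span_labels();
    // The primary span gets an entry even though we never labelled it...
    assert!(labels.iter().any(|l| l.span == primary && l.is_primary && l.label.is_none()));
    // ...and the labelled secondary span is reported as non-primary.
    assert!(labels.iter().any(|l| l.span == secondary && !l.is_primary));
}
```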
+pub struct FileMap { + /// The name of the file that the source came from, source that doesn't + /// originate from files has names between angle brackets by convention, + /// e.g. `` + pub name: FileName, + /// The absolute path of the file that the source came from. + pub abs_path: Option, + /// The complete source code + pub src: Option>, + /// The start position of this source in the CodeMap + pub start_pos: BytePos, + /// The end position of this source in the CodeMap + pub end_pos: BytePos, + /// Locations of lines beginnings in the source code + pub lines: RefCell>, + /// Locations of multi-byte characters in the source code + pub multibyte_chars: RefCell>, +} + +impl Encodable for FileMap { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_struct("FileMap", 6, |s| { + s.emit_struct_field("name", 0, |s| self.name.encode(s))?; + s.emit_struct_field("abs_path", 1, |s| self.abs_path.encode(s))?; + s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?; + s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?; + s.emit_struct_field("lines", 4, |s| { + let lines = self.lines.borrow(); + // store the length + s.emit_u32(lines.len() as u32)?; + + if !lines.is_empty() { + // In order to preserve some space, we exploit the fact that + // the lines list is sorted and individual lines are + // probably not that long. Because of that we can store lines + // as a difference list, using as little space as possible + // for the differences. + let max_line_length = if lines.len() == 1 { + 0 + } else { + lines.windows(2) + .map(|w| w[1] - w[0]) + .map(|bp| bp.to_usize()) + .max() + .unwrap() + }; + + let bytes_per_diff: u8 = match max_line_length { + 0 ... 0xFF => 1, + 0x100 ... 0xFFFF => 2, + _ => 4 + }; + + // Encode the number of bytes used per diff. + bytes_per_diff.encode(s)?; + + // Encode the first element. + lines[0].encode(s)?; + + let diff_iter = (&lines[..]).windows(2) + .map(|w| (w[1] - w[0])); + + match bytes_per_diff { + 1 => for diff in diff_iter { (diff.0 as u8).encode(s)? }, + 2 => for diff in diff_iter { (diff.0 as u16).encode(s)? }, + 4 => for diff in diff_iter { diff.0.encode(s)? }, + _ => unreachable!() + } + } + + Ok(()) + })?; + s.emit_struct_field("multibyte_chars", 5, |s| { + (*self.multibyte_chars.borrow()).encode(s) + }) + }) + } +} + +impl Decodable for FileMap { + fn decode(d: &mut D) -> Result { + + d.read_struct("FileMap", 6, |d| { + let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?; + let abs_path: Option = + d.read_struct_field("abs_path", 1, |d| Decodable::decode(d))?; + let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?; + let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?; + let lines: Vec = d.read_struct_field("lines", 4, |d| { + let num_lines: u32 = Decodable::decode(d)?; + let mut lines = Vec::with_capacity(num_lines as usize); + + if num_lines > 0 { + // Read the number of bytes used per diff. + let bytes_per_diff: u8 = Decodable::decode(d)?; + + // Read the first element. + let mut line_start: BytePos = Decodable::decode(d)?; + lines.push(line_start); + + for _ in 1..num_lines { + let diff = match bytes_per_diff { + 1 => d.read_u8()? as u32, + 2 => d.read_u16()? 
as u32, + 4 => d.read_u32()?, + _ => unreachable!() + }; + + line_start = line_start + BytePos(diff); + + lines.push(line_start); + } + } + + Ok(lines) + })?; + let multibyte_chars: Vec = + d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?; + Ok(FileMap { + name: name, + abs_path: abs_path, + start_pos: start_pos, + end_pos: end_pos, + src: None, + lines: RefCell::new(lines), + multibyte_chars: RefCell::new(multibyte_chars) + }) + }) + } +} + +impl fmt::Debug for FileMap { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "FileMap({})", self.name) + } +} + +impl FileMap { + /// EFFECT: register a start-of-line offset in the + /// table of line-beginnings. + /// UNCHECKED INVARIANT: these offsets must be added in the right + /// order and must be in the right places; there is shared knowledge + /// about what ends a line between this file and parse.rs + /// WARNING: pos param here is the offset relative to start of CodeMap, + /// and CodeMap will append a newline when adding a filemap without a newline at the end, + /// so the safe way to call this is with value calculated as + /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap. + pub fn next_line(&self, pos: BytePos) { + // the new charpos must be > the last one (or it's the first one). + let mut lines = self.lines.borrow_mut(); + let line_len = lines.len(); + assert!(line_len == 0 || ((*lines)[line_len - 1] < pos)); + lines.push(pos); + } + + /// get a line from the list of pre-computed line-beginnings. + /// line-number here is 0-based. + pub fn get_line(&self, line_number: usize) -> Option<&str> { + match self.src { + Some(ref src) => { + let lines = self.lines.borrow(); + lines.get(line_number).map(|&line| { + let begin: BytePos = line - self.start_pos; + let begin = begin.to_usize(); + // We can't use `lines.get(line_number+1)` because we might + // be parsing when we call this function and thus the current + // line is the last one we have line info for. + let slice = &src[begin..]; + match slice.find('\n') { + Some(e) => &slice[..e], + None => slice + } + }) + } + None => None + } + } + + pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) { + assert!(bytes >=2 && bytes <= 4); + let mbc = MultiByteChar { + pos: pos, + bytes: bytes, + }; + self.multibyte_chars.borrow_mut().push(mbc); + } + + pub fn is_real_file(&self) -> bool { + !(self.name.starts_with("<") && + self.name.ends_with(">")) + } + + pub fn is_imported(&self) -> bool { + self.src.is_none() + } + + pub fn count_lines(&self) -> usize { + self.lines.borrow().len() + } +} + +// _____________________________________________________________________________ +// Pos, BytePos, CharPos +// + +pub trait Pos { + fn from_usize(n: usize) -> Self; + fn to_usize(&self) -> usize; +} + +/// A byte offset. Keep this small (currently 32-bits), as AST contains +/// a lot of them. +#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] +pub struct BytePos(pub u32); + +/// A character offset. Because of multibyte utf8 characters, a byte offset +/// is not equivalent to a character offset. The CodeMap will convert BytePos +/// values to CharPos values as necessary. 
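
The `FileMap` serialization above stores the sorted line-start table as a difference list: it writes the first `BytePos`, picks the narrowest of 1, 2 or 4 bytes that can hold the longest line, and then emits one fixed-width delta per remaining line; decoding rebuilds the absolute positions by summing. A standalone sketch of that round trip over plain `u32` offsets (hypothetical helper names, no serializer plumbing, assumes at least one line):

```rust
// Mirror of the encode side: (first line start, delta width in bytes, deltas).
fn encode_lines(lines: &[u32]) -> (u32, u8, Vec<u32>) {
    let max_line_length = if lines.len() <= 1 {
        0
    } else {
        lines.windows(2).map(|w| w[1] - w[0]).max().unwrap()
    };
    // Same width selection as the `0 ... 0xFF` match in FileMap::encode.
    let bytes_per_diff: u8 = match max_line_length {
        0..=0xFF => 1,
        0x100..=0xFFFF => 2,
        _ => 4,
    };
    let diffs = lines.windows(2).map(|w| w[1] - w[0]).collect();
    (lines[0], bytes_per_diff, diffs)
}

// Mirror of the decode loop: rebuild absolute line starts from the deltas.
fn decode_lines(first: u32, diffs: &[u32]) -> Vec<u32> {
    let mut lines = Vec::with_capacity(diffs.len() + 1);
    let mut line_start = first;
    lines.push(line_start);
    for &diff in diffs {
        line_start += diff;
        lines.push(line_start);
    }
    lines
}

fn main() {
    // Line-start table of a small file whose longest line is 27 bytes.
    let lines = vec![0u32, 14, 41, 55, 60];
    let (first, width, diffs) = encode_lines(&lines);
    assert_eq!(width, 1); // every delta fits in a single byte
    assert_eq!(decode_lines(first, &diffs), lines);
}
```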
+#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] +pub struct CharPos(pub usize); + +// FIXME: Lots of boilerplate in these impls, but so far my attempts to fix +// have been unsuccessful + +impl Pos for BytePos { + fn from_usize(n: usize) -> BytePos { BytePos(n as u32) } + fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize } +} + +impl Add for BytePos { + type Output = BytePos; + + fn add(self, rhs: BytePos) -> BytePos { + BytePos((self.to_usize() + rhs.to_usize()) as u32) + } +} + +impl Sub for BytePos { + type Output = BytePos; + + fn sub(self, rhs: BytePos) -> BytePos { + BytePos((self.to_usize() - rhs.to_usize()) as u32) + } +} + +impl Encodable for BytePos { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_u32(self.0) + } +} + +impl Decodable for BytePos { + fn decode(d: &mut D) -> Result { + Ok(BytePos(d.read_u32()?)) + } +} + +impl Pos for CharPos { + fn from_usize(n: usize) -> CharPos { CharPos(n) } + fn to_usize(&self) -> usize { let CharPos(n) = *self; n } +} + +impl Add for CharPos { + type Output = CharPos; + + fn add(self, rhs: CharPos) -> CharPos { + CharPos(self.to_usize() + rhs.to_usize()) + } +} + +impl Sub for CharPos { + type Output = CharPos; + + fn sub(self, rhs: CharPos) -> CharPos { + CharPos(self.to_usize() - rhs.to_usize()) + } +} + +// _____________________________________________________________________________ +// Loc, LocWithOpt, FileMapAndLine, FileMapAndBytePos +// + +/// A source code location used for error reporting +#[derive(Debug)] +pub struct Loc { + /// Information about the original source + pub file: Rc, + /// The (1-based) line number + pub line: usize, + /// The (0-based) column offset + pub col: CharPos +} + +/// A source code location used as the result of lookup_char_pos_adj +// Actually, *none* of the clients use the filename *or* file field; +// perhaps they should just be removed. +#[derive(Debug)] +pub struct LocWithOpt { + pub filename: FileName, + pub line: usize, + pub col: CharPos, + pub file: Option>, +} + +// used to be structural records. Better names, anyone? +#[derive(Debug)] +pub struct FileMapAndLine { pub fm: Rc, pub line: usize } +#[derive(Debug)] +pub struct FileMapAndBytePos { pub fm: Rc, pub pos: BytePos } + +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct LineInfo { + /// Index of line, starting from 0. + pub line_index: usize, + + /// Column in line where span begins, starting from 0. + pub start_col: CharPos, + + /// Column in line where span ends, starting from 0, exclusive. + pub end_col: CharPos, +} + +pub struct FileLines { + pub file: Rc, + pub lines: Vec +} + +thread_local!(pub static SPAN_DEBUG: Cell fmt::Result> = + Cell::new(default_span_debug)); + +/* assuming that we're not in macro expansion */ +pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { + Span {lo: lo, hi: hi, expn_id: NO_EXPANSION} +} + +pub struct MacroBacktrace { + /// span where macro was applied to generate this code + pub call_site: Span, + + /// name of macro that was applied (e.g., "foo!" 
+pub struct MacroBacktrace { + /// span where macro was applied to generate this code + pub call_site: Span, + + /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]") + pub macro_decl_name: String, + + /// span where macro was defined (if known) + pub def_site_span: Option<Span>, +} + +// _____________________________________________________________________________ +// SpanLinesError, SpanSnippetError, DistinctSources, MalformedCodemapPositions +// + +pub type FileLinesResult = Result<FileLines, SpanLinesError>; + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum SpanLinesError { + IllFormedSpan(Span), + DistinctSources(DistinctSources), +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum SpanSnippetError { + IllFormedSpan(Span), + DistinctSources(DistinctSources), + MalformedForCodemap(MalformedCodemapPositions), + SourceNotAvailable { filename: String } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct DistinctSources { + pub begin: (String, BytePos), + pub end: (String, BytePos) +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct MalformedCodemapPositions { + pub name: String, + pub source_len: usize, + pub begin_pos: BytePos, + pub end_pos: BytePos +} + diff --git a/src/rustc/Cargo.lock b/src/rustc/Cargo.lock index 3e8277e28ba73..9c6ed991df8a6 100644 --- a/src/rustc/Cargo.lock +++ b/src/rustc/Cargo.lock @@ -62,9 +62,11 @@ dependencies = [ "rustc_bitflags 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", "rustc_llvm 0.0.0", "serialize 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -88,8 +90,10 @@ dependencies = [ "log 0.0.0", "rustc 0.0.0", "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", "rustc_mir 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -103,6 +107,7 @@ dependencies = [ "rustc_const_math 0.0.0", "serialize 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -134,6 +139,7 @@ dependencies = [ "rustc_back 0.0.0", "rustc_borrowck 0.0.0", "rustc_const_eval 0.0.0", + "rustc_errors 0.0.0", "rustc_incremental 0.0.0", "rustc_lint 0.0.0", "rustc_llvm 0.0.0", @@ -149,6 +155,16 @@ "serialize 0.0.0", "syntax 0.0.0", "syntax_ext 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_errors" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "serialize 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -162,6 +178,7 @@ dependencies = [ "rustc_data_structures 0.0.0", "serialize 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -173,6 +190,7 @@ dependencies = [ "rustc_back 0.0.0", "rustc_const_eval 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -195,9 +213,11 @@ dependencies = [ "rustc_back 0.0.0", "rustc_bitflags 0.0.0", "rustc_const_math 0.0.0", + "rustc_errors 0.0.0", "rustc_llvm 0.0.0", "serialize 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -213,6 +233,7 @@ dependencies = [ "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -223,7 +244,9 @@ dependencies = [ "rustc 0.0.0", "rustc_const_eval 0.0.0", "rustc_const_math 0.0.0", + "rustc_errors 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -238,8 +261,10 @@ dependencies = [ "rustc 0.0.0", "rustc_back 0.0.0", "rustc_bitflags 0.0.0", + "rustc_errors 0.0.0", "rustc_metadata 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -248,6 +273,7 @@ version = "0.0.0" dependencies = [ "rustc 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -257,7 +283,9 @@ dependencies = [ "arena 0.0.0", "log 0.0.0", "rustc 0.0.0", + "rustc_errors 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -268,6 +296,7 @@ dependencies = [ "rustc 0.0.0", "serialize 0.0.0",
"syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -283,11 +312,13 @@ dependencies = [ "rustc_const_eval 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", "rustc_incremental 0.0.0", "rustc_llvm 0.0.0", "rustc_platform_intrinsics 0.0.0", "serialize 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -301,8 +332,10 @@ dependencies = [ "rustc_back 0.0.0", "rustc_const_eval 0.0.0", "rustc_const_math 0.0.0", + "rustc_errors 0.0.0", "rustc_platform_intrinsics 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -317,12 +350,14 @@ dependencies = [ "rustc_back 0.0.0", "rustc_const_eval 0.0.0", "rustc_driver 0.0.0", + "rustc_errors 0.0.0", "rustc_lint 0.0.0", "rustc_metadata 0.0.0", "rustc_resolve 0.0.0", "rustc_trans 0.0.0", "serialize 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -338,7 +373,9 @@ version = "0.0.0" dependencies = [ "log 0.0.0", "rustc_bitflags 0.0.0", + "rustc_errors 0.0.0", "serialize 0.0.0", + "syntax_pos 0.0.0", ] [[package]] @@ -347,6 +384,15 @@ version = "0.0.0" dependencies = [ "fmt_macros 0.0.0", "log 0.0.0", + "rustc_errors 0.0.0", "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "syntax_pos" +version = "0.0.0" +dependencies = [ + "serialize 0.0.0", ] diff --git a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs index 3516f566e8a1f..a6bc9db199c8b 100644 --- a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs +++ b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs @@ -13,14 +13,16 @@ #![feature(plugin_registrar, quote, rustc_private)] extern crate syntax; +extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; -use syntax::ast::{self, TokenTree, Item, MetaItem, ImplItem, TraitItem, ItemKind}; -use syntax::codemap::Span; +use syntax::ast::{self, Item, MetaItem, ImplItem, TraitItem, ItemKind}; use syntax::ext::base::*; use syntax::parse::{self, token}; use syntax::ptr::P; +use syntax::tokenstream::TokenTree; +use syntax_pos::Span; use rustc_plugin::Registry; #[macro_export] diff --git a/src/test/compile-fail-fulldeps/gated-quote.rs b/src/test/compile-fail-fulldeps/gated-quote.rs index cd801fbcd889b..dade0e946c5bf 100644 --- a/src/test/compile-fail-fulldeps/gated-quote.rs +++ b/src/test/compile-fail-fulldeps/gated-quote.rs @@ -22,8 +22,8 @@ extern crate syntax; use syntax::ast; -use syntax::codemap::Span; use syntax::parse; +use syntax_pos::Span; struct ParseSess; diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index 89a4869bd6975..e29ded8a052c6 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -13,11 +13,12 @@ #![feature(quote, rustc_private)] extern crate syntax; +extern crate syntax_pos; use syntax::ast; -use syntax::codemap::{self, DUMMY_SP}; use syntax::parse; use syntax::print::pprust; +use syntax_pos::DUMMY_SP; fn main() { let ps = syntax::parse::ParseSess::new(); diff --git a/src/test/compile-fail/bad-format-args.rs b/src/test/compile-fail/bad-format-args.rs index 816c696a89566..8c58c8c60627d 100644 --- a/src/test/compile-fail/bad-format-args.rs +++ b/src/test/compile-fail/bad-format-args.rs @@ -9,11 +9,11 @@ // except according to those terms. 
// error-pattern: requires at least a format string argument -// error-pattern: bad-format-args.rs:19:5: 19:15 note: in this expansion +// error-pattern: in this expansion // error-pattern: expected token: `,` -// error-pattern: bad-format-args.rs:20:5: 20:19 note: in this expansion -// error-pattern: bad-format-args.rs:21:5: 21:22 note: in this expansion +// error-pattern: in this expansion +// error-pattern: in this expansion fn main() { format!(); diff --git a/src/test/compile-fail/issue-34418.rs b/src/test/compile-fail/issue-34418.rs new file mode 100644 index 0000000000000..6bc0add220517 --- /dev/null +++ b/src/test/compile-fail/issue-34418.rs @@ -0,0 +1,31 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(rustc_attrs)] +#![allow(unused)] + +macro_rules! make_item { + () => { fn f() {} } +} + +macro_rules! make_stmt { + () => { let x = 0; } +} + +fn f() { + make_item! {} +} + +fn g() { + make_stmt! {} +} + +#[rustc_error] +fn main() {} //~ ERROR compilation successful diff --git a/src/test/compile-fail/method-resolvable-path-in-pattern.rs b/src/test/compile-fail/method-resolvable-path-in-pattern.rs index 1cba64ccf2cde..3ae792f9c0f37 100644 --- a/src/test/compile-fail/method-resolvable-path-in-pattern.rs +++ b/src/test/compile-fail/method-resolvable-path-in-pattern.rs @@ -19,6 +19,6 @@ impl MyTrait for Foo {} fn main() { match 0u32 { <Foo as MyTrait>::trait_bar => {} - //~^ ERROR expected associated constant, found method `trait_bar` + //~^ ERROR expected variant, struct or constant, found method `trait_bar` } } diff --git a/src/test/parse-fail/trait-non-item-macros.rs b/src/test/parse-fail/trait-non-item-macros.rs new file mode 100644 index 0000000000000..fd356f4a81792 --- /dev/null +++ b/src/test/parse-fail/trait-non-item-macros.rs @@ -0,0 +1,20 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules!
bah { + ($a:expr) => ($a) + //~^ ERROR expected one of `const`, `extern`, `fn`, `type`, or `unsafe`, found `2` +} + +trait bar { + bah!(2); +} + +fn main() {} diff --git a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index 560b742f8a6c7..e1461c7847e4c 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -15,11 +15,13 @@ #![feature(quote, rustc_private)] extern crate syntax; +extern crate syntax_pos; use syntax::ast; -use syntax::codemap::{self, DUMMY_SP}; +use syntax::codemap; use syntax::parse; use syntax::print::pprust; +use syntax_pos::DUMMY_SP; fn main() { let ps = syntax::parse::ParseSess::new(); diff --git a/src/test/run-make/execution-engine/test.rs b/src/test/run-make/execution-engine/test.rs index 8a7959212f54a..a94b2a85c7754 100644 --- a/src/test/run-make/execution-engine/test.rs +++ b/src/test/run-make/execution-engine/test.rs @@ -18,6 +18,8 @@ extern crate rustc_lint; extern crate rustc_llvm as llvm; extern crate rustc_metadata; extern crate rustc_resolve; +extern crate rustc_errors; +extern crate rustc_errors as errors; #[macro_use] extern crate syntax; use std::ffi::{CStr, CString}; @@ -38,7 +40,7 @@ use rustc_metadata::creader::read_local_crates; use rustc_metadata::cstore::CStore; use libc::c_void; -use syntax::diagnostics::registry::Registry; +use rustc_errors::registry::Registry; use syntax::parse::token; fn main() { diff --git a/src/test/run-make/issue-19371/foo.rs b/src/test/run-make/issue-19371/foo.rs index 41d250eadec4e..aa3495ec5eebb 100644 --- a/src/test/run-make/issue-19371/foo.rs +++ b/src/test/run-make/issue-19371/foo.rs @@ -14,6 +14,7 @@ extern crate rustc; extern crate rustc_driver; extern crate rustc_lint; extern crate rustc_metadata; +extern crate rustc_errors; extern crate syntax; use rustc::dep_graph::DepGraph; @@ -21,7 +22,7 @@ use rustc::session::{build_session, Session}; use rustc::session::config::{basic_options, build_configuration, Input, OutputType}; use rustc_driver::driver::{compile_input, CompileController, anon_src}; use rustc_metadata::cstore::CStore; -use syntax::diagnostics::registry::Registry; +use rustc_errors::registry::Registry; use syntax::parse::token; use std::path::PathBuf; diff --git a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs index ed971faf8c6a1..64747002a65b0 100644 --- a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs +++ b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs @@ -86,7 +86,7 @@ fn check_expr_attrs(es: &str, expected: &[&str]) { let actual = &e.attrs; str_compare(es, &expected.iter().map(|r| attr(r, &ps).unwrap()).collect::>(), - actual.as_attr_slice(), + &actual, pprust::attribute_to_string); } diff --git a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs index 0132014de0ab5..42135703b75a4 100644 --- a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs @@ -16,17 +16,18 @@ extern crate syntax; extern crate syntax_ext; +extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; use syntax::ast; -use syntax::codemap::Span; use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::ptr::P; use syntax_ext::deriving::generic::{cs_fold, TraitDef, MethodDef, combine_substructure}; use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, 
borrowed_explicit_self}; +use syntax_pos::Span; use rustc_plugin::Registry; #[plugin_registrar] diff --git a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs index 6fa78913839b7..eeecd0b24e29e 100644 --- a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs @@ -16,12 +16,12 @@ extern crate syntax; extern crate syntax_ext; +extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; use syntax::ast; use syntax::attr::AttrMetaMethods; -use syntax::codemap::Span; use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token; @@ -29,6 +29,7 @@ use syntax::ptr::P; use syntax_ext::deriving::generic::{cs_fold, TraitDef, MethodDef, combine_substructure}; use syntax_ext::deriving::generic::{Substructure, Struct, EnumMatching}; use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, borrowed_explicit_self}; +use syntax_pos::Span; use rustc_plugin::Registry; #[plugin_registrar] diff --git a/src/test/run-pass-fulldeps/auxiliary/issue_16723_multiple_items_syntax_ext.rs b/src/test/run-pass-fulldeps/auxiliary/issue_16723_multiple_items_syntax_ext.rs index 25a75c2d2952e..7f8a741465b30 100644 --- a/src/test/run-pass-fulldeps/auxiliary/issue_16723_multiple_items_syntax_ext.rs +++ b/src/test/run-pass-fulldeps/auxiliary/issue_16723_multiple_items_syntax_ext.rs @@ -16,11 +16,12 @@ extern crate syntax; extern crate rustc; extern crate rustc_plugin; +extern crate syntax_pos; use syntax::ast; -use syntax::codemap; use syntax::ext::base::{ExtCtxt, MacResult, MacEager}; use syntax::util::small_vector::SmallVector; +use syntax::tokenstream; use rustc_plugin::Registry; #[plugin_registrar] @@ -28,7 +29,8 @@ pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("multiple_items", expand) } -fn expand(cx: &mut ExtCtxt, _: codemap::Span, _: &[ast::TokenTree]) -> Box { +fn expand(cx: &mut ExtCtxt, _: syntax_pos::Span, _: &[tokenstream::TokenTree]) + -> Box { MacEager::items(SmallVector::many(vec![ quote_item!(cx, struct Struct1;).unwrap(), quote_item!(cx, struct Struct2;).unwrap() diff --git a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs index a22c3ba484926..11d81eda55625 100644 --- a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs +++ b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs @@ -15,12 +15,14 @@ extern crate syntax; extern crate rustc; extern crate rustc_plugin; +extern crate syntax_pos; -use syntax::ast::{self, TokenTree, Item, MetaItem, ImplItem, TraitItem, ItemKind}; -use syntax::codemap::Span; +use syntax::ast::{self, Item, MetaItem, ImplItem, TraitItem, ItemKind}; use syntax::ext::base::*; use syntax::parse::{self, token}; use syntax::ptr::P; +use syntax::tokenstream::TokenTree; +use syntax_pos::Span; use rustc_plugin::Registry; #[macro_export] diff --git a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs index 99321ad42418d..f0edc0f2b120f 100644 --- a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs +++ b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs @@ -14,17 +14,19 @@ #![feature(box_syntax, rustc_private)] extern crate syntax; +extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; use std::borrow::ToOwned; use syntax::ast; -use syntax::codemap::Span; use 
syntax::ext::build::AstBuilder; use syntax::ext::base::{TTMacroExpander, ExtCtxt, MacResult, MacEager, NormalTT}; use syntax::parse::token; use syntax::print::pprust; use syntax::ptr::P; +use syntax_pos::Span; +use syntax::tokenstream; use rustc_plugin::Registry; struct Expander { @@ -35,7 +37,7 @@ impl TTMacroExpander for Expander { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, sp: Span, - _: &[ast::TokenTree]) -> Box { + _: &[tokenstream::TokenTree]) -> Box { let args = self.args.iter().map(|i| pprust::meta_item_to_string(&*i)) .collect::>().join(", "); let interned = token::intern_and_get_ident(&args[..]); diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs index 713a7d1e811a2..5b1e210b0b258 100644 --- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs +++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs @@ -14,17 +14,19 @@ #![feature(plugin_registrar, quote, rustc_private)] extern crate syntax; +extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; -use syntax::codemap::Span; use syntax::parse::token::{self, str_to_ident, NtExpr, NtPat}; -use syntax::ast::{TokenTree, Pat}; +use syntax::ast::{Pat}; +use syntax::tokenstream::{TokenTree}; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager}; use syntax::ext::build::AstBuilder; use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use syntax::ext::tt::macro_parser::{Success, Failure, Error}; use syntax::ptr::P; +use syntax_pos::Span; use rustc_plugin::Registry; fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) diff --git a/src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs b/src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs index 839ece49c3eb5..0c8af013fd12d 100644 --- a/src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs +++ b/src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs @@ -15,14 +15,15 @@ #![feature(slice_patterns)] extern crate syntax; +extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; -use syntax::codemap::Span; -use syntax::ast::TokenTree; use syntax::parse::token; +use syntax::tokenstream::TokenTree; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager}; use syntax::ext::build::AstBuilder; // trait for expr_usize +use syntax_pos::Span; use rustc_plugin::Registry; // WARNING WARNING WARNING WARNING WARNING diff --git a/src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_2.rs b/src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_2.rs index 7281698a7fb34..72d2628535556 100644 --- a/src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_2.rs +++ b/src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_2.rs @@ -15,12 +15,14 @@ extern crate syntax_extension_with_dll_deps_1 as other; extern crate syntax; +extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; -use syntax::ast::{TokenTree, Item, MetaItem}; -use syntax::codemap::Span; +use syntax::ast::{Item, MetaItem}; use syntax::ext::base::*; +use syntax::tokenstream::TokenTree; +use syntax_pos::Span; use rustc_plugin::Registry; #[plugin_registrar] diff --git a/src/test/run-pass-fulldeps/compiler-calls.rs b/src/test/run-pass-fulldeps/compiler-calls.rs index af641d717edd9..ff57e9d6b7368 100644 --- a/src/test/run-pass-fulldeps/compiler-calls.rs +++ b/src/test/run-pass-fulldeps/compiler-calls.rs @@ -19,11 +19,11 @@ extern crate getopts; extern crate rustc; extern crate 
rustc_driver; extern crate syntax; +extern crate rustc_errors as errors; use rustc::session::Session; use rustc::session::config::{self, Input}; use rustc_driver::{driver, CompilerCalls, Compilation}; -use syntax::{diagnostics, errors}; use std::path::PathBuf; @@ -35,7 +35,7 @@ impl<'a> CompilerCalls<'a> for TestCalls { fn early_callback(&mut self, _: &getopts::Matches, _: &config::Options, - _: &diagnostics::registry::Registry, + _: &errors::registry::Registry, _: config::ErrorOutputType) -> Compilation { self.count *= 2; @@ -64,7 +64,7 @@ _: &config::Options, _: &Option<PathBuf>, _: &Option<PathBuf>, - _: &diagnostics::registry::Registry) + _: &errors::registry::Registry) -> Option<(Input, Option<PathBuf>)> { panic!("This shouldn't happen"); } diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 65c642a1eca59..a4f0e35cc5ac7 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -13,10 +13,11 @@ #![feature(quote, rustc_private)] extern crate syntax; +extern crate syntax_pos; -use syntax::codemap::DUMMY_SP; use syntax::print::pprust::*; use syntax::parse::token::intern; +use syntax_pos::DUMMY_SP; fn main() { let ps = syntax::parse::ParseSess::new(); diff --git a/src/test/run-pass-fulldeps/quote-tokens.rs b/src/test/run-pass-fulldeps/quote-tokens.rs index 4397da35d7a34..710e2fd1d07a3 100644 --- a/src/test/run-pass-fulldeps/quote-tokens.rs +++ b/src/test/run-pass-fulldeps/quote-tokens.rs @@ -20,8 +20,8 @@ use syntax::ptr::P; use syntax::parse::PResult; fn syntax_extension(cx: &ExtCtxt) { - let e_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, 1 + 2); - let p_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, (x, 1 .. 4, *)); + let e_toks : Vec<syntax::tokenstream::TokenTree> = quote_tokens!(cx, 1 + 2); + let p_toks : Vec<syntax::tokenstream::TokenTree> = quote_tokens!(cx, (x, 1 .. 4, *)); let a: P<syntax::ast::Expr> = quote_expr!(cx, 1 + 2); let _b: Option<P<syntax::ast::Item>> = quote_item!(cx, static foo : isize = $e_toks; ); @@ -39,7 +39,7 @@ fn syntax_extension(cx: &ExtCtxt) { let _l: P<syntax::ast::Ty> = quote_ty!(cx, &isize); - let _m: Vec<syntax::ast::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar); + let _m: Vec<syntax::tokenstream::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar); let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]); let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo() {}); diff --git a/src/test/run-pass/trait-item-inside-macro.rs b/src/test/run-pass/trait-item-inside-macro.rs new file mode 100644 index 0000000000000..7c13576120b44 --- /dev/null +++ b/src/test/run-pass/trait-item-inside-macro.rs @@ -0,0 +1,39 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Issue #34183 + +macro_rules! foo { + () => { + fn foo() { } + } +} + +macro_rules! bar { + () => { + fn bar(); + } +} + +trait Bleh { + foo!(); + bar!(); +} + +struct Test; + +impl Bleh for Test { + fn bar() {} +} + +fn main() { + Test::bar(); + Test::foo(); +} diff --git a/src/tools/compiletest/src/json.rs b/src/tools/compiletest/src/json.rs index 84b78547ab9a4..e5b628bb00295 100644 --- a/src/tools/compiletest/src/json.rs +++ b/src/tools/compiletest/src/json.rs @@ -15,7 +15,7 @@ use std::path::Path; use runtest::{ProcRes}; // These structs are a subset of the ones found in -// `syntax::errors::json`. +// `syntax::json`. #[derive(RustcEncodable, RustcDecodable)] struct Diagnostic {