From 1979f96549fc41b544d2bf05eb868f26941f2b25 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Thu, 16 Mar 2017 10:23:33 +0000 Subject: [PATCH 1/5] Move `syntax::ext::hygiene` to `syntax_pos::hygiene`. --- src/librustc/hir/map/def_collector.rs | 2 +- src/librustc_resolve/build_reduced_graph.rs | 2 +- src/libsyntax/ast.rs | 10 +++++++++- src/libsyntax/ext/expand.rs | 6 +++--- src/libsyntax/ext/placeholders.rs | 4 ++-- src/libsyntax/lib.rs | 2 +- src/{libsyntax/ext => libsyntax_pos}/hygiene.rs | 13 ++++--------- src/libsyntax_pos/lib.rs | 3 +++ 8 files changed, 24 insertions(+), 18 deletions(-) rename src/{libsyntax/ext => libsyntax_pos}/hygiene.rs (95%) diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index cae358a303e02..afdb9059ea7c0 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -92,7 +92,7 @@ impl<'a> DefCollector<'a> { fn visit_macro_invoc(&mut self, id: NodeId, const_expr: bool) { if let Some(ref mut visit) = self.visit_macro_invoc { visit(MacroInvocationData { - mark: Mark::from_placeholder_id(id), + mark: id.placeholder_to_mark(), const_expr: const_expr, def_index: self.parent_def.unwrap(), }) diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 86e0d0039d1a7..a15431afc164b 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -680,7 +680,7 @@ pub struct BuildReducedGraphVisitor<'a, 'b: 'a> { impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { fn visit_invoc(&mut self, id: ast::NodeId) -> &'b InvocationData<'b> { - let mark = Mark::from_placeholder_id(id); + let mark = id.placeholder_to_mark(); self.resolver.current_module.unresolved_invocations.borrow_mut().insert(mark); let invocation = self.resolver.invocations[&mark]; invocation.module.set(self.resolver.current_module); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 3dd4bdbd14ddb..7e2b225193f6d 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -20,7 +20,7 @@ pub use util::ThinVec; use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP, ExpnId}; use codemap::{respan, Spanned}; use abi::Abi; -use ext::hygiene::SyntaxContext; +use ext::hygiene::{Mark, SyntaxContext}; use print::pprust; use ptr::P; use rustc_data_structures::indexed_vec; @@ -256,6 +256,14 @@ impl NodeId { pub fn as_u32(&self) -> u32 { self.0 } + + pub fn placeholder_from_mark(mark: Mark) -> Self { + NodeId(mark.as_u32()) + } + + pub fn placeholder_to_mark(self) -> Mark { + Mark::from_u32(self.0) + } } impl fmt::Display for NodeId { diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 6abeb4b0b2805..e258c51a3295f 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
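(A minimal sketch, not part of the patch: with hygiene moved into `syntax_pos`, the placeholder conversions added to `NodeId` above replace `Mark::from_placeholder_id`/`as_placeholder_id`. The paths and the use of `Mark::fresh()` are assumptions about typical caller code, not lines from the diff.)

    use syntax::ast::NodeId;
    use syntax::ext::hygiene::Mark;

    fn placeholder_roundtrip() {
        let mark = Mark::fresh();                              // hypothetical invocation mark
        let placeholder = NodeId::placeholder_from_mark(mark); // NodeId(mark.as_u32())
        assert_eq!(placeholder.placeholder_to_mark().as_u32(), mark.as_u32());
    }
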
-use ast::{self, Block, Ident, PatKind, Path}; +use ast::{self, Block, Ident, NodeId, PatKind, Path}; use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; @@ -321,7 +321,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { while let Some(expansions) = expansions.pop() { for (mark, expansion) in expansions.into_iter().rev() { let derives = derives.remove(&mark).unwrap_or_else(Vec::new); - placeholder_expander.add(mark.as_placeholder_id(), expansion, derives); + placeholder_expander.add(NodeId::placeholder_from_mark(mark), expansion, derives); } } @@ -703,7 +703,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { ..self.cx.current_expansion.clone() }, }); - placeholder(expansion_kind, mark.as_placeholder_id()) + placeholder(expansion_kind, NodeId::placeholder_from_mark(mark)) } fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: ExpansionKind) -> Expansion { diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index 2d0994a7b78fb..4fb138d506a8e 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, NodeId}; use codemap::{DUMMY_SP, dummy_spanned}; use ext::base::ExtCtxt; use ext::expand::{Expansion, ExpansionKind}; @@ -88,7 +88,7 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> { let mut expansion = expansion.fold_with(self); if let Expansion::Items(mut items) = expansion { for derive in derives { - match self.remove(derive.as_placeholder_id()) { + match self.remove(NodeId::placeholder_from_mark(derive)) { Expansion::Items(derived_items) => items.extend(derived_items), _ => unreachable!(), } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 4c9a5d512af02..6c975f3fc4021 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -136,12 +136,12 @@ pub mod print { } pub mod ext { + pub use syntax_pos::hygiene; pub mod base; pub mod build; pub mod derive; pub mod expand; pub mod placeholders; - pub mod hygiene; pub mod quote; pub mod source_util; diff --git a/src/libsyntax/ext/hygiene.rs b/src/libsyntax_pos/hygiene.rs similarity index 95% rename from src/libsyntax/ext/hygiene.rs rename to src/libsyntax_pos/hygiene.rs index 57f5ab73d3706..feebbcd6f03b6 100644 --- a/src/libsyntax/ext/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -15,7 +15,6 @@ //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! 
DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 -use ast::NodeId; use std::cell::RefCell; use std::collections::HashMap; use std::fmt; @@ -47,17 +46,13 @@ impl Mark { Mark(0) } - pub fn from_placeholder_id(id: NodeId) -> Self { - Mark(id.as_u32()) - } - - pub fn as_placeholder_id(self) -> NodeId { - NodeId::from_u32(self.0) - } - pub fn as_u32(self) -> u32 { self.0 } + + pub fn from_u32(raw: u32) -> Mark { + Mark(raw) + } } struct HygieneData { diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 3808923e7728f..1c9a05dadd15f 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -23,6 +23,7 @@ html_root_url = "https://doc.rust-lang.org/nightly/")] #![deny(warnings)] +#![feature(const_fn)] #![feature(custom_attribute)] #![allow(unused_attributes)] #![feature(rustc_private)] @@ -41,6 +42,8 @@ use serialize::{Encodable, Decodable, Encoder, Decoder}; extern crate serialize; extern crate serialize as rustc_serialize; // used by deriving +pub mod hygiene; + pub type FileName = String; /// Spans represent a region of code, used for error reporting. Positions in spans From 496996c2af6174cb83a65756249d289f315dff80 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Thu, 16 Mar 2017 10:31:36 +0000 Subject: [PATCH 2/5] Remove code in `syntax::codemap`. --- src/libsyntax/codemap.rs | 185 --------------------------------------- src/libsyntax_pos/lib.rs | 4 - 2 files changed, 189 deletions(-) diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 0f4b844b0eac8..388f3cb732351 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -409,101 +409,6 @@ impl CodeMap { hi.col.to_usize() + 1)).to_string() } - // Returns true if two spans have the same callee - // (Assumes the same ExpnFormat implies same callee) - fn match_callees(&self, sp_a: &Span, sp_b: &Span) -> bool { - let fmt_a = self - .with_expn_info(sp_a.expn_id, - |ei| ei.map(|ei| ei.callee.format.clone())); - - let fmt_b = self - .with_expn_info(sp_b.expn_id, - |ei| ei.map(|ei| ei.callee.format.clone())); - fmt_a == fmt_b - } - - /// Returns a formatted string showing the expansion chain of a span - /// - /// Spans are printed in the following format: - /// - /// filename:start_line:col: end_line:col - /// snippet - /// Callee: - /// Callee span - /// Callsite: - /// Callsite span - /// - /// Callees and callsites are printed recursively (if available, otherwise header - /// and span is omitted), expanding into their own callee/callsite spans. - /// Each layer of recursion has an increased indent, and snippets are truncated - /// to at most 50 characters. Finally, recursive calls to the same macro are squashed, - /// with '...' used to represent any number of recursive calls. - pub fn span_to_expanded_string(&self, sp: Span) -> String { - self.span_to_expanded_string_internal(sp, "") - } - - fn span_to_expanded_string_internal(&self, sp:Span, indent: &str) -> String { - let mut indent = indent.to_owned(); - let mut output = "".to_owned(); - let span_str = self.span_to_string(sp); - let mut span_snip = self.span_to_snippet(sp) - .unwrap_or("Snippet unavailable".to_owned()); - - // Truncate by code points - in worst case this will be more than 50 characters, - // but ensures at least 50 characters and respects byte boundaries. 
- let char_vec: Vec<(usize, char)> = span_snip.char_indices().collect(); - if char_vec.len() > 50 { - span_snip.truncate(char_vec[49].0); - span_snip.push_str("..."); - } - - output.push_str(&format!("{}{}\n{}`{}`\n", indent, span_str, indent, span_snip)); - - if sp.expn_id == NO_EXPANSION || sp.expn_id == COMMAND_LINE_EXPN { - return output; - } - - let mut callee = self.with_expn_info(sp.expn_id, - |ei| ei.and_then(|ei| ei.callee.span.clone())); - let mut callsite = self.with_expn_info(sp.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())); - - indent.push_str(" "); - let mut is_recursive = false; - - while callee.is_some() && self.match_callees(&sp, &callee.unwrap()) { - callee = self.with_expn_info(callee.unwrap().expn_id, - |ei| ei.and_then(|ei| ei.callee.span.clone())); - is_recursive = true; - } - if let Some(span) = callee { - output.push_str(&indent); - output.push_str("Callee:\n"); - if is_recursive { - output.push_str(&indent); - output.push_str("...\n"); - } - output.push_str(&(self.span_to_expanded_string_internal(span, &indent))); - } - - is_recursive = false; - while callsite.is_some() && self.match_callees(&sp, &callsite.unwrap()) { - callsite = self.with_expn_info(callsite.unwrap().expn_id, - |ei| ei.map(|ei| ei.call_site.clone())); - is_recursive = true; - } - if let Some(span) = callsite { - output.push_str(&indent); - output.push_str("Callsite:\n"); - if is_recursive { - output.push_str(&indent); - output.push_str("...\n"); - } - output.push_str(&(self.span_to_expanded_string_internal(span, &indent))); - } - output - } - /// Return the source span - this is either the supplied span, or the span for /// the macro callsite that expanded to it. pub fn source_callsite(&self, sp: Span) -> Span { @@ -1069,59 +974,6 @@ mod tests { assert_eq!(sstr, "blork.rs:2:1: 2:12"); } - #[test] - fn t10() { - // Test span_to_expanded_string works in base case (no expansion) - let cm = init_code_map(); - let span = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - let sstr = cm.span_to_expanded_string(span); - assert_eq!(sstr, "blork.rs:1:1: 1:12\n`first line.`\n"); - - let span = Span { lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION }; - let sstr = cm.span_to_expanded_string(span); - assert_eq!(sstr, "blork.rs:2:1: 2:12\n`second line`\n"); - } - - #[test] - fn t11() { - // Test span_to_expanded_string works with expansion - let cm = init_code_map(); - let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - let format = ExpnFormat::MacroBang(keywords::Invalid.name()); - let callee = NameAndSpan { format: format, - allow_internal_unstable: false, - span: None }; - - let info = ExpnInfo { call_site: root, callee: callee }; - let id = cm.record_expansion(info); - let sp = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id }; - - let sstr = cm.span_to_expanded_string(sp); - assert_eq!(sstr, - "blork.rs:2:1: 2:12\n`second line`\n Callsite:\n \ - blork.rs:1:1: 1:12\n `first line.`\n"); - } - - /// Test merging two spans on the same line - #[test] - fn span_merging() { - let cm = CodeMap::new(); - let inputtext = "bbbb BB bb CCC\n"; - let selection1 = " ~~ \n"; - let selection2 = " ~~~\n"; - cm.new_filemap_and_lines("blork.rs", None, inputtext); - let span1 = span_from_selection(inputtext, selection1); - let span2 = span_from_selection(inputtext, selection2); - - if let Some(sp) = cm.merge_spans(span1, span2) { - let sstr = cm.span_to_expanded_string(sp); - assert_eq!(sstr, "blork.rs:1:6: 1:15\n`BB bb CCC`\n"); - } - else { - assert!(false); - } - } 
- /// Test failing to merge two spans on different lines #[test] fn span_merging_fail() { @@ -1221,41 +1073,4 @@ mod tests { let id_end = cm.record_expansion(info_end); Span { lo: BytePos(37), hi: BytePos(48), expn_id: id_end } } - - #[test] - fn t12() { - // Test span_to_expanded_string collapses recursive macros and handles - // recursive callsite and callee expansions - let cm = init_code_map(); - let end = init_expansion_chain(&cm); - let sstr = cm.span_to_expanded_string(end); - let res_str = -r"blork2.rs:2:1: 2:12 -`second line` - Callsite: - ... - blork2.rs:1:1: 1:12 - `first line.` - Callee: - blork.rs:2:1: 2:12 - `second line` - Callee: - blork.rs:1:1: 1:12 - `first line.` - Callsite: - blork.rs:1:1: 1:12 - `first line.` - Callsite: - ... - blork.rs:2:1: 2:12 - `second line` - Callee: - blork.rs:1:1: 1:12 - `first line.` - Callsite: - blork.rs:1:1: 1:12 - `first line.` -"; - assert_eq!(sstr, res_str); - } } diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 1c9a05dadd15f..1b62d62348bc8 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -263,10 +263,6 @@ pub const NO_EXPANSION: ExpnId = ExpnId(!0); // For code appearing from the command line pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); -// For code generated by a procedural macro, without knowing which -// Used in `qquote!` -pub const PROC_EXPN: ExpnId = ExpnId(!2); - impl ExpnId { pub fn from_u32(id: u32) -> ExpnId { ExpnId(id) From ec7c0aece17c9a11bc2eca15b994355a161bf878 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 17 Mar 2017 04:04:41 +0000 Subject: [PATCH 3/5] Merge `ExpnId` and `SyntaxContext`. --- src/librustc/hir/lowering.rs | 7 +- src/librustc/hir/mod.rs | 5 +- src/librustc/ich/caching_codemap_view.rs | 4 - src/librustc/middle/region.rs | 2 +- src/librustc/middle/stability.rs | 2 +- src/librustc_driver/driver.rs | 4 +- src/librustc_errors/emitter.rs | 22 +- src/librustc_errors/lib.rs | 4 +- .../calculate_svh/svh_visitor.rs | 17 +- src/librustc_mir/transform/qualify_consts.rs | 4 +- src/librustc_plugin/load.rs | 4 +- src/librustc_save_analysis/lib.rs | 7 +- src/librustc_save_analysis/span_utils.rs | 3 +- src/librustc_trans/asm.rs | 4 +- src/librustc_trans/back/write.rs | 6 +- src/librustc_trans/mir/mod.rs | 18 +- src/librustc_typeck/check/mod.rs | 5 +- src/libsyntax/ast.rs | 57 +--- src/libsyntax/codemap.rs | 291 +----------------- src/libsyntax/ext/base.rs | 74 ++--- src/libsyntax/ext/derive.rs | 50 +-- src/libsyntax/ext/expand.rs | 111 +++---- src/libsyntax/ext/source_util.rs | 2 +- src/libsyntax/ext/tt/quoted.rs | 14 +- src/libsyntax/feature_gate.rs | 20 +- src/libsyntax/json.rs | 2 +- src/libsyntax/lib.rs | 2 +- src/libsyntax/parse/parser.rs | 6 +- src/libsyntax/std_inject.rs | 21 +- src/libsyntax/test.rs | 21 +- src/libsyntax/test_snippet.rs | 2 +- src/libsyntax/tokenstream.rs | 16 +- src/libsyntax_ext/asm.rs | 12 +- src/libsyntax_ext/deriving/clone.rs | 2 +- src/libsyntax_ext/deriving/cmp/eq.rs | 2 +- src/libsyntax_ext/deriving/debug.rs | 4 +- src/libsyntax_ext/deriving/generic/mod.rs | 12 +- src/libsyntax_ext/deriving/mod.rs | 34 +- src/libsyntax_ext/format.rs | 3 +- src/libsyntax_ext/proc_macro_registrar.rs | 6 +- src/libsyntax_pos/hygiene.rs | 94 +++++- src/libsyntax_pos/lib.rs | 101 ++++-- src/{libsyntax => libsyntax_pos}/symbol.rs | 57 +++- src/test/compile-fail-fulldeps/qquote.rs | 8 - src/test/run-fail-fulldeps/qquote.rs | 8 - src/test/run-pass-fulldeps/qquote.rs | 8 - 46 files changed, 456 insertions(+), 702 deletions(-) rename src/{libsyntax => 
libsyntax_pos}/symbol.rs (87%) diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 6ca0c971ea497..786145f3091db 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -57,6 +57,7 @@ use std::mem; use syntax::attr; use syntax::ast::*; use syntax::errors; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ptr::P; use syntax::codemap::{self, respan, Spanned}; use syntax::std_inject; @@ -392,7 +393,8 @@ impl<'a> LoweringContext<'a> { } fn allow_internal_unstable(&self, reason: &'static str, mut span: Span) -> Span { - span.expn_id = self.sess.codemap().record_expansion(codemap::ExpnInfo { + let mark = Mark::fresh(); + mark.set_expn_info(codemap::ExpnInfo { call_site: span, callee: codemap::NameAndSpan { format: codemap::CompilerDesugaring(Symbol::intern(reason)), @@ -400,6 +402,7 @@ impl<'a> LoweringContext<'a> { allow_internal_unstable: true, }, }); + span.ctxt = SyntaxContext::empty().apply_mark(mark); span } @@ -1986,7 +1989,7 @@ impl<'a> LoweringContext<'a> { volatile: asm.volatile, alignstack: asm.alignstack, dialect: asm.dialect, - expn_id: asm.expn_id, + ctxt: asm.ctxt, }; let outputs = asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(); diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index f4f2f4cf9211b..da7e71ac07d21 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -33,11 +33,12 @@ use hir::def::Def; use hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; use util::nodemap::{NodeMap, FxHashSet}; -use syntax_pos::{Span, ExpnId, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use syntax::codemap::{self, Spanned}; use syntax::abi::Abi; use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; +use syntax::ext::hygiene::SyntaxContext; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; use syntax::tokenstream::TokenStream; @@ -1367,7 +1368,7 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - pub expn_id: ExpnId, + pub ctxt: SyntaxContext, } /// represents an argument in a function header diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index a71251eedf5d0..1278d9f5171b3 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -47,10 +47,6 @@ impl<'tcx> CachingCodemapView<'tcx> { } } - pub fn codemap(&self) -> &'tcx CodeMap { - self.codemap - } - pub fn byte_pos_to_line_and_col(&mut self, pos: BytePos) -> Option<(Rc, usize, BytePos)> { diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index a19f15a9329fb..0676075930dc3 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -236,7 +236,7 @@ impl CodeExtent { // (This is the special case aluded to in the // doc-comment for this method) let stmt_span = blk.stmts[r.first_statement_index as usize].span; - Some(Span { lo: stmt_span.hi, hi: blk.span.hi, expn_id: stmt_span.expn_id }) + Some(Span { lo: stmt_span.hi, hi: blk.span.hi, ctxt: stmt_span.ctxt }) } } } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 4354ed6817ae9..2b5ea61d4e854 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -467,7 +467,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn check_stability(self, def_id: DefId, id: NodeId, span: Span) { - if self.sess.codemap().span_allows_unstable(span) { + if 
span.allows_unstable() { debug!("stability: \ skipping span={:?} since it is internal", span); return; diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 4873b21c54874..977382b33adf7 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -580,7 +580,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, krate = time(time_passes, "crate injection", || { let alt_std_name = sess.opts.alt_std_name.clone(); - syntax::std_inject::maybe_inject_crates_ref(&sess.parse_sess, krate, alt_std_name) + syntax::std_inject::maybe_inject_crates_ref(krate, alt_std_name) }); let mut addl_plugins = Some(addl_plugins); @@ -798,7 +798,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, // Discard hygiene data, which isn't required after lowering to HIR. if !keep_hygiene_data(sess) { - syntax::ext::hygiene::reset_hygiene_data(); + syntax::ext::hygiene::clear_markings(); } Ok(ExpansionResult { diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 431edb3c9bc4d..367b85ac726db 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -10,7 +10,7 @@ use self::Destination::*; -use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, FileMap, Span, MultiSpan, CharPos}; +use syntax_pos::{DUMMY_SP, FileMap, Span, MultiSpan, CharPos}; use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, CodeMapper}; use RenderSpan::*; @@ -151,7 +151,7 @@ impl EmitterWriter { if let Some(ref cm) = self.cm { for span_label in msp.span_labels() { - if span_label.span == DUMMY_SP || span_label.span == COMMAND_LINE_SP { + if span_label.span == DUMMY_SP { continue; } let lo = cm.lookup_char_pos(span_label.span.lo); @@ -615,7 +615,7 @@ impl EmitterWriter { let mut max = 0; if let Some(ref cm) = self.cm { for primary_span in msp.primary_spans() { - if primary_span != &DUMMY_SP && primary_span != &COMMAND_LINE_SP { + if primary_span != &DUMMY_SP { let hi = cm.lookup_char_pos(primary_span.hi); if hi.line > max { max = hi.line; @@ -623,7 +623,7 @@ impl EmitterWriter { } } for span_label in msp.span_labels() { - if span_label.span != DUMMY_SP && span_label.span != COMMAND_LINE_SP { + if span_label.span != DUMMY_SP { let hi = cm.lookup_char_pos(span_label.span.hi); if hi.line > max { max = hi.line; @@ -659,20 +659,20 @@ impl EmitterWriter { // First, find all the spans in <*macros> and point instead at their use site for sp in span.primary_spans() { - if (*sp == COMMAND_LINE_SP) || (*sp == DUMMY_SP) { + if *sp == DUMMY_SP { continue; } if cm.span_to_filename(sp.clone()).contains("macros>") { - let v = cm.macro_backtrace(sp.clone()); + let v = sp.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp.clone(), use_site.call_site.clone())); } } - for trace in cm.macro_backtrace(sp.clone()).iter().rev() { + for trace in sp.macro_backtrace().iter().rev() { // Only show macro locations that are local // and display them like a span_note if let Some(def_site) = trace.def_site_span { - if (def_site == COMMAND_LINE_SP) || (def_site == DUMMY_SP) { + if def_site == DUMMY_SP { continue; } // Check to make sure we're not in any <*macros> @@ -689,11 +689,11 @@ impl EmitterWriter { span.push_span_label(label_span, label_text); } for sp_label in span.span_labels() { - if (sp_label.span == COMMAND_LINE_SP) || (sp_label.span == DUMMY_SP) { + if sp_label.span == DUMMY_SP { continue; } if cm.span_to_filename(sp_label.span.clone()).contains("macros>") { - let v = cm.macro_backtrace(sp_label.span.clone()); + let v = 
sp_label.span.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp_label.span.clone(), use_site.call_site.clone())); } @@ -848,7 +848,7 @@ impl EmitterWriter { // Make sure our primary file comes first let primary_lo = if let (Some(ref cm), Some(ref primary_span)) = (self.cm.as_ref(), msp.primary_span().as_ref()) { - if primary_span != &&DUMMY_SP && primary_span != &&COMMAND_LINE_SP { + if primary_span != &&DUMMY_SP { cm.lookup_char_pos(primary_span.lo) } else { emit_to_destination(&buffer.render(), level, &mut self.dst)?; diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 4c889dad8ca50..2efdaa57fba36 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -48,7 +48,6 @@ pub mod styled_buffer; mod lock; use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION}; -use syntax_pos::MacroBacktrace; #[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)] pub enum RenderSpan { @@ -75,7 +74,6 @@ pub trait CodeMapper { fn span_to_lines(&self, sp: Span) -> FileLinesResult; fn span_to_string(&self, sp: Span) -> String; fn span_to_filename(&self, sp: Span) -> FileName; - fn macro_backtrace(&self, span: Span) -> Vec; fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option; } @@ -120,7 +118,7 @@ impl CodeSuggestion { let bounding_span = Span { lo: lo, hi: hi, - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }; let lines = cm.span_to_lines(bounding_span).unwrap(); assert!(!lines.lines.is_empty()); diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index 210803c3f329c..5401b371888e9 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -17,9 +17,10 @@ use self::SawTraitOrImplItemComponent::*; use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::attr; +use syntax::ext::hygiene::SyntaxContext; use syntax::parse::token; use syntax::symbol::InternedString; -use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; +use syntax_pos::{Span, BytePos}; use syntax::tokenstream; use rustc::hir; use rustc::hir::*; @@ -92,10 +93,10 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { span.hi }; - let expn_kind = match span.expn_id { - NO_EXPANSION => SawSpanExpnKind::NoExpansion, - COMMAND_LINE_EXPN => SawSpanExpnKind::CommandLine, - _ => SawSpanExpnKind::SomeExpansion, + let expn_kind = if span.ctxt == SyntaxContext::empty() { + SawSpanExpnKind::NoExpansion + } else { + SawSpanExpnKind::SomeExpansion }; let loc1 = self.codemap.byte_pos_to_line_and_col(span.lo); @@ -121,8 +122,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { saw.hash(self.st); if expn_kind == SawSpanExpnKind::SomeExpansion { - let call_site = self.codemap.codemap().source_callsite(span); - self.hash_span(call_site); + self.hash_span(span.source_callsite()); } } @@ -483,7 +483,6 @@ fn saw_impl_item(ii: &ImplItemKind) -> SawTraitOrImplItemComponent { #[derive(Clone, Copy, Hash, Eq, PartialEq)] enum SawSpanExpnKind { NoExpansion, - CommandLine, SomeExpansion, } @@ -501,7 +500,7 @@ impl<'a> Hash for StableInlineAsm<'a> { volatile, alignstack, dialect, - expn_id: _, // This is used for error reporting + ctxt: _, // This is used for error reporting } = *self.0; asm.as_str().hash(state); diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index ba42804c9262f..9d236bd013c43 100644 --- 
a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -223,7 +223,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } // This comes from a macro that has #[allow_internal_unstable]. - if self.tcx.sess.codemap().span_allows_unstable(self.span) { + if self.span.allows_unstable() { return; } @@ -805,7 +805,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { self.def_id.is_local() && // this doesn't come from a macro that has #[allow_internal_unstable] - !self.tcx.sess.codemap().span_allows_unstable(self.span) + !self.span.allows_unstable() { let mut err = self.tcx.sess.struct_span_err(self.span, "const fns are an unstable feature"); diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index efe9963cecc73..e884f3bdbb122 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -20,7 +20,7 @@ use std::env; use std::mem; use std::path::PathBuf; use syntax::ast; -use syntax_pos::{Span, COMMAND_LINE_SP}; +use syntax_pos::{Span, DUMMY_SP}; /// Pointer to a registrar function. pub type PluginRegistrarFun = @@ -81,7 +81,7 @@ pub fn load_plugins(sess: &Session, if let Some(plugins) = addl_plugins { for plugin in plugins { - loader.load_plugin(COMMAND_LINE_SP, &plugin, vec![]); + loader.load_plugin(DUMMY_SP, &plugin, vec![]); } } diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index e5c04f6b61ec2..fd6803e087a08 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -690,9 +690,8 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { // Note we take care to use the source callsite/callee, to handle // nested expansions and ensure we only generate data for source-visible // macro uses. - let callsite = self.tcx.sess.codemap().source_callsite(span); - let callee = self.tcx.sess.codemap().source_callee(span); - let callee = option_try!(callee); + let callsite = span.source_callsite(); + let callee = option_try!(span.source_callee()); let callee_span = option_try!(callee.span); // Ignore attribute macros, their spans are usually mangled @@ -1013,5 +1012,5 @@ fn escape(s: String) -> String { // Helper function to determine if a span came from a // macro expansion or syntax extension. pub fn generated_code(span: Span) -> bool { - span.expn_id != NO_EXPANSION || span == DUMMY_SP + span.ctxt != NO_EXPANSION || span == DUMMY_SP } diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 34402742e6c33..c19f805a28575 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -462,8 +462,7 @@ impl<'a> SpanUtils<'a> { // Otherwise, a generated span is deemed invalid if it is not a sub-span of the root // callsite. This filters out macro internal variables and most malformed spans. - let span = self.sess.codemap().source_callsite(parent); - !(span.contains(parent)) + !parent.source_callsite().contains(parent) } } diff --git a/src/librustc_trans/asm.rs b/src/librustc_trans/asm.rs index b6195765b27c2..3e270b7928ebc 100644 --- a/src/librustc_trans/asm.rs +++ b/src/librustc_trans/asm.rs @@ -111,14 +111,14 @@ pub fn trans_inline_asm<'a, 'tcx>( bcx.store(v, val, None); } - // Store expn_id in a metadata node so we can map LLVM errors + // Store mark in a metadata node so we can map LLVM errors // back to source locations. See #17552. 
unsafe { let key = "srcloc"; let kind = llvm::LLVMGetMDKindIDInContext(bcx.ccx.llcx(), key.as_ptr() as *const c_char, key.len() as c_uint); - let val: llvm::ValueRef = C_i32(bcx.ccx, ia.expn_id.into_u32() as i32); + let val: llvm::ValueRef = C_i32(bcx.ccx, ia.ctxt.outer().as_u32() as i32); llvm::LLVMSetMetadata(r, kind, llvm::LLVMMDNodeInContext(bcx.ccx.llcx(), &val, 1)); diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 5a017e4fb8a9a..ccb3f7ac882aa 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -371,14 +371,14 @@ struct HandlerFreeVars<'a> { unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<'a>, msg: &'b str, cookie: c_uint) { - use syntax_pos::ExpnId; + use syntax::ext::hygiene::Mark; match cgcx.lto_ctxt { Some((sess, _)) => { - sess.codemap().with_expn_info(ExpnId::from_u32(cookie), |info| match info { + match Mark::from_u32(cookie).expn_info() { Some(ei) => sess.span_err(ei.call_site, msg), None => sess.err(msg), - }); + }; } None => { diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 6419f41f86b6d..21bbbea77d442 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -26,7 +26,7 @@ use monomorphize::{self, Instance}; use abi::FnType; use type_of; -use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos, Span}; +use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span}; use syntax::symbol::keywords; use std::iter; @@ -124,24 +124,18 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { // In order to have a good line stepping behavior in debugger, we overwrite debug // locations of macro expansions with that of the outermost expansion site // (unless the crate is being compiled with `-Z debug-macros`). - if source_info.span.expn_id == NO_EXPANSION || - source_info.span.expn_id == COMMAND_LINE_EXPN || - self.ccx.sess().opts.debugging_opts.debug_macros { - + if source_info.span.ctxt == NO_EXPANSION || + self.ccx.sess().opts.debugging_opts.debug_macros { let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo); (scope, source_info.span) } else { - let cm = self.ccx.sess().codemap(); // Walk up the macro expansion chain until we reach a non-expanded span. // We also stop at the function body level because no line stepping can occurr // at the level above that. 
let mut span = source_info.span; - while span.expn_id != NO_EXPANSION && - span.expn_id != COMMAND_LINE_EXPN && - span.expn_id != self.mir.span.expn_id { - if let Some(callsite_span) = cm.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - span = callsite_span; + while span.ctxt != NO_EXPANSION && span.ctxt != self.mir.span.ctxt { + if let Some(info) = span.ctxt.outer().expn_info() { + span = info.call_site; } else { break; } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 9c62fd486d45a..b95e01f4ff600 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -4161,12 +4161,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } if let Some(last_stmt) = extra_semi { - let original_span = original_sp(self.tcx.sess.codemap(), - last_stmt.span, blk.span); + let original_span = original_sp(last_stmt.span, blk.span); let span_semi = Span { lo: original_span.hi - BytePos(1), hi: original_span.hi, - expn_id: original_span.expn_id + ctxt: original_span.ctxt, }; err.span_help(span_semi, "consider removing this semicolon:"); } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 7e2b225193f6d..a4bebd311ded2 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,10 +14,10 @@ pub use self::TyParamBound::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::PathParameters::*; -pub use symbol::Symbol as Name; +pub use symbol::{Ident, Symbol as Name}; pub use util::ThinVec; -use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP, ExpnId}; +use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; @@ -27,61 +27,12 @@ use rustc_data_structures::indexed_vec; use symbol::{Symbol, keywords}; use tokenstream::{ThinTokenStream, TokenStream}; +use serialize::{self, Encoder, Decoder}; use std::collections::HashSet; use std::fmt; use std::rc::Rc; use std::u32; -use serialize::{self, Encodable, Decodable, Encoder, Decoder}; - -/// An identifier contains a Name (index into the interner -/// table) and a SyntaxContext to track renaming and -/// macro expansion per Flatt et al., "Macros That Work Together" -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct Ident { - pub name: Symbol, - pub ctxt: SyntaxContext -} - -impl Ident { - pub const fn with_empty_ctxt(name: Name) -> Ident { - Ident { name: name, ctxt: SyntaxContext::empty() } - } - - /// Maps a string to an identifier with an empty syntax context. - pub fn from_str(s: &str) -> Ident { - Ident::with_empty_ctxt(Symbol::intern(s)) - } - - pub fn unhygienize(&self) -> Ident { - Ident { name: self.name, ctxt: SyntaxContext::empty() } - } -} - -impl fmt::Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}{:?}", self.name, self.ctxt) - } -} - -impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.name, f) - } -} - -impl Encodable for Ident { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - self.name.encode(s) - } -} - -impl Decodable for Ident { - fn decode(d: &mut D) -> Result { - Ok(Ident::with_empty_ctxt(Name::decode(d)?)) - } -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, @@ -1445,7 +1396,7 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - pub expn_id: ExpnId, + pub ctxt: SyntaxContext, } /// An argument in a function header. 
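(A minimal sketch, not from the diff: after this commit a `Span` carries a `ctxt: SyntaxContext` instead of an `expn_id: ExpnId`, and the expansion backtrace hangs off that context; the helper name below is hypothetical.)

    use syntax_pos::Span;

    // Mirrors the pattern used in mir/mod.rs and codemap.rs in this patch:
    // no CodeMap lookup is needed, the hygiene data owns the ExpnInfo.
    fn immediate_call_site(sp: Span) -> Option<Span> {
        sp.ctxt.outer().expn_info().map(|info| info.call_site)
    }
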
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 388f3cb732351..ba199eacb6276 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -17,6 +17,8 @@ //! within the CodeMap, which upon request can be converted to line and column //! information, source code snippets, etc. +pub use syntax_pos::*; +pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo, NameAndSpan}; pub use self::ExpnFormat::*; use std::cell::RefCell; @@ -26,35 +28,21 @@ use std::rc::Rc; use std::env; use std::fs; use std::io::{self, Read}; -pub use syntax_pos::*; use errors::CodeMapper; -use ast::Name; - /// Return the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. -pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span { - let call_site1 = cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site)); - let call_site2 = cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site)); +pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { + let call_site1 = sp.ctxt.outer().expn_info().map(|ei| ei.call_site); + let call_site2 = enclosing_sp.ctxt.outer().expn_info().map(|ei| ei.call_site); match (call_site1, call_site2) { (None, _) => sp, (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, - (Some(call_site1), _) => original_sp(cm, call_site1, enclosing_sp), + (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), } } -/// The source of expansion. -#[derive(Clone, Hash, Debug, PartialEq, Eq)] -pub enum ExpnFormat { - /// e.g. #[derive(...)] - MacroAttribute(Name), - /// e.g. `format!()` - MacroBang(Name), - /// Desugaring done by the compiler during HIR lowering. - CompilerDesugaring(Name) -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct Spanned { pub node: T, @@ -73,47 +61,6 @@ pub fn dummy_spanned(t: T) -> Spanned { respan(DUMMY_SP, t) } -#[derive(Clone, Hash, Debug)] -pub struct NameAndSpan { - /// The format with which the macro was invoked. - pub format: ExpnFormat, - /// Whether the macro is allowed to use #[unstable]/feature-gated - /// features internally without forcing the whole crate to opt-in - /// to them. - pub allow_internal_unstable: bool, - /// The span of the macro definition itself. The macro may not - /// have a sensible definition span (e.g. something defined - /// completely inside libsyntax) in which case this is None. - pub span: Option -} - -impl NameAndSpan { - pub fn name(&self) -> Name { - match self.format { - ExpnFormat::MacroAttribute(s) | - ExpnFormat::MacroBang(s) | - ExpnFormat::CompilerDesugaring(s) => s, - } - } -} - -/// Extra information for tracking spans of macro and syntax sugar expansion -#[derive(Hash, Debug)] -pub struct ExpnInfo { - /// The location of the actual macro invocation or syntax sugar , e.g. - /// `let x = foo!();` or `if let Some(y) = x {}` - /// - /// This may recursively refer to other macro invocations, e.g. if - /// `foo!()` invoked `bar!()` internally, and there was an - /// expression inside `bar!`; the call_site of the expression in - /// the expansion would point to the `bar!` invocation; that - /// call_site span would have its own ExpnInfo, with the call_site - /// pointing to the `foo!` invocation. - pub call_site: Span, - /// Information about the expansion. 
- pub callee: NameAndSpan -} - // _____________________________________________________________________________ // FileMap, MultiByteChar, FileName, FileLines // @@ -161,7 +108,6 @@ impl FileLoader for RealFileLoader { pub struct CodeMap { pub files: RefCell>>, - expansions: RefCell>, file_loader: Box } @@ -169,7 +115,6 @@ impl CodeMap { pub fn new() -> CodeMap { CodeMap { files: RefCell::new(Vec::new()), - expansions: RefCell::new(Vec::new()), file_loader: Box::new(RealFileLoader) } } @@ -177,7 +122,6 @@ impl CodeMap { pub fn with_file_loader(file_loader: Box) -> CodeMap { CodeMap { files: RefCell::new(Vec::new()), - expansions: RefCell::new(Vec::new()), file_loader: file_loader } } @@ -353,14 +297,14 @@ impl CodeMap { /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If /// there are gaps between lhs and rhs, the resulting union will cross these gaps. /// For this to work, the spans have to be: - /// * the expn_id of both spans much match + /// * the ctxt of both spans much match /// * the lhs span needs to end on the same line the rhs span begins /// * the lhs span must start at or before the rhs span pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { use std::cmp; // make sure we're at the same expansion id - if sp_lhs.expn_id != sp_rhs.expn_id { + if sp_lhs.ctxt != sp_rhs.ctxt { return None; } @@ -383,7 +327,7 @@ impl CodeMap { Some(Span { lo: cmp::min(sp_lhs.lo, sp_rhs.lo), hi: cmp::max(sp_lhs.hi, sp_rhs.hi), - expn_id: sp_lhs.expn_id, + ctxt: sp_lhs.ctxt, }) } else { None @@ -391,10 +335,6 @@ impl CodeMap { } pub fn span_to_string(&self, sp: Span) -> String { - if sp == COMMAND_LINE_SP { - return "".to_string(); - } - if self.files.borrow().is_empty() && sp.source_equal(&DUMMY_SP) { return "no-location".to_string(); } @@ -409,62 +349,6 @@ impl CodeMap { hi.col.to_usize() + 1)).to_string() } - /// Return the source span - this is either the supplied span, or the span for - /// the macro callsite that expanded to it. - pub fn source_callsite(&self, sp: Span) -> Span { - let mut span = sp; - // Special case - if a macro is parsed as an argument to another macro, the source - // callsite is the first callsite, which is also source-equivalent to the span. - let mut first = true; - while span.expn_id != NO_EXPANSION && span.expn_id != COMMAND_LINE_EXPN { - if let Some(callsite) = self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - if first && span.source_equal(&callsite) { - if self.lookup_char_pos(span.lo).file.is_real_file() { - return Span { expn_id: NO_EXPANSION, .. span }; - } - } - first = false; - span = callsite; - } - else { - break; - } - } - span - } - - /// Return the source callee. - /// - /// Returns None if the supplied span has no expansion trace, - /// else returns the NameAndSpan for the macro definition - /// corresponding to the source callsite. - pub fn source_callee(&self, sp: Span) -> Option { - let mut span = sp; - // Special case - if a macro is parsed as an argument to another macro, the source - // callsite is source-equivalent to the span, and the source callee is the first callee. 
- let mut first = true; - while let Some(callsite) = self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - if first && span.source_equal(&callsite) { - if self.lookup_char_pos(span.lo).file.is_real_file() { - return self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.callee.clone())); - } - } - first = false; - if let Some(_) = self.with_expn_info(callsite.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - span = callsite; - } - else { - return self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.callee.clone())); - } - } - None - } - pub fn span_to_filename(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo).file.name.to_string() } @@ -628,111 +512,9 @@ impl CodeMap { return a; } - pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId { - let mut expansions = self.expansions.borrow_mut(); - expansions.push(expn_info); - let len = expansions.len(); - if len > u32::max_value() as usize { - panic!("too many ExpnInfo's!"); - } - ExpnId(len as u32 - 1) - } - - pub fn with_expn_info(&self, id: ExpnId, f: F) -> T where - F: FnOnce(Option<&ExpnInfo>) -> T, - { - match id { - NO_EXPANSION | COMMAND_LINE_EXPN => f(None), - ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize])) - } - } - - /// Check if a span is "internal" to a macro in which #[unstable] - /// items can be used (that is, a macro marked with - /// `#[allow_internal_unstable]`). - pub fn span_allows_unstable(&self, span: Span) -> bool { - debug!("span_allows_unstable(span = {:?})", span); - let mut allows_unstable = false; - let mut expn_id = span.expn_id; - loop { - let quit = self.with_expn_info(expn_id, |expninfo| { - debug!("span_allows_unstable: expninfo = {:?}", expninfo); - expninfo.map_or(/* hit the top level */ true, |info| { - - let span_comes_from_this_expansion = - info.callee.span.map_or(span.source_equal(&info.call_site), |mac_span| { - mac_span.contains(span) - }); - - debug!("span_allows_unstable: span: {:?} call_site: {:?} callee: {:?}", - (span.lo, span.hi), - (info.call_site.lo, info.call_site.hi), - info.callee.span.map(|x| (x.lo, x.hi))); - debug!("span_allows_unstable: from this expansion? {}, allows unstable? {}", - span_comes_from_this_expansion, - info.callee.allow_internal_unstable); - if span_comes_from_this_expansion { - allows_unstable = info.callee.allow_internal_unstable; - // we've found the right place, stop looking - true - } else { - // not the right place, keep looking - expn_id = info.call_site.expn_id; - false - } - }) - }); - if quit { - break - } - } - debug!("span_allows_unstable? {}", allows_unstable); - allows_unstable - } - pub fn count_lines(&self) -> usize { self.files.borrow().iter().fold(0, |a, f| a + f.count_lines()) } - - pub fn macro_backtrace(&self, span: Span) -> Vec { - let mut prev_span = DUMMY_SP; - let mut span = span; - let mut result = vec![]; - loop { - let span_name_span = self.with_expn_info(span.expn_id, |expn_info| { - expn_info.map(|ei| { - let (pre, post) = match ei.callee.format { - MacroAttribute(..) => ("#[", "]"), - MacroBang(..) => ("", "!"), - CompilerDesugaring(..) 
=> ("desugaring of `", "`"), - }; - let macro_decl_name = format!("{}{}{}", - pre, - ei.callee.name(), - post); - let def_site_span = ei.callee.span; - (ei.call_site, macro_decl_name, def_site_span) - }) - }); - - match span_name_span { - None => break, - Some((call_site, macro_decl_name, def_site_span)) => { - // Don't print recursive invocations - if !call_site.source_equal(&prev_span) { - result.push(MacroBacktrace { - call_site: call_site, - macro_decl_name: macro_decl_name, - def_site_span: def_site_span, - }); - } - prev_span = span; - span = call_site; - } - } - } - result - } } impl CodeMapper for CodeMap { @@ -748,9 +530,6 @@ impl CodeMapper for CodeMap { fn span_to_filename(&self, sp: Span) -> FileName { self.span_to_filename(sp) } - fn macro_backtrace(&self, span: Span) -> Vec { - self.macro_backtrace(span) - } fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { self.merge_spans(sp_lhs, sp_rhs) } @@ -763,7 +542,6 @@ impl CodeMapper for CodeMap { #[cfg(test)] mod tests { use super::*; - use symbol::keywords; use std::rc::Rc; #[test] @@ -912,7 +690,7 @@ mod tests { fn t7() { // Test span_to_lines for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let file_lines = cm.span_to_lines(span).unwrap(); assert_eq!(file_lines.file.name, "blork.rs"); @@ -928,7 +706,7 @@ mod tests { assert_eq!(input.len(), selection.len()); let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); - Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION } + Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), ctxt: NO_EXPANSION } } /// Test span_to_snippet and span_to_lines for a span coverting 3 @@ -958,7 +736,7 @@ mod tests { fn t8() { // Test span_to_snippet for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let snippet = cm.span_to_snippet(span); assert_eq!(snippet, Ok("second line".to_string())); @@ -968,7 +746,7 @@ mod tests { fn t9() { // Test span_to_str for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let sstr = cm.span_to_string(span); assert_eq!(sstr, "blork.rs:2:1: 2:12"); @@ -1022,7 +800,7 @@ mod tests { let span = Span { lo: BytePos(lo as u32 + file.start_pos.0), hi: BytePos(hi as u32 + file.start_pos.0), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }; assert_eq!(&self.span_to_snippet(span).unwrap()[..], substring); @@ -1032,45 +810,4 @@ mod tests { } } } - - fn init_expansion_chain(cm: &CodeMap) -> Span { - // Creates an expansion chain containing two recursive calls - // root -> expA -> expA -> expB -> expB -> end - let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - - let format_root = ExpnFormat::MacroBang(keywords::Invalid.name()); - let callee_root = NameAndSpan { format: format_root, - allow_internal_unstable: false, - span: Some(root) }; - - let info_a1 = ExpnInfo { call_site: root, callee: callee_root }; - let id_a1 = cm.record_expansion(info_a1); - let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 }; - - let format_a = 
ExpnFormat::MacroBang(keywords::As.name()); - let callee_a = NameAndSpan { format: format_a, - allow_internal_unstable: false, - span: Some(span_a1) }; - - let info_a2 = ExpnInfo { call_site: span_a1, callee: callee_a.clone() }; - let id_a2 = cm.record_expansion(info_a2); - let span_a2 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a2 }; - - let info_b1 = ExpnInfo { call_site: span_a2, callee: callee_a }; - let id_b1 = cm.record_expansion(info_b1); - let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 }; - - let format_b = ExpnFormat::MacroBang(keywords::Box.name()); - let callee_b = NameAndSpan { format: format_b, - allow_internal_unstable: false, - span: None }; - - let info_b2 = ExpnInfo { call_site: span_b1, callee: callee_b.clone() }; - let id_b2 = cm.record_expansion(info_b2); - let span_b2 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b2 }; - - let info_end = ExpnInfo { call_site: span_b2, callee: callee_b }; - let id_end = cm.record_expansion(info_end); - Span { lo: BytePos(37), hi: BytePos(48), expn_id: id_end } - } } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index dc7e7673eb03c..a2d54b62ec65d 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -12,11 +12,11 @@ pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT use ast::{self, Attribute, Name, PatKind, MetaItem}; use attr::HasAttrs; -use codemap::{self, CodeMap, ExpnInfo, Spanned, respan}; -use syntax_pos::{Span, ExpnId, NO_EXPANSION}; -use errors::{DiagnosticBuilder, FatalError}; +use codemap::{self, CodeMap, Spanned, respan}; +use syntax_pos::{Span, DUMMY_SP}; +use errors::DiagnosticBuilder; use ext::expand::{self, Expansion, Invocation}; -use ext::hygiene::Mark; +use ext::hygiene::{Mark, SyntaxContext}; use fold::{self, Folder}; use parse::{self, parser, DirectoryOwnership}; use parse::token; @@ -56,6 +56,14 @@ impl HasAttrs for Annotatable { } impl Annotatable { + pub fn span(&self) -> Span { + match *self { + Annotatable::Item(ref item) => item.span, + Annotatable::TraitItem(ref trait_item) => trait_item.span, + Annotatable::ImplItem(ref impl_item) => impl_item.span, + } + } + pub fn expect_item(self) -> P { match self { Annotatable::Item(i) => i, @@ -602,7 +610,6 @@ pub struct ModuleData { pub struct ExpansionData { pub mark: Mark, pub depth: usize, - pub backtrace: ExpnId, pub module: Rc, pub directory_ownership: DirectoryOwnership, } @@ -633,7 +640,6 @@ impl<'a> ExtCtxt<'a> { current_expansion: ExpansionData { mark: Mark::root(), depth: 0, - backtrace: NO_EXPANSION, module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }), directory_ownership: DirectoryOwnership::Owned, }, @@ -658,30 +664,30 @@ impl<'a> ExtCtxt<'a> { pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { - self.codemap().with_expn_info(self.backtrace(), |ei| match ei { + match self.current_expansion.mark.expn_info() { Some(expn_info) => expn_info.call_site, - None => self.bug("missing top span") - }) + None => DUMMY_SP, + } + } + pub fn backtrace(&self) -> SyntaxContext { + SyntaxContext::empty().apply_mark(self.current_expansion.mark) } - pub fn backtrace(&self) -> ExpnId { self.current_expansion.backtrace } /// Returns span for the macro which originally caused the current expansion to happen. /// /// Stops backtracing at include! boundary. 
pub fn expansion_cause(&self) -> Span { - let mut expn_id = self.backtrace(); + let mut ctxt = self.backtrace(); let mut last_macro = None; loop { - if self.codemap().with_expn_info(expn_id, |info| { - info.map_or(None, |i| { - if i.callee.name() == "include" { - // Stop going up the backtrace once include! is encountered - return None; - } - expn_id = i.call_site.expn_id; - last_macro = Some(i.call_site); - return Some(()); - }) + if ctxt.outer().expn_info().map_or(None, |info| { + if info.callee.name() == "include" { + // Stop going up the backtrace once include! is encountered + return None; + } + ctxt = info.call_site.ctxt; + last_macro = Some(info.call_site); + return Some(()); }).is_none() { break } @@ -689,28 +695,6 @@ impl<'a> ExtCtxt<'a> { last_macro.expect("missing expansion backtrace") } - pub fn bt_push(&mut self, ei: ExpnInfo) { - if self.current_expansion.depth > self.ecfg.recursion_limit { - let suggested_limit = self.ecfg.recursion_limit * 2; - let mut err = self.struct_span_fatal(ei.call_site, - &format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name())); - err.help(&format!( - "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit)); - err.emit(); - panic!(FatalError); - } - - let mut call_site = ei.call_site; - call_site.expn_id = self.backtrace(); - self.current_expansion.backtrace = self.codemap().record_expansion(ExpnInfo { - call_site: call_site, - callee: ei.callee - }); - } - pub fn bt_pop(&mut self) {} - pub fn struct_span_warn(&self, sp: Span, msg: &str) @@ -792,9 +776,9 @@ impl<'a> ExtCtxt<'a> { /// compilation on error, merely emits a non-fatal error and returns None. pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P, err_msg: &str) -> Option> { - // Update `expr.span`'s expn_id now in case expr is an `include!` macro invocation. + // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation. let expr = expr.map(|mut expr| { - expr.span.expn_id = cx.backtrace(); + expr.span.ctxt = expr.span.ctxt.apply_mark(cx.current_expansion.mark); expr }); diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 1569d9f540b8e..c79040424f619 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -9,13 +9,16 @@ // except according to those terms. 
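(A minimal sketch, not from the diff: with the CodeMap expansion table gone, an extension records its `ExpnInfo` on the invocation `Mark` and stamps output spans with `cx.backtrace()`; the helper function and the macro name "example" are hypothetical.)

    use syntax::codemap::{ExpnInfo, NameAndSpan, MacroBang};
    use syntax::ext::base::ExtCtxt;
    use syntax::symbol::Symbol;
    use syntax_pos::Span;

    fn mark_output_span(cx: &mut ExtCtxt, call_site: Span) -> Span {
        // Attach expansion info to the current mark instead of calling
        // codemap().record_expansion(), as the hunks above now do.
        cx.current_expansion.mark.set_expn_info(ExpnInfo {
            call_site: call_site,
            callee: NameAndSpan {
                format: MacroBang(Symbol::intern("example")), // hypothetical macro name
                span: None,
                allow_internal_unstable: false,
            },
        });
        // Spans produced by the expansion get the marked context.
        Span { ctxt: cx.backtrace(), ..call_site }
    }
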
use attr::HasAttrs; -use {ast, codemap}; +use ast; +use codemap::{ExpnInfo, NameAndSpan, ExpnFormat}; use ext::base::ExtCtxt; use ext::build::AstBuilder; use parse::parser::PathStyle; use symbol::Symbol; use syntax_pos::Span; +use std::collections::HashSet; + pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec { let mut result = Vec::new(); attrs.retain(|attr| { @@ -41,36 +44,35 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec result } -fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { - Span { - expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern(attr_name)), - span: Some(span), - allow_internal_unstable: true, - }, - }), - ..span +pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T + where T: HasAttrs, +{ + let (mut names, mut pretty_name) = (HashSet::new(), "derive(".to_owned()); + for (i, path) in traits.iter().enumerate() { + if i > 0 { + pretty_name.push_str(", "); + } + pretty_name.push_str(&path.to_string()); + names.insert(unwrap_or!(path.segments.get(0), continue).identifier.name); } -} + pretty_name.push(')'); -pub fn add_derived_markers(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T { - let span = match traits.get(0) { - Some(path) => path.span, - None => return item, - }; + cx.current_expansion.mark.set_expn_info(ExpnInfo { + call_site: span, + callee: NameAndSpan { + format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)), + span: None, + allow_internal_unstable: true, + }, + }); + let span = Span { ctxt: cx.backtrace(), ..span }; item.map_attrs(|mut attrs| { - if traits.iter().any(|path| *path == "PartialEq") && - traits.iter().any(|path| *path == "Eq") { - let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); + if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) { let meta = cx.meta_word(span, Symbol::intern("structural_match")); attrs.push(cx.attribute(span, meta)); } - if traits.iter().any(|path| *path == "Copy") && - traits.iter().any(|path| *path == "Clone") { - let span = allow_unstable(cx, span, "derive(Copy, Clone)"); + if names.contains(&Symbol::intern("Copy")) && names.contains(&Symbol::intern("Clone")) { let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker")); attrs.push(cx.attribute(span, meta)); } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index e258c51a3295f..1b3352f73ade7 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -13,6 +13,7 @@ use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use config::{is_test_or_bench, StripUnconfigured}; +use errors::FatalError; use ext::base::*; use ext::derive::{add_derived_markers, collect_derives}; use ext::hygiene::Mark; @@ -27,7 +28,7 @@ use ptr::P; use std_inject; use symbol::Symbol; use symbol::keywords; -use syntax_pos::{Span, ExpnId, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use tokenstream::TokenStream; use util::small_vector::SmallVector; use visit::Visitor; @@ -273,7 +274,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let item = item .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs }); let item_with_markers = - add_derived_markers(&mut self.cx, &traits, item.clone()); + add_derived_markers(&mut self.cx, item.span(), &traits, item.clone()); let derives = 
derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new); for path in &traits { @@ -363,11 +364,26 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } fn expand_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { - match invoc.kind { + let result = match invoc.kind { InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext), InvocationKind::Attr { .. } => self.expand_attr_invoc(invoc, ext), InvocationKind::Derive { .. } => self.expand_derive_invoc(invoc, ext), + }; + + if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit { + let info = self.cx.current_expansion.mark.expn_info().unwrap(); + let suggested_limit = self.cx.ecfg.recursion_limit * 2; + let mut err = self.cx.struct_span_fatal(info.call_site, + &format!("recursion limit reached while expanding the macro `{}`", + info.callee.name())); + err.help(&format!( + "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", + suggested_limit)); + err.emit(); + panic!(FatalError); } + + result } fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { @@ -378,11 +394,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: attr.span, callee: NameAndSpan { format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), - span: Some(attr.span), + span: None, allow_internal_unstable: false, } }); @@ -403,19 +419,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { SyntaxExtension::AttrProcMacro(ref mac) => { let item_toks = stream_for_item(&item, &self.cx.parse_sess); - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: attr.span, - callee: NameAndSpan { - format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), - span: None, - allow_internal_unstable: false, - }, - }), - ..attr.span - }; - - let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks); + let span = Span { ctxt: self.cx.backtrace(), ..attr.span }; + let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks); self.parse_expansion(tok_result, kind, &attr.path, span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) 
=> { @@ -440,8 +445,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let path = &mac.node.path; let ident = ident.unwrap_or(keywords::Invalid.ident()); - let marked_tts = - noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None }); + let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark)); let opt_expanded = match *ext { NormalTT(ref expandfun, exp_span, allow_internal_unstable) => { if ident.name != keywords::Invalid.name() { @@ -451,7 +455,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -470,7 +474,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); }; - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -502,7 +506,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -528,10 +532,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); }; - expanded.fold_with(&mut Marker { - mark: mark, - expn_id: Some(self.cx.backtrace()), - }) + expanded.fold_with(&mut Marker(mark)) } /// Expand a derive invocation. Returns the result of expansion. @@ -550,50 +551,33 @@ impl<'a, 'b> MacroExpander<'a, 'b> { id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false, }; - self.cx.bt_push(ExpnInfo { + let mut expn_info = ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroAttribute(pretty_name), span: None, allow_internal_unstable: false, } - }); + }; match *ext { SyntaxExtension::ProcMacroDerive(ref ext, _) => { - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: span, - callee: NameAndSpan { - format: MacroAttribute(pretty_name), - span: None, - allow_internal_unstable: false, - }, - }), - ..span - }; + invoc.expansion_data.mark.set_expn_info(expn_info); + let span = Span { ctxt: self.cx.backtrace(), ..span }; let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this name: keywords::Invalid.name(), span: DUMMY_SP, node: ast::MetaItemKind::Word, }; - return kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)); + kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)) } SyntaxExtension::BuiltinDerive(func) => { - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: span, - callee: NameAndSpan { - format: MacroAttribute(pretty_name), - span: None, - allow_internal_unstable: true, - }, - }), - ..span - }; + expn_info.callee.allow_internal_unstable = true; + invoc.expansion_data.mark.set_expn_info(expn_info); + let span = Span { ctxt: self.cx.backtrace(), ..span }; let mut items = Vec::new(); func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a)); - return kind.expect_from_annotatables(items); + kind.expect_from_annotatables(items) } _ => { let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); @@ -753,10 +737,9 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { // Detect use of feature-gated or invalid attributes on macro invocations // since they will not be detected after macro expansion. 
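A recurring change in the hunks above is that cx.bt_push(ExpnInfo { .. }) becomes invoc.expansion_data.mark.set_expn_info(..): expansion info is now attached to the invocation's Mark in a per-mark side table rather than appended to a codemap-owned backtrace, which is why the old bt_push/bt_pop bookkeeping disappears and the recursion-limit check moves into expand_invoc. The snippet below is a standalone model of that side table using the same method names; the thread-local Vec is a simplification of the compiler's HygieneData, not the real storage.

    use std::cell::RefCell;

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Mark(u32);

    #[derive(Clone, Debug)]
    struct ExpnInfo {
        callee_name: String,
        allow_internal_unstable: bool,
    }

    thread_local! {
        // One optional ExpnInfo slot per mark, indexed by the mark's integer id.
        static MARKS: RefCell<Vec<Option<ExpnInfo>>> = RefCell::new(Vec::new());
    }

    impl Mark {
        // Allocate a new mark with no expansion info attached yet.
        fn fresh() -> Mark {
            MARKS.with(|m| {
                let mut marks = m.borrow_mut();
                marks.push(None);
                Mark(marks.len() as u32 - 1)
            })
        }

        fn set_expn_info(self, info: ExpnInfo) {
            MARKS.with(|m| m.borrow_mut()[self.0 as usize] = Some(info));
        }

        fn expn_info(self) -> Option<ExpnInfo> {
            MARKS.with(|m| m.borrow()[self.0 as usize].clone())
        }
    }

    fn main() {
        let mark = Mark::fresh();
        mark.set_expn_info(ExpnInfo { callee_name: "vec".into(), allow_internal_unstable: false });
        assert_eq!(mark.expn_info().unwrap().callee_name, "vec");
        assert!(!mark.expn_info().unwrap().allow_internal_unstable);
    }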
fn check_attributes(&mut self, attrs: &[ast::Attribute]) { - let codemap = &self.cx.parse_sess.codemap(); let features = self.cx.ecfg.features.unwrap(); for attr in attrs.iter() { - feature_gate::check_attribute(&attr, &self.cx.parse_sess, codemap, features); + feature_gate::check_attribute(&attr, &self.cx.parse_sess, features); } } } @@ -1065,23 +1048,21 @@ impl<'feat> ExpansionConfig<'feat> { } } -// A Marker adds the given mark to the syntax context and -// sets spans' `expn_id` to the given expn_id (unless it is `None`). -struct Marker { mark: Mark, expn_id: Option } +// A Marker adds the given mark to the syntax context. +struct Marker(Mark); impl Folder for Marker { fn fold_ident(&mut self, mut ident: Ident) -> Ident { - ident.ctxt = ident.ctxt.apply_mark(self.mark); + ident.ctxt = ident.ctxt.apply_mark(self.0); ident } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - noop_fold_mac(mac, self) - } fn new_span(&mut self, mut span: Span) -> Span { - if let Some(expn_id) = self.expn_id { - span.expn_id = expn_id; - } + span.ctxt = span.ctxt.apply_mark(self.0); span } + + fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { + noop_fold_mac(mac, self) + } } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 39b92c7d007de..0103d6ea959dd 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -185,7 +185,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: &Path) -> PathBuf { // NB: relative paths are resolved relative to the compilation unit if !arg.is_absolute() { - let callsite = cx.codemap().source_callsite(sp); + let callsite = sp.source_callsite(); let mut cu = PathBuf::from(&cx.codemap().span_to_filename(callsite)); cu.pop(); cu.push(arg); diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index d56859d805c87..12e746e024d3b 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -34,17 +34,19 @@ impl Delimited { } pub fn open_tt(&self, span: Span) -> TokenTree { - let open_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + let open_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(open_span, self.open_token()) } pub fn close_tt(&self, span: Span) -> TokenTree { - let close_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + let close_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(close_span, self.close_token()) } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 9d280a413e666..12d25ca4274fe 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -28,7 +28,7 @@ use self::AttributeGate::*; use abi::Abi; use ast::{self, NodeId, PatKind, RangeEnd}; use attr; -use codemap::{CodeMap, Spanned}; +use codemap::Spanned; use syntax_pos::Span; use errors::{DiagnosticBuilder, Handler, FatalError}; use visit::{self, FnKind, Visitor}; @@ -831,7 +831,7 @@ impl GatedCfg { pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) { let (cfg, feature, has_feature) = GATED_CFGS[self.index]; - if !has_feature(features) && !sess.codemap().span_allows_unstable(self.span) { + if !has_feature(features) && 
!self.span.allows_unstable() { let explain = format!("`cfg({})` is experimental and subject to change", cfg); emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain); } @@ -841,7 +841,6 @@ impl GatedCfg { struct Context<'a> { features: &'a Features, parse_sess: &'a ParseSess, - cm: &'a CodeMap, plugin_attributes: &'a [(String, AttributeType)], } @@ -850,7 +849,7 @@ macro_rules! gate_feature_fn { let (cx, has_feature, span, name, explain) = ($cx, $has_feature, $span, $name, $explain); let has_feature: bool = has_feature(&$cx.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); - if !has_feature && !cx.cm.span_allows_unstable(span) { + if !has_feature && !span.allows_unstable() { emit_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain); } }} @@ -908,12 +907,8 @@ impl<'a> Context<'a> { } } -pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, - cm: &CodeMap, features: &Features) { - let cx = Context { - features: features, parse_sess: parse_sess, - cm: cm, plugin_attributes: &[] - }; +pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { + let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] }; cx.check_attribute(attr, true); } @@ -1016,7 +1011,7 @@ struct PostExpansionVisitor<'a> { macro_rules! gate_feature_post { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{ let (cx, span) = ($cx, $span); - if !cx.context.cm.span_allows_unstable(span) { + if !span.allows_unstable() { gate_feature!(cx.context, $feature, span, $explain) } }} @@ -1096,7 +1091,7 @@ fn starts_with_digit(s: &str) -> bool { impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { - if !self.context.cm.span_allows_unstable(attr.span) { + if !attr.span.allows_unstable() { // check for gated attributes self.context.check_attribute(attr, false); } @@ -1530,7 +1525,6 @@ pub fn check_crate(krate: &ast::Crate, let ctx = Context { features: features, parse_sess: sess, - cm: sess.codemap(), plugin_attributes: plugin_attributes, }; visit::walk_crate(&mut PostExpansionVisitor { context: &ctx }, krate); diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index fd762552248b4..dec1b7d1d87be 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -202,7 +202,7 @@ impl DiagnosticSpan { // backtrace ourselves, but the `macro_backtrace` helper makes // some decision, such as dropping some frames, and I don't // want to duplicate that logic here. 
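The feature-gate hunks above replace cm.span_allows_unstable(span) with span.allows_unstable(): whether unstable items may be used is now answered from the span's own expansion info instead of by consulting the codemap. In the simplified model below (illustrative stand-in types, not the real Span or ExpnInfo), the check is a single lookup: a span allows unstable items only if it was produced by an expansion whose callee was marked allow_internal_unstable.

    struct ExpnInfo {
        allow_internal_unstable: bool,
    }

    // A span either comes straight from user source (no expansion info) or was
    // produced by some expansion whose recorded info we can inspect.
    struct Span {
        expn_info: Option<ExpnInfo>,
    }

    impl Span {
        fn allows_unstable(&self) -> bool {
            match self.expn_info {
                Some(ref info) => info.allow_internal_unstable,
                None => false,
            }
        }
    }

    fn main() {
        let user_code = Span { expn_info: None };
        let test_harness = Span { expn_info: Some(ExpnInfo { allow_internal_unstable: true }) };
        assert!(!user_code.allows_unstable());
        assert!(test_harness.allows_unstable());
    }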
- let backtrace = je.cm.macro_backtrace(span).into_iter(); + let backtrace = span.macro_backtrace().into_iter(); DiagnosticSpan::from_span_full(span, is_primary, label, diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 6c975f3fc4021..86ee1c5336dfe 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -125,7 +125,7 @@ pub mod ptr; pub mod show_span; pub mod std_inject; pub mod str; -pub mod symbol; +pub use syntax_pos::symbol; pub mod test; pub mod tokenstream; pub mod visit; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 43a9d8c5f787c..e9eb4fbcc9162 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5036,11 +5036,7 @@ impl<'a> Parser<'a> { the path:", path); self.expect(&token::CloseDelim(token::Paren))?; // `)` - let sp = Span { - lo: start_span.lo, - hi: self.prev_span.hi, - expn_id: start_span.expn_id, - }; + let sp = start_span.to(self.prev_span); let mut err = self.span_fatal_help(sp, &msg, &suggestion); err.span_suggestion(path_span, &help_msg, format!("in {}", path)); err.emit(); // emit diagnostic, but continue with public visibility diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index c541df9230a64..c7820a15fb3d2 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -10,29 +10,27 @@ use ast; use attr; +use ext::hygiene::{Mark, SyntaxContext}; use symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; -use parse::ParseSess; use ptr::P; use tokenstream::TokenStream; /// Craft a span that will be ignored by the stability lint's /// call to codemap's is_internal check. /// The expanded code uses the unstable `#[prelude_import]` attribute. -fn ignored_span(sess: &ParseSess, sp: Span) -> Span { - let info = ExpnInfo { +fn ignored_span(sp: Span) -> Span { + let mark = Mark::fresh(); + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("std_inject")), span: None, allow_internal_unstable: true, } - }; - let expn_id = sess.codemap().record_expansion(info); - let mut sp = sp; - sp.expn_id = expn_id; - return sp; + }); + Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp } } pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { @@ -45,10 +43,7 @@ pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { } } -pub fn maybe_inject_crates_ref(sess: &ParseSess, - mut krate: ast::Crate, - alt_std_name: Option) - -> ast::Crate { +pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option) -> ast::Crate { let name = match injected_crate_name(&krate) { Some(name) => name, None => return krate, @@ -67,7 +62,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, span: DUMMY_SP, })); - let span = ignored_span(sess, DUMMY_SP); + let span = ignored_span(DUMMY_SP); krate.module.items.insert(0, P(ast::Item { attrs: vec![ast::Attribute { style: ast::AttrStyle::Outer, diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 6fb6db9ca0282..50380626d7f0d 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -31,6 +31,7 @@ use entry::{self, EntryPointType}; use ext::base::{ExtCtxt, Resolver}; use ext::build::AstBuilder; use ext::expand::ExpansionConfig; +use ext::hygiene::{Mark, SyntaxContext}; use fold::Folder; use util::move_map::MoveMap; use fold; @@ -62,6 +63,7 @@ struct TestCtxt<'a> { testfns: Vec, reexport_test_harness_main: Option, is_test_crate: bool, + ctxt: 
SyntaxContext, // top-level re-export submodule, filled out after folding is finished toplevel_reexport: Option, @@ -275,6 +277,7 @@ fn generate_test_harness(sess: &ParseSess, let mut cleaner = EntryPointCleaner { depth: 0 }; let krate = cleaner.fold_crate(krate); + let mark = Mark::fresh(); let mut cx: TestCtxt = TestCtxt { sess: sess, span_diagnostic: sd, @@ -284,15 +287,16 @@ fn generate_test_harness(sess: &ParseSess, reexport_test_harness_main: reexport_test_harness_main, is_test_crate: is_test_crate(&krate), toplevel_reexport: None, + ctxt: SyntaxContext::empty().apply_mark(mark), }; cx.ext_cx.crate_root = Some("std"); - cx.ext_cx.bt_push(ExpnInfo { + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("test")), span: None, - allow_internal_unstable: false, + allow_internal_unstable: true, } }); @@ -307,18 +311,7 @@ fn generate_test_harness(sess: &ParseSess, /// call to codemap's is_internal check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { - let info = ExpnInfo { - call_site: sp, - callee: NameAndSpan { - format: MacroAttribute(Symbol::intern("test")), - span: None, - allow_internal_unstable: true, - } - }; - let expn_id = cx.sess.codemap().record_expansion(info); - let mut sp = sp; - sp.expn_id = expn_id; - return sp; + Span { ctxt: cx.ctxt, ..sp } } #[derive(PartialEq)] diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index c6d6e6237f2ed..c537a0ee16644 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -83,7 +83,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { Span { lo: BytePos(start as u32), hi: BytePos(end as u32), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, } } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index b75b3efda36c8..86bfdebe42b00 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -56,18 +56,20 @@ impl Delimited { /// Returns the opening delimiter as a token tree. pub fn open_tt(&self, span: Span) -> TokenTree { - let open_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + let open_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(open_span, self.open_token()) } /// Returns the closing delimiter as a token tree. 
pub fn close_tt(&self, span: Span) -> TokenTree { - let close_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + let close_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(close_span, self.close_token()) } @@ -425,7 +427,7 @@ mod tests { Span { lo: BytePos(a), hi: BytePos(b), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, } } diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 767ec94a0ce61..923e8072f4346 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -13,7 +13,6 @@ use self::State::*; use syntax::ast; -use syntax::codemap; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; @@ -240,15 +239,6 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, } } - let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: sp, - callee: codemap::NameAndSpan { - format: codemap::MacroBang(Symbol::intern("asm")), - span: None, - allow_internal_unstable: false, - }, - }); - MacEager::expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::InlineAsm(P(ast::InlineAsm { @@ -260,7 +250,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, volatile: volatile, alignstack: alignstack, dialect: dialect, - expn_id: expn_id, + ctxt: cx.backtrace(), })), span: sp, attrs: ast::ThinVec::new(), diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index d14b59d6c70e2..1993d6ebe5b49 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -111,7 +111,7 @@ fn cs_clone_shallow(name: &str, ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. - let span = super::allow_unstable(cx, span, "derive(Clone)"); + let span = Span { ctxt: cx.backtrace(), ..span}; let assert_path = cx.path_all(span, true, cx.std_path(&["clone", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 6ab5987a159ca..eef21492debc3 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -58,7 +58,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. 
- let span = super::allow_unstable(cx, span, "derive(Eq)"); + let span = Span { ctxt: cx.backtrace(), ..span }; let assert_path = cx.path_all(span, true, cx.std_path(&["cmp", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index a767716466cb1..ec4cb815960d1 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -66,8 +66,8 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`"), }; - // We want to make sure we have the expn_id set so that we can use unstable methods - let span = Span { expn_id: cx.backtrace(), ..span }; + // We want to make sure we have the ctxt set so that we can use unstable methods + let span = Span { ctxt: cx.backtrace(), ..span }; let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked)); let builder = Ident::from_str("builder"); let builder_expr = cx.expr_ident(span, builder.clone()); diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 48e7ff0d24370..1ff0fec1c96a6 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -375,7 +375,7 @@ fn find_type_parameters(ty: &ast::Ty, } fn visit_mac(&mut self, mac: &ast::Mac) { - let span = Span { expn_id: self.span.expn_id, ..mac.span }; + let span = Span { ctxt: self.span.ctxt, ..mac.span }; self.cx.span_err(span, "`derive` cannot be used on items with type macros"); } } @@ -1458,7 +1458,7 @@ impl<'a> MethodDef<'a> { .iter() .map(|v| { let ident = v.node.name; - let sp = Span { expn_id: trait_.span.expn_id, ..v.span }; + let sp = Span { ctxt: trait_.span.ctxt, ..v.span }; let summary = trait_.summarise_struct(cx, &v.node.data); (ident, sp, summary) }) @@ -1478,7 +1478,7 @@ impl<'a> TraitDef<'a> { let mut named_idents = Vec::new(); let mut just_spans = Vec::new(); for field in struct_def.fields() { - let sp = Span { expn_id: self.span.expn_id, ..field.span }; + let sp = Span { ctxt: self.span.ctxt, ..field.span }; match field.ident { Some(ident) => named_idents.push((ident, sp)), _ => just_spans.push(sp), @@ -1523,7 +1523,7 @@ impl<'a> TraitDef<'a> { let mut paths = Vec::new(); let mut ident_exprs = Vec::new(); for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = Span { expn_id: self.span.expn_id, ..struct_field.span }; + let sp = Span { ctxt: self.span.ctxt, ..struct_field.span }; let ident = cx.ident_of(&format!("{}_{}", prefix, i)); paths.push(codemap::Spanned { span: sp, @@ -1544,7 +1544,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } codemap::Spanned { - span: Span { expn_id: self.span.expn_id, ..pat.span }, + span: Span { ctxt: self.span.ctxt, ..pat.span }, node: ast::FieldPat { ident: ident.unwrap(), pat: pat, @@ -1576,7 +1576,7 @@ impl<'a> TraitDef<'a> { mutbl: ast::Mutability) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { let variant_ident = variant.node.name; - let sp = Span { expn_id: self.span.expn_id, ..variant.span }; + let sp = Span { ctxt: self.span.ctxt, ..variant.span }; let variant_path = cx.path(sp, vec![enum_ident, variant_ident]); self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl) } diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index b51591bf89d5e..b2bb43e41ed9e 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ 
b/src/libsyntax_ext/deriving/mod.rs @@ -12,9 +12,9 @@ use std::rc::Rc; use syntax::ast; -use syntax::codemap; use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension, Resolver}; use syntax::ext::build::AstBuilder; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; @@ -74,20 +74,6 @@ pub mod ord; pub mod generic; -fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { - Span { - expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern(attr_name)), - span: Some(span), - allow_internal_unstable: true, - }, - }), - ..span - } -} - macro_rules! derive_traits { ($( $name:expr => $func:path, )+) => { pub fn is_builtin_trait(name: ast::Name) -> bool { @@ -177,15 +163,15 @@ fn call_intrinsic(cx: &ExtCtxt, intrinsic: &str, args: Vec>) -> P { - span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern("derive")), - span: Some(span), - allow_internal_unstable: true, - }, - }); - + if cx.current_expansion.mark.expn_info().unwrap().callee.allow_internal_unstable { + span.ctxt = cx.backtrace(); + } else { // Avoid instability errors with user defined curstom derives, cc #36316 + let mut info = cx.current_expansion.mark.expn_info().unwrap(); + info.callee.allow_internal_unstable = true; + let mark = Mark::fresh(); + mark.set_expn_info(info); + span.ctxt = SyntaxContext::empty().apply_mark(mark); + } let path = cx.std_path(&["intrinsics", intrinsic]); let call = cx.expr_call_global(span, path, args); diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index d2afa08cadaf4..aeb5b1e0a532b 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -641,10 +641,11 @@ impl<'a, 'b> Context<'a, 'b> { fn format_arg(ecx: &ExtCtxt, macsp: Span, - sp: Span, + mut sp: Span, ty: &ArgumentType, arg: P) -> P { + sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); let trait_ = match *ty { Placeholder(ref tyname) => { match &tyname[..] 
{ diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 2d815b3f1bb7d..bb89caab709b0 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -17,6 +17,7 @@ use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::fold::Folder; use syntax::parse::ParseSess; use syntax::ptr::P; @@ -360,7 +361,8 @@ fn mk_registrar(cx: &mut ExtCtxt, custom_derives: &[ProcMacroDerive], custom_attrs: &[ProcMacroDef], custom_macros: &[ProcMacroDef]) -> P { - let eid = cx.codemap().record_expansion(ExpnInfo { + let mark = Mark::fresh(); + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("proc_macro")), @@ -368,7 +370,7 @@ fn mk_registrar(cx: &mut ExtCtxt, allow_internal_unstable: true, } }); - let span = Span { expn_id: eid, ..DUMMY_SP }; + let span = Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..DUMMY_SP }; let proc_macro = Ident::from_str("proc_macro"); let krate = cx.item(span, diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index feebbcd6f03b6..8a9ff647b3ea1 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -15,12 +15,16 @@ //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 +use Span; +use symbol::Symbol; + +use serialize::{Encodable, Decodable, Encoder, Decoder}; use std::cell::RefCell; use std::collections::HashMap; use std::fmt; /// A SyntaxContext represents a chain of macro expansions (represented by marks). 
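std_inject, the test harness, the proc-macro registrar and call_intrinsic above all switch to the same pattern: allocate a fresh Mark, attach ExpnInfo to it, and build the generated code's span with SyntaxContext::empty().apply_mark(mark). A syntax context is conceptually a chain of such marks; the sketch below models the chain with a plain Vec so apply_mark and outer stay obvious. The real HygieneData interns contexts and caches markings, and its outer() returns the root mark rather than None for an empty context, so treat this purely as an illustration.

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Mark(u32);

    // Simplified syntax context: the marks applied so far, outermost last.
    struct SyntaxContext {
        marks: Vec<Mark>,
    }

    impl SyntaxContext {
        fn empty() -> SyntaxContext {
            SyntaxContext { marks: Vec::new() }
        }

        // Extend the chain with one more expansion step.
        fn apply_mark(mut self, mark: Mark) -> SyntaxContext {
            self.marks.push(mark);
            self
        }

        // The mark of the outermost (most recent) expansion, if any.
        fn outer(&self) -> Option<Mark> {
            self.marks.last().copied()
        }
    }

    fn main() {
        let std_inject_mark = Mark(1);
        let ctxt = SyntaxContext::empty().apply_mark(std_inject_mark);
        assert_eq!(ctxt.outer(), Some(std_inject_mark));
    }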
-#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Default)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SyntaxContext(u32); #[derive(Copy, Clone)] @@ -36,8 +40,8 @@ pub struct Mark(u32); impl Mark { pub fn fresh() -> Self { HygieneData::with(|data| { - let next_mark = Mark(data.next_mark.0 + 1); - ::std::mem::replace(&mut data.next_mark, next_mark) + data.marks.push(None); + Mark(data.marks.len() as u32 - 1) }) } @@ -53,23 +57,31 @@ impl Mark { pub fn from_u32(raw: u32) -> Mark { Mark(raw) } + + pub fn expn_info(self) -> Option { + HygieneData::with(|data| data.marks[self.0 as usize].clone()) + } + + pub fn set_expn_info(self, info: ExpnInfo) { + HygieneData::with(|data| data.marks[self.0 as usize] = Some(info)) + } } struct HygieneData { + marks: Vec>, syntax_contexts: Vec, markings: HashMap<(SyntaxContext, Mark), SyntaxContext>, - next_mark: Mark, } impl HygieneData { fn new() -> Self { HygieneData { + marks: vec![None], syntax_contexts: vec![SyntaxContextData { outer_mark: Mark::root(), prev_ctxt: SyntaxContext::empty(), }], markings: HashMap::new(), - next_mark: Mark(1), } } @@ -81,8 +93,8 @@ impl HygieneData { } } -pub fn reset_hygiene_data() { - HygieneData::with(|data| *data = HygieneData::new()) +pub fn clear_markings() { + HygieneData::with(|data| data.markings = HashMap::new()); } impl SyntaxContext { @@ -113,6 +125,10 @@ impl SyntaxContext { }) }) } + + pub fn outer(self) -> Mark { + HygieneData::with(|data| data.syntax_contexts[self.0 as usize].outer_mark) + } } impl fmt::Debug for SyntaxContext { @@ -120,3 +136,67 @@ impl fmt::Debug for SyntaxContext { write!(f, "#{}", self.0) } } + +/// Extra information for tracking spans of macro and syntax sugar expansion +#[derive(Clone, Hash, Debug)] +pub struct ExpnInfo { + /// The location of the actual macro invocation or syntax sugar , e.g. + /// `let x = foo!();` or `if let Some(y) = x {}` + /// + /// This may recursively refer to other macro invocations, e.g. if + /// `foo!()` invoked `bar!()` internally, and there was an + /// expression inside `bar!`; the call_site of the expression in + /// the expansion would point to the `bar!` invocation; that + /// call_site span would have its own ExpnInfo, with the call_site + /// pointing to the `foo!` invocation. + pub call_site: Span, + /// Information about the expansion. + pub callee: NameAndSpan +} + +#[derive(Clone, Hash, Debug)] +pub struct NameAndSpan { + /// The format with which the macro was invoked. + pub format: ExpnFormat, + /// Whether the macro is allowed to use #[unstable]/feature-gated + /// features internally without forcing the whole crate to opt-in + /// to them. + pub allow_internal_unstable: bool, + /// The span of the macro definition itself. The macro may not + /// have a sensible definition span (e.g. something defined + /// completely inside libsyntax) in which case this is None. + pub span: Option +} + +impl NameAndSpan { + pub fn name(&self) -> Symbol { + match self.format { + ExpnFormat::MacroAttribute(s) | + ExpnFormat::MacroBang(s) | + ExpnFormat::CompilerDesugaring(s) => s, + } + } +} + +/// The source of expansion. +#[derive(Clone, Hash, Debug, PartialEq, Eq)] +pub enum ExpnFormat { + /// e.g. #[derive(...)] + MacroAttribute(Symbol), + /// e.g. `format!()` + MacroBang(Symbol), + /// Desugaring done by the compiler during HIR lowering. 
+ CompilerDesugaring(Symbol) +} + +impl Encodable for SyntaxContext { + fn encode(&self, _: &mut E) -> Result<(), E::Error> { + Ok(()) // FIXME(jseyfried) intercrate hygiene + } +} + +impl Decodable for SyntaxContext { + fn decode(_: &mut D) -> Result { + Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene + } +} diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 1b62d62348bc8..9b45e364ecf34 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -25,6 +25,7 @@ #![feature(const_fn)] #![feature(custom_attribute)] +#![feature(optin_builtin_traits)] #![allow(unused_attributes)] #![feature(rustc_private)] #![feature(staged_api)] @@ -43,6 +44,9 @@ extern crate serialize; extern crate serialize as rustc_serialize; // used by deriving pub mod hygiene; +pub use hygiene::{SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan}; + +pub mod symbol; pub type FileName = String; @@ -60,7 +64,7 @@ pub struct Span { pub hi: BytePos, /// Information about where the macro came from, if this piece of /// code was created by a macro expansion. - pub expn_id: ExpnId + pub ctxt: SyntaxContext, } /// A collection of spans. Spans have two orthogonal attributes: @@ -79,7 +83,7 @@ impl Span { /// Returns a new span representing just the end-point of this span pub fn end_point(self) -> Span { let lo = cmp::max(self.hi.0 - 1, self.lo.0); - Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id} + Span { lo: BytePos(lo), hi: self.hi, ctxt: self.ctxt } } /// Returns `self` if `self` is not the dummy span, and `other` otherwise. @@ -107,6 +111,69 @@ impl Span { None } } + + /// Return the source span - this is either the supplied span, or the span for + /// the macro callsite that expanded to it. + pub fn source_callsite(self) -> Span { + self.ctxt.outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) + } + + /// Return the source callee. + /// + /// Returns None if the supplied span has no expansion trace, + /// else returns the NameAndSpan for the macro definition + /// corresponding to the source callsite. + pub fn source_callee(self) -> Option { + fn source_callee(info: ExpnInfo) -> NameAndSpan { + match info.call_site.ctxt.outer().expn_info() { + Some(info) => source_callee(info), + None => info.callee, + } + } + self.ctxt.outer().expn_info().map(source_callee) + } + + /// Check if a span is "internal" to a macro in which #[unstable] + /// items can be used (that is, a macro marked with + /// `#[allow_internal_unstable]`). + pub fn allows_unstable(&self) -> bool { + match self.ctxt.outer().expn_info() { + Some(info) => info.callee.allow_internal_unstable, + None => false, + } + } + + pub fn macro_backtrace(mut self) -> Vec { + let mut prev_span = DUMMY_SP; + let mut result = vec![]; + loop { + let info = match self.ctxt.outer().expn_info() { + Some(info) => info, + None => break, + }; + + let (pre, post) = match info.callee.format { + ExpnFormat::MacroAttribute(..) => ("#[", "]"), + ExpnFormat::MacroBang(..) => ("", "!"), + ExpnFormat::CompilerDesugaring(..) 
=> ("desugaring of `", "`"), + }; + let macro_decl_name = format!("{}{}{}", pre, info.callee.name(), post); + let def_site_span = info.callee.span; + + // Don't print recursive invocations + if !info.call_site.source_equal(&prev_span) { + result.push(MacroBacktrace { + call_site: info.call_site, + macro_decl_name: macro_decl_name, + def_site_span: def_site_span, + }); + } + + prev_span = self; + self = info.call_site; + } + result + } } #[derive(Clone, Debug)] @@ -147,8 +214,8 @@ impl serialize::UseSpecializedDecodable for Span { } fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Span {{ lo: {:?}, hi: {:?}, expn_id: {:?} }}", - span.lo, span.hi, span.expn_id) + write!(f, "Span {{ lo: {:?}, hi: {:?}, ctxt: {:?} }}", + span.lo, span.hi, span.ctxt) } impl fmt::Debug for Span { @@ -157,12 +224,7 @@ impl fmt::Debug for Span { } } -pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION }; - -// Generic span to be used for code originating from the command line -pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0), - hi: BytePos(0), - expn_id: COMMAND_LINE_EXPN }; +pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), ctxt: NO_EXPANSION }; impl MultiSpan { pub fn new() -> MultiSpan { @@ -256,22 +318,7 @@ impl From for MultiSpan { } } -#[derive(PartialEq, Eq, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Copy, Ord, PartialOrd)] -pub struct ExpnId(pub u32); - -pub const NO_EXPANSION: ExpnId = ExpnId(!0); -// For code appearing from the command line -pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); - -impl ExpnId { - pub fn from_u32(id: u32) -> ExpnId { - ExpnId(id) - } - - pub fn into_u32(self) -> u32 { - self.0 - } -} +pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty(); /// Identifies an offset of a multi-byte character in a FileMap #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)] @@ -651,7 +698,7 @@ thread_local!(pub static SPAN_DEBUG: Cell fmt:: /* assuming that we're not in macro expansion */ pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span {lo: lo, hi: hi, expn_id: NO_EXPANSION} + Span {lo: lo, hi: hi, ctxt: NO_EXPANSION} } pub struct MacroBacktrace { diff --git a/src/libsyntax/symbol.rs b/src/libsyntax_pos/symbol.rs similarity index 87% rename from src/libsyntax/symbol.rs rename to src/libsyntax_pos/symbol.rs index 2acbeee426bee..b866652c49f85 100644 --- a/src/libsyntax/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -12,11 +12,58 @@ //! allows bidirectional lookup; i.e. given a value, one can easily find the //! type, and vice versa. +use hygiene::SyntaxContext; + use serialize::{Decodable, Decoder, Encodable, Encoder}; use std::cell::RefCell; use std::collections::HashMap; use std::fmt; +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Ident { + pub name: Symbol, + pub ctxt: SyntaxContext, +} + +impl Ident { + pub const fn with_empty_ctxt(name: Symbol) -> Ident { + Ident { name: name, ctxt: SyntaxContext::empty() } + } + + /// Maps a string to an identifier with an empty syntax context. 
+ pub fn from_str(string: &str) -> Ident { + Ident::with_empty_ctxt(Symbol::intern(string)) + } + + pub fn unhygienize(self) -> Ident { + Ident { name: self.name, ctxt: SyntaxContext::empty() } + } +} + +impl fmt::Debug for Ident { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}{:?}", self.name, self.ctxt) + } +} + +impl fmt::Display for Ident { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.name, f) + } +} + +impl Encodable for Ident { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.name.encode(s) + } +} + +impl Decodable for Ident { + fn decode(d: &mut D) -> Result { + Ok(Ident::with_empty_ctxt(Symbol::decode(d)?)) + } +} + /// A symbol is an interned or gensymed string. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Symbol(u32); @@ -128,19 +175,19 @@ macro_rules! declare_keywords {( $( ($index: expr, $konst: ident, $string: expr) )* ) => { pub mod keywords { - use ast; + use super::{Symbol, Ident}; #[derive(Clone, Copy, PartialEq, Eq)] pub struct Keyword { - ident: ast::Ident, + ident: Ident, } impl Keyword { - #[inline] pub fn ident(self) -> ast::Ident { self.ident } - #[inline] pub fn name(self) -> ast::Name { self.ident.name } + #[inline] pub fn ident(self) -> Ident { self.ident } + #[inline] pub fn name(self) -> Symbol { self.ident.name } } $( #[allow(non_upper_case_globals)] pub const $konst: Keyword = Keyword { - ident: ast::Ident::with_empty_ctxt(super::Symbol($index)) + ident: Ident::with_empty_ctxt(super::Symbol($index)) }; )* } diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index bd25561065bea..272bf1150cacb 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -27,14 +27,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; assert_eq!(pprust::expr_to_string(&*quote_expr!(&cx, 23)), "23"); diff --git a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index d692bb519c149..5518ab47c2bd2 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -30,14 +30,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; println!("{}", pprust::expr_to_string(&*quote_expr!(&cx, 23))); diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index b4ed57192ccf6..4a8246ec429c0 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -26,14 +26,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; macro_rules! 
check { From f08d5ad4c59ca5fc1c961a94c53807d70959c375 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Wed, 15 Mar 2017 00:22:48 +0000 Subject: [PATCH 4/5] Refactor how spans are combined in the parser. --- src/librustc/middle/resolve_lifetime.rs | 5 +- src/librustc_metadata/cstore_impl.rs | 4 +- src/librustc_metadata/decoder.rs | 6 +- src/librustc_save_analysis/lib.rs | 2 +- src/librustc_save_analysis/span_utils.rs | 4 +- src/libsyntax/ast.rs | 10 +- src/libsyntax/attr.rs | 15 +- src/libsyntax/codemap.rs | 4 - src/libsyntax/ext/tt/macro_parser.rs | 9 +- src/libsyntax/parse/attr.rs | 29 +- src/libsyntax/parse/lexer/mod.rs | 42 +- src/libsyntax/parse/lexer/unicode_chars.rs | 4 +- src/libsyntax/parse/mod.rs | 6 +- src/libsyntax/parse/parser.rs | 617 ++++++++---------- src/libsyntax_ext/format.rs | 15 +- src/libsyntax_pos/lib.rs | 16 +- src/test/compile-fail/imports/macro-paths.rs | 2 - src/test/compile-fail/imports/macros.rs | 2 - .../imports/shadow_builtin_macros.rs | 1 - src/test/compile-fail/issue-25385.rs | 1 - .../run-pass/syntax-extension-source-utils.rs | 2 +- .../macros/macro_path_as_generic_bound.stderr | 5 +- 22 files changed, 363 insertions(+), 438 deletions(-) diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 5094e28475b26..8037570d24a80 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -29,7 +29,7 @@ use syntax::ast; use syntax::attr; use syntax::ptr::P; use syntax::symbol::keywords; -use syntax_pos::{mk_sp, Span}; +use syntax_pos::Span; use errors::DiagnosticBuilder; use util::nodemap::{NodeMap, NodeSet, FxHashSet, FxHashMap, DefIdMap}; use rustc_back::slice; @@ -1468,8 +1468,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { self.resolve_lifetime_ref(bound); } else { self.insert_lifetime(bound, Region::Static); - let full_span = mk_sp(lifetime_i.lifetime.span.lo, bound.span.hi); - self.sess.struct_span_warn(full_span, + self.sess.struct_span_warn(lifetime_i.lifetime.span.to(bound.span), &format!("unnecessary lifetime parameter `{}`", lifetime_i.lifetime.name)) .help(&format!("you can use the `'static` lifetime directly, in place \ of `{}`", lifetime_i.lifetime.name)) diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 3e9b6a6226ab5..41a2e8a8d55e3 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -36,7 +36,7 @@ use syntax::ast; use syntax::attr; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; -use syntax_pos::{mk_sp, Span}; +use syntax_pos::{Span, NO_EXPANSION}; use rustc::hir::svh::Svh; use rustc_back::target::Target; use rustc::hir; @@ -395,7 +395,7 @@ impl CrateStore for cstore::CStore { let source_name = format!("<{} macros>", name); let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body); - let local_span = mk_sp(filemap.start_pos, filemap.end_pos); + let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION }; let body = filemap_to_stream(&sess.parse_sess, filemap); // Mark the attrs as used diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 3de1e3442c69d..43e076e799b3d 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -39,7 +39,7 @@ use syntax::attr; use syntax::ast; use syntax::codemap; use syntax::ext::base::MacroKind; -use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP}; +use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, 
NO_EXPANSION}; pub struct DecodeContext<'a, 'tcx: 'a> { opaque: opaque::Decoder<'a>, @@ -243,7 +243,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let sess = if let Some(sess) = self.sess { sess } else { - return Ok(syntax_pos::mk_sp(lo, hi)); + return Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }); }; let (lo, hi) = if lo > hi { @@ -290,7 +290,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos; let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos; - Ok(syntax_pos::mk_sp(lo, hi)) + Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }) } } diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index fd6803e087a08..1de9fbc8e4941 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -742,7 +742,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { let ident_start = text.find(&name).expect("Name not in signature?"); let ident_end = ident_start + name.len(); Signature { - span: mk_sp(item.span.lo, item.span.lo + BytePos(text.len() as u32)), + span: Span { hi: item.span.lo + BytePos(text.len() as u32), ..item.span }, text: text, ident_start: ident_start, ident_end: ident_end, diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index c19f805a28575..af3efb4809081 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> { continue; } if let TokenTree::Token(_, token::Semi) = tok { - return self.snippet(mk_sp(first_span.lo, prev.span().hi)); + return self.snippet(first_span.to(prev.span())); } else if let TokenTree::Delimited(_, ref d) = tok { if d.delim == token::Brace { - return self.snippet(mk_sp(first_span.lo, prev.span().hi)); + return self.snippet(first_span.to(prev.span())); } } prev = tok; diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index a4bebd311ded2..9eb86aa006d17 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -17,7 +17,7 @@ pub use self::PathParameters::*; pub use symbol::{Ident, Symbol as Name}; pub use util::ThinVec; -use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; @@ -1433,7 +1433,7 @@ impl Arg { TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyKind::ImplicitSelf => { Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) } - _ => Some(respan(mk_sp(self.pat.span.lo, self.ty.span.hi), + _ => Some(respan(self.pat.span.to(self.ty.span), SelfKind::Explicit(self.ty.clone(), mutbl))), } } @@ -1450,7 +1450,7 @@ impl Arg { } pub fn from_self(eself: ExplicitSelf, eself_ident: SpannedIdent) -> Arg { - let span = mk_sp(eself.span.lo, eself_ident.span.hi); + let span = eself.span.to(eself_ident.span); let infer_ty = P(Ty { id: DUMMY_NODE_ID, node: TyKind::ImplicitSelf, @@ -1687,11 +1687,11 @@ pub struct PolyTraitRef { } impl PolyTraitRef { - pub fn new(lifetimes: Vec, path: Path, lo: BytePos, hi: BytePos) -> Self { + pub fn new(lifetimes: Vec, path: Path, span: Span) -> Self { PolyTraitRef { bound_lifetimes: lifetimes, trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID }, - span: mk_sp(lo, hi), + span: span, } } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 2f1efd6ad00ee..5dcce2572af87 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -18,8 +18,8 @@ use ast; use 
ast::{AttrId, Attribute, Name, Ident}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind}; -use codemap::{Spanned, spanned, dummy_spanned, mk_sp}; -use syntax_pos::{Span, BytePos, DUMMY_SP}; +use codemap::{Spanned, respan, dummy_spanned}; +use syntax_pos::{Span, DUMMY_SP}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; @@ -447,17 +447,16 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute } } -pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos) - -> Attribute { +pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute { let style = doc_comment_style(&text.as_str()); - let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked)); + let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked)); Attribute { id: id, style: style, - path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")), - tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)), + path: ast::Path::from_ident(span, ast::Ident::from_str("doc")), + tokens: MetaItemKind::NameValue(lit).tokens(span), is_sugared_doc: true, - span: mk_sp(lo, hi), + span: span, } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index ba199eacb6276..4d67390d44234 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -49,10 +49,6 @@ pub struct Spanned { pub span: Span, } -pub fn spanned(lo: BytePos, hi: BytePos, t: T) -> Spanned { - respan(mk_sp(lo, hi), t) -} - pub fn respan(sp: Span, t: T) -> Spanned { Spanned {node: t, span: sp} } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index ed17f0f956cf3..9ee427eed3556 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -79,7 +79,7 @@ pub use self::ParseResult::*; use self::TokenTreeOrTokenTreeVec::*; use ast::Ident; -use syntax_pos::{self, BytePos, mk_sp, Span}; +use syntax_pos::{self, BytePos, Span}; use codemap::Spanned; use errors::FatalError; use ext::tt::quoted::{self, TokenTree}; @@ -285,7 +285,7 @@ fn inner_parse_loop(sess: &ParseSess, eof_eis: &mut SmallVector>, bb_eis: &mut SmallVector>, token: &Token, - span: &syntax_pos::Span) + span: syntax_pos::Span) -> ParseResult<()> { while let Some(mut ei) = cur_eis.pop() { // When unzipped trees end, remove them @@ -323,8 +323,7 @@ fn inner_parse_loop(sess: &ParseSess, for idx in ei.match_lo..ei.match_hi { let sub = ei.matches[idx].clone(); new_pos.matches[idx] - .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo, - span.hi)))); + .push(Rc::new(MatchedSeq(sub, Span { lo: ei.sp_lo, ..span }))); } new_pos.match_cur = ei.match_hi; @@ -426,7 +425,7 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op assert!(next_eis.is_empty()); match inner_parse_loop(sess, &mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis, - &parser.token, &parser.span) { + &parser.token, parser.span) { Success(_) => {}, Failure(sp, tok) => return Failure(sp, tok), Error(sp, msg) => return Error(sp, msg), diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 53106214fa310..92cec462ffb7c 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -10,8 +10,7 @@ use attr; use ast; -use syntax_pos::{mk_sp, Span}; -use codemap::spanned; +use codemap::respan; use parse::common::SeqSep; use parse::PResult; use 
parse::token::{self, Nonterminal}; @@ -49,8 +48,7 @@ impl<'a> Parser<'a> { just_parsed_doc_comment = false; } token::DocComment(s) => { - let Span { lo, hi, .. } = self.span; - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); if attr.style != ast::AttrStyle::Outer { let mut err = self.fatal("expected outer doc comment"); err.note("inner doc comments like this (starting with \ @@ -94,7 +92,7 @@ impl<'a> Parser<'a> { self.token); let (span, path, tokens, mut style) = match self.token { token::Pound => { - let lo = self.span.lo; + let lo = self.span; self.bump(); if inner_parse_policy == InnerAttributeParsePolicy::Permitted { @@ -122,9 +120,9 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Bracket))?; let (path, tokens) = self.parse_path_and_tokens()?; self.expect(&token::CloseDelim(token::Bracket))?; - let hi = self.prev_span.hi; + let hi = self.prev_span; - (mk_sp(lo, hi), path, tokens, style) + (lo.to(hi), path, tokens, style) } _ => { let token_str = self.this_token_to_string(); @@ -189,8 +187,7 @@ impl<'a> Parser<'a> { } token::DocComment(s) => { // we need to get the position of this token before we bump. - let Span { lo, hi, .. } = self.span; - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); if attr.style == ast::AttrStyle::Inner { attrs.push(attr); self.bump(); @@ -238,11 +235,10 @@ impl<'a> Parser<'a> { return Ok(meta); } - let lo = self.span.lo; + let lo = self.span; let ident = self.parse_ident()?; let node = self.parse_meta_item_kind()?; - let hi = self.prev_span.hi; - Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) + Ok(ast::MetaItem { name: ident.name, node: node, span: lo.to(self.prev_span) }) } pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { @@ -258,26 +254,25 @@ impl<'a> Parser<'a> { /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ; fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> { - let sp = self.span; - let lo = self.span.lo; + let lo = self.span; match self.parse_unsuffixed_lit() { Ok(lit) => { - return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::Literal(lit))) + return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::Literal(lit))) } Err(ref mut err) => self.diagnostic().cancel(err) } match self.parse_meta_item() { Ok(mi) => { - return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::MetaItem(mi))) + return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::MetaItem(mi))) } Err(ref mut err) => self.diagnostic().cancel(err) } let found = self.this_token_to_string(); let msg = format!("expected unsuffixed literal or identifier, found {}", found); - Err(self.diagnostic().struct_span_err(sp, &msg)) + Err(self.diagnostic().struct_span_err(lo, &msg)) } /// matches meta_seq = ( COMMASEP(meta_item_inner) ) diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d48cf6911ed37..920b2c401e2bd 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -9,7 +9,7 @@ // except according to those terms. 
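The parser-facing hunks in this patch replace hand-assembled mk_sp(lo, hi) spans with calls like lo_span.to(self.prev_span): the combined span takes its start from the first span and its end from the second, without discarding expansion information along the way. The sketch below shows that combinator on a simplified Span; u32 offsets and an integer ctxt stand in for BytePos and SyntaxContext, and the real helper has a little more logic for deciding which of the two contexts to keep.

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Span {
        lo: u32,
        hi: u32,
        ctxt: u32,
    }

    impl Span {
        // Span covering both `self` and `end`; simplified to keep `self`'s context.
        fn to(self, end: Span) -> Span {
            Span { lo: self.lo, hi: end.hi, ctxt: self.ctxt }
        }
    }

    fn main() {
        // e.g. the `#` token and the closing `]` of an attribute.
        let pound = Span { lo: 10, hi: 11, ctxt: 0 };
        let close_bracket = Span { lo: 25, hi: 26, ctxt: 0 };
        let attr_span = pound.to(close_bracket);
        assert_eq!(attr_span, Span { lo: 10, hi: 26, ctxt: 0 });
    }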
use ast::{self, Ident}; -use syntax_pos::{self, BytePos, CharPos, Pos, Span}; +use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION}; use codemap::CodeMap; use errors::{FatalError, DiagnosticBuilder}; use parse::{token, ParseSess}; @@ -68,6 +68,10 @@ pub struct StringReader<'a> { open_braces: Vec<(token::DelimToken, Span)>, } +fn mk_sp(lo: BytePos, hi: BytePos) -> Span { + Span { lo: lo, hi: hi, ctxt: NO_EXPANSION } +} + impl<'a> StringReader<'a> { fn next_token(&mut self) -> TokenAndSpan where Self: Sized { let res = self.try_next_token(); @@ -225,12 +229,12 @@ impl<'a> StringReader<'a> { /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { - self.fatal_span(syntax_pos::mk_sp(from_pos, to_pos), m) + self.fatal_span(mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(syntax_pos::mk_sp(from_pos, to_pos), m) + self.err_span(mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -254,7 +258,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -278,7 +282,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -302,11 +306,11 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; - self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, self.filemap.end_pos); + self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = syntax_pos::mk_sp(start_bytepos, self.pos); + self.peek_span = mk_sp(start_bytepos, self.pos); }; } } @@ -489,7 +493,7 @@ impl<'a> StringReader<'a> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; - self.sess.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos), msg); + self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg); } } @@ -532,13 +536,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }; } @@ -571,7 +575,7 @@ impl<'a> StringReader<'a> { } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: syntax_pos::mk_sp(start, self.pos), + sp: mk_sp(start, self.pos), }); } } @@ -599,7 +603,7 @@ impl<'a> StringReader<'a> { } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -661,7 +665,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }) } @@ -858,7 +862,7 @@ impl<'a> 
StringReader<'a> { let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = syntax_pos::mk_sp(start, self.pos); + let span = mk_sp(start, self.pos); self.sess.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -896,13 +900,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(syntax_pos::mk_sp(escaped_pos, pos), + err.span_help(mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - err.span_help(syntax_pos::mk_sp(escaped_pos, pos), + err.span_help(mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } @@ -1735,7 +1739,7 @@ mod tests { sp: Span { lo: BytePos(21), hi: BytePos(23), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }, }; assert_eq!(tok1, tok2); @@ -1749,7 +1753,7 @@ mod tests { sp: Span { lo: BytePos(24), hi: BytePos(28), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }, }; assert_eq!(tok3, tok4); @@ -1908,7 +1912,7 @@ mod tests { let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); assert_eq!(comment.tok, token::Comment); - assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7))); + assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, token::DocComment(Symbol::intern("/// test"))); diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index 6da3e5de75cdc..4df23da3c9ce3 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -11,7 +11,7 @@ // Characters and their corresponding confusables were collected from // http://www.unicode.org/Public/security/revision-06/confusables.txt -use syntax_pos::mk_sp as make_span; +use syntax_pos::{Span, NO_EXPANSION}; use errors::DiagnosticBuilder; use super::StringReader; @@ -234,7 +234,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>, .iter() .find(|&&(c, _, _)| c == ch) .map(|&(_, u_name, ascii_char)| { - let span = make_span(reader.pos, reader.next_pos); + let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION }; match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) { Some(&(ascii_char, ascii_name)) => { let msg = diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index e188bcaf105f2..b5d0a46de4926 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -12,7 +12,7 @@ use ast::{self, CrateConfig}; use codemap::CodeMap; -use syntax_pos::{self, Span, FileMap}; +use syntax_pos::{self, Span, FileMap, NO_EXPANSION}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; use parse::parser::Parser; @@ -178,7 +178,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, ) -> Par let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { - parser.span = syntax_pos::mk_sp(end_pos, end_pos); + parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION }; } parser @@ -665,7 +665,7 @@ mod tests { // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { - Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION} + Span {lo: BytePos(a), hi: BytePos(b), ctxt: NO_EXPANSION} } fn str2seg(s: &str, 
lo: u32, hi: u32) -> ast::PathSegment { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index e9eb4fbcc9162..b0611d7529062 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -40,8 +40,8 @@ use ast::{Visibility, WhereClause}; use ast::{BinOpKind, UnOp}; use ast::RangeEnd; use {ast, attr}; -use codemap::{self, CodeMap, Spanned, spanned, respan}; -use syntax_pos::{self, Span, BytePos, mk_sp}; +use codemap::{self, CodeMap, Spanned, respan}; +use syntax_pos::{self, Span, BytePos}; use errors::{self, DiagnosticBuilder}; use parse::{self, classify, token}; use parse::common::SeqSep; @@ -108,13 +108,13 @@ macro_rules! maybe_whole_expr { $p.bump(); let span = $p.span; let kind = ExprKind::Path(None, (*path).clone()); - return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new())); + return Ok($p.mk_expr(span, kind, ThinVec::new())); } token::NtBlock(ref block) => { $p.bump(); let span = $p.span; let kind = ExprKind::Block((*block).clone()); - return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new())); + return Ok($p.mk_expr(span, kind, ThinVec::new())); } _ => {}, }; @@ -731,7 +731,7 @@ impl<'a> Parser<'a> { token::AndAnd => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::BinOp(token::And), lo, span.hi)) + Ok(self.bump_with(token::BinOp(token::And), Span { lo: lo, ..span })) } _ => self.unexpected() } @@ -765,7 +765,7 @@ impl<'a> Parser<'a> { token::BinOp(token::Shl) => { let span = self.span; let lo = span.lo + BytePos(1); - self.bump_with(token::Lt, lo, span.hi); + self.bump_with(token::Lt, Span { lo: lo, ..span }); true } _ => false, @@ -793,17 +793,17 @@ impl<'a> Parser<'a> { token::BinOp(token::Shr) => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Gt, lo, span.hi)) + Ok(self.bump_with(token::Gt, Span { lo: lo, ..span })) } token::BinOpEq(token::Shr) => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Ge, lo, span.hi)) + Ok(self.bump_with(token::Ge, Span { lo: lo, ..span })) } token::Ge => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Eq, lo, span.hi)) + Ok(self.bump_with(token::Eq, Span { lo: lo, ..span })) } _ => self.unexpected() } @@ -997,12 +997,12 @@ impl<'a> Parser<'a> { -> PResult<'a, Spanned>> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { - let lo = self.span.lo; + let lo = self.span; self.expect(bra)?; let result = self.parse_seq_to_before_end(ket, sep, f); - let hi = self.span.hi; + let hi = self.span; self.bump(); - Ok(spanned(lo, hi, result)) + Ok(respan(lo.to(hi), result)) } /// Advance the parser by one token @@ -1033,16 +1033,13 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. - pub fn bump_with(&mut self, - next: token::Token, - lo: BytePos, - hi: BytePos) { - self.prev_span = mk_sp(self.span.lo, lo); + pub fn bump_with(&mut self, next: token::Token, span: Span) { + self.prev_span = Span { hi: span.lo, ..self.span }; // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. 
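Note: `bump_with` now takes the replacement token together with its full `Span`. Callers that split a composite token build that span with struct-update syntax, moving only the start position; this is exactly what the `&&`/`<<`/`>>` hunks above do:

    // eat the first `<` of `<<`, leaving a `<` token behind
    let span = self.span;              // span of the whole `<<`
    let lo = span.lo + BytePos(1);     // skip the byte we just consumed
    self.bump_with(token::Lt, Span { lo: lo, ..span });

Keeping `hi` and `ctxt` from the original token means the leftover `<` still reports the right source range and hygiene context.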
self.prev_token_kind = PrevTokenKind::Other; - self.span = mk_sp(lo, hi); + self.span = span; self.token = next; self.expected_tokens.clear(); } @@ -1173,7 +1170,7 @@ impl<'a> Parser<'a> { pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> { maybe_whole!(self, NtTraitItem, |x| x); let mut attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let (name, node) = if self.eat_keyword(keywords::Type) { let TyParam {ident, bounds, default, ..} = self.parse_ty_param(vec![])?; @@ -1197,7 +1194,7 @@ impl<'a> Parser<'a> { } else if self.token.is_path_start() { // trait item macro. // code copied from parse_macro_use_or_failure... abstraction! - let lo = self.span.lo; + let lo = self.span; let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; @@ -1207,7 +1204,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)? } - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts }); (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac)) } else { let (constness, unsafety, abi) = match self.parse_fn_front_matter() { @@ -1277,7 +1274,7 @@ impl<'a> Parser<'a> { ident: name, attrs: attrs, node: node, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), }) } @@ -1298,8 +1295,7 @@ impl<'a> Parser<'a> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_no_plus()?)) } else { - let pos = self.span.lo; - Ok(FunctionRetTy::Default(mk_sp(pos, pos))) + Ok(FunctionRetTy::Default(Span { hi: self.span.lo, ..self.span })) } } @@ -1320,7 +1316,7 @@ impl<'a> Parser<'a> { fn parse_ty_common(&mut self, allow_plus: bool) -> PResult<'a, P> { maybe_whole!(self, NtTy, |x| x); - let lo = self.span.lo; + let lo = self.span; let node = if self.eat(&token::OpenDelim(token::Paren)) { // `(TYPE)` is a parenthesized type. // `(TYPE,)` is a tuple with a single field of type TYPE. @@ -1344,7 +1340,7 @@ impl<'a> Parser<'a> { TyKind::Path(None, ref path) if allow_plus && self.token == token::BinOp(token::Plus) => { self.bump(); // `+` - let pt = PolyTraitRef::new(Vec::new(), path.clone(), lo, self.prev_span.hi); + let pt = PolyTraitRef::new(Vec::new(), path.clone(), lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(pt, TraitBoundModifier::None)]; bounds.append(&mut self.parse_ty_param_bounds()?); TyKind::TraitObject(bounds) @@ -1394,13 +1390,13 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { // Macro invocation in type position let (_, tts) = self.expect_delimited_token_tree()?; - TyKind::Mac(spanned(lo, self.span.hi, Mac_ { path: path, tts: tts })) + TyKind::Mac(respan(lo.to(self.span), Mac_ { path: path, tts: tts })) } else { // Just a type path or bound list (trait object type) starting with a trait. // `Type` // `Trait1 + Trait2 + 'a` if allow_plus && self.eat(&token::BinOp(token::Plus)) { - let poly_trait = PolyTraitRef::new(Vec::new(), path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(Vec::new(), path, lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; bounds.append(&mut self.parse_ty_param_bounds()?); TyKind::TraitObject(bounds) @@ -1415,13 +1411,13 @@ impl<'a> Parser<'a> { // Function pointer type or bound list (trait object type) starting with a poly-trait. 
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let lo = self.span.lo; + let lo = self.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; if self.token_is_bare_fn_keyword() { self.parse_ty_bare_fn(lifetime_defs)? } else { let path = self.parse_path(PathStyle::Type)?; - let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; if allow_plus && self.eat(&token::BinOp(token::Plus)) { bounds.append(&mut self.parse_ty_param_bounds()?) @@ -1440,7 +1436,7 @@ impl<'a> Parser<'a> { return Err(self.fatal(&msg)); }; - let span = mk_sp(lo, self.prev_span.hi); + let span = lo.to(self.prev_span); let ty = Ty { node: node, span: span, id: ast::DUMMY_NODE_ID }; // Try to recover from use of `+` with incorrect priority. @@ -1457,7 +1453,7 @@ impl<'a> Parser<'a> { self.bump(); // `+` let bounds = self.parse_ty_param_bounds()?; - let sum_span = mk_sp(ty.span.lo, self.prev_span.hi); + let sum_span = ty.span.to(self.prev_span); let mut err = struct_span_err!(self.sess.span_diagnostic, ty.span, E0178, "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty)); @@ -1577,7 +1573,7 @@ impl<'a> Parser<'a> { P(Ty { id: ast::DUMMY_NODE_ID, node: TyKind::Infer, - span: mk_sp(self.span.lo, self.span.hi), + span: self.span, }) }; Ok(Arg { @@ -1625,7 +1621,7 @@ impl<'a> Parser<'a> { /// Matches lit = true | false | token_lit pub fn parse_lit(&mut self) -> PResult<'a, Lit> { - let lo = self.span.lo; + let lo = self.span; let lit = if self.eat_keyword(keywords::True) { LitKind::Bool(true) } else if self.eat_keyword(keywords::False) { @@ -1634,22 +1630,22 @@ impl<'a> Parser<'a> { let lit = self.parse_lit_token()?; lit }; - Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.prev_span.hi) }) + Ok(codemap::Spanned { node: lit, span: lo.to(self.prev_span) }) } /// matches '-' lit | lit pub fn parse_pat_literal_maybe_minus(&mut self) -> PResult<'a, P> { - let minus_lo = self.span.lo; + let minus_lo = self.span; let minus_present = self.eat(&token::BinOp(token::Minus)); - let lo = self.span.lo; + let lo = self.span; let literal = P(self.parse_lit()?); - let hi = self.prev_span.hi; - let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), ThinVec::new()); + let hi = self.prev_span; + let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); if minus_present { - let minus_hi = self.prev_span.hi; + let minus_hi = self.prev_span; let unary = self.mk_unary(UnOp::Neg, expr); - Ok(self.mk_expr(minus_lo, minus_hi, unary, ThinVec::new())) + Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new())) } else { Ok(expr) } @@ -1726,7 +1722,7 @@ impl<'a> Parser<'a> { pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { maybe_whole!(self, NtPath, |x| x); - let lo = self.span.lo; + let lo = self.span; let is_global = self.eat(&token::ModSep); // Parse any number of segments and bound sets. A segment is an @@ -1750,7 +1746,7 @@ impl<'a> Parser<'a> { // Assemble the span. // FIXME(#39450) This is bogus if part of the path is macro generated. - let span = mk_sp(lo, self.prev_span.hi); + let span = lo.to(self.prev_span); // Assemble the result. 
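Note: the `spanned(lo, hi, node)` calls being removed throughout these hunks were the `BytePos`-based way of building a `codemap::Spanned` value; with whole spans in hand, `respan` does the same job directly. Roughly (shapes inferred from how the two helpers are used here, so treat this as a sketch):

    // old helper: byte positions in, Spanned out
    pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
        respan(mk_sp(lo, hi), t)
    }

    // surviving helper: Span in, Spanned out
    pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
        Spanned { node: t, span: sp }
    }

which is why every `spanned(lo, self.prev_span.hi, x)` in this file becomes `respan(lo.to(self.prev_span), x)`.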
Ok(ast::Path { @@ -1791,7 +1787,7 @@ impl<'a> Parser<'a> { bindings: bindings, }.into() } else if self.eat(&token::OpenDelim(token::Paren)) { - let lo = self.prev_span.lo; + let lo = self.prev_span; let inputs = self.parse_seq_to_end( &token::CloseDelim(token::Paren), @@ -1804,10 +1800,10 @@ impl<'a> Parser<'a> { None }; - let hi = self.prev_span.hi; + let hi = self.prev_span; Some(P(ast::PathParameters::Parenthesized(ast::ParenthesizedParameterData { - span: mk_sp(lo, hi), + span: lo.to(hi), inputs: inputs, output: output_ty, }))) @@ -1928,38 +1924,37 @@ impl<'a> Parser<'a> { /// Parse ident (COLON expr)? pub fn parse_field(&mut self) -> PResult<'a, Field> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let hi; // Check if a colon exists one ahead. This means we're parsing a fieldname. let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { let fieldname = self.parse_field_name()?; self.bump(); - hi = self.prev_span.hi; + hi = self.prev_span; (fieldname, self.parse_expr()?, false) } else { let fieldname = self.parse_ident()?; - hi = self.prev_span.hi; + hi = self.prev_span; // Mimic `x: x` for the `x` field shorthand. - let path = ast::Path::from_ident(mk_sp(lo, hi), fieldname); - (fieldname, self.mk_expr(lo, hi, ExprKind::Path(None, path), ThinVec::new()), true) + let path = ast::Path::from_ident(lo.to(hi), fieldname); + (fieldname, self.mk_expr(lo.to(hi), ExprKind::Path(None, path), ThinVec::new()), true) }; Ok(ast::Field { - ident: spanned(lo, hi, fieldname), - span: mk_sp(lo, expr.span.hi), + ident: respan(lo.to(hi), fieldname), + span: lo.to(expr.span), expr: expr, is_shorthand: is_shorthand, attrs: attrs.into(), }) } - pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: ExprKind, attrs: ThinVec) - -> P { + pub fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, node: node, - span: mk_sp(lo, hi), + span: span, attrs: attrs.into(), }) } @@ -2013,12 +2008,11 @@ impl<'a> Parser<'a> { ExprKind::AssignOp(binop, lhs, rhs) } - pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos, - m: Mac_, attrs: ThinVec) -> P { + pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, - node: ExprKind::Mac(codemap::Spanned {node: m, span: mk_sp(lo, hi)}), - span: mk_sp(lo, hi), + node: ExprKind::Mac(codemap::Spanned {node: m, span: span}), + span: span, attrs: attrs, }) } @@ -2065,8 +2059,8 @@ impl<'a> Parser<'a> { // attributes by giving them a empty "already parsed" list. 
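Note: `mk_expr` (and `mk_mac_expr` just below) now take the finished `Span` instead of a `lo`/`hi` pair. The calling pattern used for the remainder of this patch is:

    let lo = self.span;                 // span of the expression's first token
    // ... parse the expression ...
    let expr = self.mk_expr(lo.to(self.prev_span), node, attrs);

The attributes argument and the returned pointer are unchanged; only the span plumbing moves from byte positions to spans.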
let mut attrs = ThinVec::new(); - let lo = self.span.lo; - let mut hi = self.span.hi; + let lo = self.span; + let mut hi = self.span; let ex: ExprKind; @@ -2095,18 +2089,19 @@ impl<'a> Parser<'a> { } self.bump(); - hi = self.prev_span.hi; + hi = self.prev_span; + let span = lo.to(hi); return if es.len() == 1 && !trailing_comma { - Ok(self.mk_expr(lo, hi, ExprKind::Paren(es.into_iter().nth(0).unwrap()), attrs)) + Ok(self.mk_expr(span, ExprKind::Paren(es.into_iter().nth(0).unwrap()), attrs)) } else { - Ok(self.mk_expr(lo, hi, ExprKind::Tup(es), attrs)) + Ok(self.mk_expr(span, ExprKind::Tup(es), attrs)) } }, token::OpenDelim(token::Brace) => { return self.parse_block_expr(lo, BlockCheckMode::Default, attrs); }, token::BinOp(token::Or) | token::OrOr => { - let lo = self.span.lo; + let lo = self.span; return self.parse_lambda_expr(lo, CaptureBy::Ref, attrs); }, token::OpenDelim(token::Bracket) => { @@ -2144,34 +2139,34 @@ impl<'a> Parser<'a> { ex = ExprKind::Array(vec![first_expr]); } } - hi = self.prev_span.hi; + hi = self.prev_span; } _ => { if self.eat_lt() { let (qself, path) = self.parse_qualified_path(PathStyle::Expr)?; - hi = path.span.hi; - return Ok(self.mk_expr(lo, hi, ExprKind::Path(Some(qself), path), attrs)); + hi = path.span; + return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs)); } if self.eat_keyword(keywords::Move) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_lambda_expr(lo, CaptureBy::Value, attrs); } if self.eat_keyword(keywords::If) { return self.parse_if_expr(attrs); } if self.eat_keyword(keywords::For) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_for_expr(None, lo, attrs); } if self.eat_keyword(keywords::While) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_while_expr(None, lo, attrs); } if self.token.is_lifetime() { let label = Spanned { node: self.get_label(), span: self.span }; - let lo = self.span.lo; + let lo = self.span; self.bump(); self.expect(&token::Colon)?; if self.eat_keyword(keywords::While) { @@ -2186,7 +2181,7 @@ impl<'a> Parser<'a> { return Err(self.fatal("expected `while`, `for`, or `loop` after a label")) } if self.eat_keyword(keywords::Loop) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_loop_expr(None, lo, attrs); } if self.eat_keyword(keywords::Continue) { @@ -2200,8 +2195,8 @@ impl<'a> Parser<'a> { } else { ExprKind::Continue(None) }; - let hi = self.prev_span.hi; - return Ok(self.mk_expr(lo, hi, ex, attrs)); + let hi = self.prev_span; + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } if self.eat_keyword(keywords::Match) { return self.parse_match_expr(attrs); @@ -2215,13 +2210,13 @@ impl<'a> Parser<'a> { if self.is_catch_expr() { assert!(self.eat_keyword(keywords::Do)); assert!(self.eat_keyword(keywords::Catch)); - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_catch_expr(lo, attrs); } if self.eat_keyword(keywords::Return) { if self.token.can_begin_expr() { let e = self.parse_expr()?; - hi = e.span.hi; + hi = e.span; ex = ExprKind::Ret(Some(e)); } else { ex = ExprKind::Ret(None); @@ -2246,7 +2241,7 @@ impl<'a> Parser<'a> { None }; ex = ExprKind::Break(lt, e); - hi = self.prev_span.hi; + hi = self.prev_span; } else if self.token.is_keyword(keywords::Let) { // Catch this syntax error here, instead of in `check_strict_keywords`, so // that we can explicitly mention that let is not to be used as an expression @@ -2260,8 +2255,8 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { // 
MACRO INVOCATION expression let (_, tts) = self.expect_delimited_token_tree()?; - let hi = self.prev_span.hi; - return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs)); + let hi = self.prev_span; + return Ok(self.mk_mac_expr(lo.to(hi), Mac_ { path: pth, tts: tts }, attrs)); } if self.check(&token::OpenDelim(token::Brace)) { // This is a struct literal, unless we're prohibited @@ -2274,12 +2269,12 @@ impl<'a> Parser<'a> { } } - hi = pth.span.hi; + hi = pth.span; ex = ExprKind::Path(None, pth); } else { match self.parse_lit() { Ok(lit) => { - hi = lit.span.hi; + hi = lit.span; ex = ExprKind::Lit(P(lit)); } Err(mut err) => { @@ -2293,10 +2288,10 @@ impl<'a> Parser<'a> { } } - return Ok(self.mk_expr(lo, hi, ex, attrs)); + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } - fn parse_struct_expr(&mut self, lo: BytePos, pth: ast::Path, mut attrs: ThinVec) + fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec) -> PResult<'a, P> { self.bump(); let mut fields = Vec::new(); @@ -2338,9 +2333,9 @@ impl<'a> Parser<'a> { } } - let hi = self.span.hi; + let span = lo.to(self.span); self.expect(&token::CloseDelim(token::Brace))?; - return Ok(self.mk_expr(lo, hi, ExprKind::Struct(pth, fields, base), attrs)); + return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs)); } fn parse_or_use_outer_attributes(&mut self, @@ -2354,7 +2349,7 @@ impl<'a> Parser<'a> { } /// Parse a block or unsafe block - pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode, + pub fn parse_block_expr(&mut self, lo: Span, blk_mode: BlockCheckMode, outer_attrs: ThinVec) -> PResult<'a, P> { @@ -2364,7 +2359,7 @@ impl<'a> Parser<'a> { attrs.extend(self.parse_inner_attributes()?); let blk = self.parse_block_tail(lo, blk_mode)?; - return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), attrs)); + return Ok(self.mk_expr(blk.span, ExprKind::Block(blk), attrs)); } /// parse a.b or a(13) or a[4] or just a @@ -2375,12 +2370,12 @@ impl<'a> Parser<'a> { let b = self.parse_bottom_expr(); let (span, b) = self.interpolated_or_expr_span(b)?; - self.parse_dot_or_call_expr_with(b, span.lo, attrs) + self.parse_dot_or_call_expr_with(b, span, attrs) } pub fn parse_dot_or_call_expr_with(&mut self, e0: P, - lo: BytePos, + lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { // Stitch the list of outer attributes onto the return value. @@ -2411,11 +2406,7 @@ impl<'a> Parser<'a> { // Assuming we have just parsed `.foo` (i.e., a dot and an ident), continue // parsing into an expression. - fn parse_dot_suffix(&mut self, - ident: Ident, - ident_span: Span, - self_value: P, - lo: BytePos) + fn parse_dot_suffix(&mut self, ident: Ident, ident_span: Span, self_value: P, lo: Span) -> PResult<'a, P> { let (_, tys, bindings) = if self.eat(&token::ModSep) { self.expect_lt()?; @@ -2440,12 +2431,12 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - let hi = self.prev_span.hi; + let hi = self.prev_span; es.insert(0, self_value); - let id = spanned(ident_span.lo, ident_span.hi, ident); + let id = respan(ident_span.to(ident_span), ident); let nd = self.mk_method_call(id, tys, es); - self.mk_expr(lo, hi, nd, ThinVec::new()) + self.mk_expr(lo.to(hi), nd, ThinVec::new()) } // Field access. 
_ => { @@ -2456,32 +2447,30 @@ impl<'a> Parser<'a> { have type parameters"); } - let id = spanned(ident_span.lo, ident_span.hi, ident); + let id = respan(ident_span.to(ident_span), ident); let field = self.mk_field(self_value, id); - self.mk_expr(lo, ident_span.hi, field, ThinVec::new()) + self.mk_expr(lo.to(ident_span), field, ThinVec::new()) } }) } - fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: BytePos) -> PResult<'a, P> { + fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, P> { let mut e = e0; let mut hi; loop { // expr? while self.eat(&token::Question) { - let hi = self.prev_span.hi; - e = self.mk_expr(lo, hi, ExprKind::Try(e), ThinVec::new()); + let hi = self.prev_span; + e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new()); } // expr.f if self.eat(&token::Dot) { match self.token { token::Ident(i) => { - let dot_pos = self.prev_span.hi; - hi = self.span.hi; + let ident_span = self.span; self.bump(); - - e = self.parse_dot_suffix(i, mk_sp(dot_pos, hi), e, lo)?; + e = self.parse_dot_suffix(i, ident_span, e, lo)?; } token::Literal(token::Integer(n), suf) => { let sp = self.span; @@ -2489,16 +2478,16 @@ impl<'a> Parser<'a> { // A tuple index may not have a suffix self.expect_no_suffix(sp, "tuple index", suf); - let dot = self.prev_span.hi; - hi = self.span.hi; + let dot_span = self.prev_span; + hi = self.span; self.bump(); let index = n.as_str().parse::().ok(); match index { Some(n) => { - let id = spanned(dot, hi, n); + let id = respan(dot_span.to(hi), n); let field = self.mk_tup_field(e, id); - e = self.mk_expr(lo, hi, field, ThinVec::new()); + e = self.mk_expr(lo.to(hi), field, ThinVec::new()); } None => { let prev_span = self.prev_span; @@ -2541,10 +2530,8 @@ impl<'a> Parser<'a> { let actual = self.this_token_to_string(); self.span_err(self.span, &format!("unexpected token: `{}`", actual)); - let dot_pos = self.prev_span.hi; - e = self.parse_dot_suffix(keywords::Invalid.ident(), - mk_sp(dot_pos, dot_pos), - e, lo)?; + let dot_span = self.prev_span; + e = self.parse_dot_suffix(keywords::Invalid.ident(), dot_span, e, lo)?; } } continue; @@ -2559,10 +2546,10 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - hi = self.prev_span.hi; + hi = self.prev_span; let nd = self.mk_call(e, es); - e = self.mk_expr(lo, hi, nd, ThinVec::new()); + e = self.mk_expr(lo.to(hi), nd, ThinVec::new()); } // expr[...] 
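Note: threading the starting `Span` (`lo`) through the postfix loop means each suffix, whether `?`, `.field`, a tuple index, a call, or an index expression, rebuilds the accumulated expression with `lo.to(hi)`, where `hi` is the span of the suffix's last consumed token. Schematically, for an input like `f(x)?` (the identifiers here are illustrative only):

    let lo = self.span;                          // span of `f`
    let mut e = self.parse_bottom_expr()?;       // parsed `f`
    // after consuming `(x)`, with `es` holding the parsed arguments:
    e = self.mk_expr(lo.to(self.prev_span), self.mk_call(e, es), ThinVec::new());
    // after consuming `?`:
    e = self.mk_expr(lo.to(self.prev_span), ExprKind::Try(e), ThinVec::new());

Every intermediate expression therefore ends up spanning from the head of the whole postfix chain.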
@@ -2570,10 +2557,10 @@ impl<'a> Parser<'a> { token::OpenDelim(token::Bracket) => { self.bump(); let ix = self.parse_expr()?; - hi = self.span.hi; + hi = self.span; self.expect(&token::CloseDelim(token::Bracket))?; let index = self.mk_index(e, ix); - e = self.mk_expr(lo, hi, index, ThinVec::new()) + e = self.mk_expr(lo.to(hi), index, ThinVec::new()) } _ => return Ok(e) } @@ -2635,38 +2622,33 @@ impl<'a> Parser<'a> { already_parsed_attrs: Option>) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.span.lo; - let hi; + let lo = self.span; // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr() - let ex = match self.token { + let (hi, ex) = match self.token { token::Not => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Not, e) + (span, self.mk_unary(UnOp::Not, e)) } token::BinOp(token::Minus) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Neg, e) + (span, self.mk_unary(UnOp::Neg, e)) } token::BinOp(token::Star) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Deref, e) + (span, self.mk_unary(UnOp::Deref, e)) } token::BinOp(token::And) | token::AndAnd => { self.expect_and()?; let m = self.parse_mutability(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - ExprKind::AddrOf(m, e) + (span, ExprKind::AddrOf(m, e)) } token::Ident(..) if self.token.is_keyword(keywords::In) => { self.bump(); @@ -2676,20 +2658,18 @@ impl<'a> Parser<'a> { )?; let blk = self.parse_block()?; let span = blk.span; - hi = span.hi; - let blk_expr = self.mk_expr(span.lo, hi, ExprKind::Block(blk), ThinVec::new()); - ExprKind::InPlace(place, blk_expr) + let blk_expr = self.mk_expr(span, ExprKind::Block(blk), ThinVec::new()); + (span, ExprKind::InPlace(place, blk_expr)) } token::Ident(..) 
if self.token.is_keyword(keywords::Box) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - ExprKind::Box(e) + (span, ExprKind::Box(e)) } _ => return self.parse_dot_or_call_expr(Some(attrs)) }; - return Ok(self.mk_expr(lo, hi, ex, attrs)); + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } /// Parse an associative expression @@ -2750,13 +2730,11 @@ impl<'a> Parser<'a> { // Special cases: if op == AssocOp::As { let rhs = self.parse_ty_no_plus()?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); - lhs = self.mk_expr(lo, hi, ExprKind::Cast(lhs, rhs), ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs.span), ExprKind::Cast(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::Colon { let rhs = self.parse_ty_no_plus()?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); - lhs = self.mk_expr(lo, hi, ExprKind::Type(lhs, rhs), ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs.span), ExprKind::Type(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::DotDot || op == AssocOp::DotDotDot { // If we didn’t have to handle `x..`/`x...`, it would be pretty easy to @@ -2782,7 +2760,7 @@ impl<'a> Parser<'a> { }; let r = try!(self.mk_range(Some(lhs), rhs, limits)); - lhs = self.mk_expr(lhs_span.lo, rhs_span.hi, r, ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new()); break } @@ -2809,7 +2787,7 @@ impl<'a> Parser<'a> { }), }?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); + let span = lhs_span.to(rhs.span); lhs = match op { AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | @@ -2818,12 +2796,12 @@ impl<'a> Parser<'a> { AssocOp::Greater | AssocOp::GreaterEqual => { let ast_op = op.to_ast_binop().unwrap(); let binary = self.mk_binary(codemap::respan(cur_op_span, ast_op), lhs, rhs); - self.mk_expr(lo, hi, binary, ThinVec::new()) + self.mk_expr(span, binary, ThinVec::new()) } AssocOp::Assign => - self.mk_expr(lo, hi, ExprKind::Assign(lhs, rhs), ThinVec::new()), + self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()), AssocOp::Inplace => - self.mk_expr(lo, hi, ExprKind::InPlace(lhs, rhs), ThinVec::new()), + self.mk_expr(span, ExprKind::InPlace(lhs, rhs), ThinVec::new()), AssocOp::AssignOp(k) => { let aop = match k { token::Plus => BinOpKind::Add, @@ -2838,7 +2816,7 @@ impl<'a> Parser<'a> { token::Shr => BinOpKind::Shr, }; let aopexpr = self.mk_assign_op(codemap::respan(cur_op_span, aop), lhs, rhs); - self.mk_expr(lo, hi, aopexpr, ThinVec::new()) + self.mk_expr(span, aopexpr, ThinVec::new()) } AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotDot => { self.bug("As, Colon, DotDot or DotDotDot branch reached") @@ -2858,7 +2836,7 @@ impl<'a> Parser<'a> { match lhs.node { ExprKind::Binary(op, _, _) if op.node.is_comparison() => { // respan to include both operators - let op_span = mk_sp(op.span.lo, self.span.hi); + let op_span = op.span.to(self.span); let mut err = self.diagnostic().struct_span_err(op_span, "chained comparison operators require parentheses"); if op.node == BinOpKind::Lt && @@ -2881,8 +2859,8 @@ impl<'a> Parser<'a> { debug_assert!(self.token == token::DotDot || self.token == token::DotDotDot); let tok = self.token.clone(); let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.span.lo; - let mut hi = self.span.hi; + let lo = self.span; + let mut hi = self.span; self.bump(); let opt_end = if self.is_at_start_of_range_notation_rhs() { // RHS must be 
parsed with more associativity than the dots. @@ -2890,7 +2868,7 @@ impl<'a> Parser<'a> { Some(self.parse_assoc_expr_with(next_prec, LhsExpr::NotYetParsed) .map(|x|{ - hi = x.span.hi; + hi = x.span; x })?) } else { @@ -2905,7 +2883,7 @@ impl<'a> Parser<'a> { let r = try!(self.mk_range(None, opt_end, limits)); - Ok(self.mk_expr(lo, hi, r, attrs)) + Ok(self.mk_expr(lo.to(hi), r, attrs)) } fn is_at_start_of_range_notation_rhs(&self) -> bool { @@ -2925,23 +2903,23 @@ impl<'a> Parser<'a> { if self.check_keyword(keywords::Let) { return self.parse_if_let_expr(attrs); } - let lo = self.prev_span.lo; + let lo = self.prev_span; let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let thn = self.parse_block()?; let mut els: Option> = None; - let mut hi = thn.span.hi; + let mut hi = thn.span; if self.eat_keyword(keywords::Else) { let elexpr = self.parse_else_expr()?; - hi = elexpr.span.hi; + hi = elexpr.span; els = Some(elexpr); } - Ok(self.mk_expr(lo, hi, ExprKind::If(cond, thn, els), attrs)) + Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs)) } /// Parse an 'if let' expression ('if' token already eaten) pub fn parse_if_let_expr(&mut self, attrs: ThinVec) -> PResult<'a, P> { - let lo = self.prev_span.lo; + let lo = self.prev_span; self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; self.expect(&token::Eq)?; @@ -2949,36 +2927,35 @@ impl<'a> Parser<'a> { let thn = self.parse_block()?; let (hi, els) = if self.eat_keyword(keywords::Else) { let expr = self.parse_else_expr()?; - (expr.span.hi, Some(expr)) + (expr.span, Some(expr)) } else { - (thn.span.hi, None) + (thn.span, None) }; - Ok(self.mk_expr(lo, hi, ExprKind::IfLet(pat, expr, thn, els), attrs)) + Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pat, expr, thn, els), attrs)) } // `move |args| expr` pub fn parse_lambda_expr(&mut self, - lo: BytePos, + lo: Span, capture_clause: CaptureBy, attrs: ThinVec) -> PResult<'a, P> { let decl = self.parse_fn_block_decl()?; - let decl_hi = self.prev_span.hi; + let decl_hi = self.prev_span; let body = match decl.output { FunctionRetTy::Default(_) => self.parse_expr()?, _ => { // If an explicit return type is given, require a // block to appear (RFC 968). - let body_lo = self.span.lo; + let body_lo = self.span; self.parse_block_expr(body_lo, BlockCheckMode::Default, ThinVec::new())? } }; Ok(self.mk_expr( - lo, - body.span.hi, - ExprKind::Closure(capture_clause, decl, body, mk_sp(lo, decl_hi)), + lo.to(body.span), + ExprKind::Closure(capture_clause, decl, body, lo.to(decl_hi)), attrs)) } @@ -2988,13 +2965,13 @@ impl<'a> Parser<'a> { return self.parse_if_expr(ThinVec::new()); } else { let blk = self.parse_block()?; - return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), ThinVec::new())); + return Ok(self.mk_expr(blk.span, ExprKind::Block(blk), ThinVec::new())); } } /// Parse a 'for' .. 
'in' expression ('for' token already eaten) pub fn parse_for_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { // Parse: `for in ` @@ -3004,16 +2981,13 @@ impl<'a> Parser<'a> { let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = self.prev_span.hi; - - Ok(self.mk_expr(span_lo, hi, - ExprKind::ForLoop(pat, expr, loop_block, opt_ident), - attrs)) + let hi = self.prev_span; + Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_ident), attrs)) } /// Parse a 'while' or 'while let' expression ('while' token already eaten) pub fn parse_while_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { if self.token.is_keyword(keywords::Let) { return self.parse_while_let_expr(opt_ident, span_lo, attrs); @@ -3021,14 +2995,13 @@ impl<'a> Parser<'a> { let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - return Ok(self.mk_expr(span_lo, hi, ExprKind::While(cond, body, opt_ident), - attrs)); + let span = span_lo.to(body.span); + return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs)); } /// Parse a 'while let' expression ('while' token already eaten) pub fn parse_while_let_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; @@ -3036,34 +3009,33 @@ impl<'a> Parser<'a> { let expr = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - return Ok(self.mk_expr(span_lo, hi, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); + let span = span_lo.to(body.span); + return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); } // parse `loop {...}`, `loop` token already eaten pub fn parse_loop_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - Ok(self.mk_expr(span_lo, hi, ExprKind::Loop(body, opt_ident), attrs)) + let span = span_lo.to(body.span); + Ok(self.mk_expr(span, ExprKind::Loop(body, opt_ident), attrs)) } /// Parse a `do catch {...}` expression (`do catch` token already eaten) - pub fn parse_catch_expr(&mut self, span_lo: BytePos, mut attrs: ThinVec) + pub fn parse_catch_expr(&mut self, span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - Ok(self.mk_expr(span_lo, hi, ExprKind::Catch(body), attrs)) + Ok(self.mk_expr(span_lo.to(body.span), ExprKind::Catch(body), attrs)) } // `match` token already eaten fn parse_match_expr(&mut self, mut attrs: ThinVec) -> PResult<'a, P> { let match_span = self.prev_span; - let lo = self.prev_span.lo; + let lo = self.prev_span; let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { @@ -3082,17 +3054,17 @@ impl<'a> Parser<'a> { // Recover by skipping to the end of the block. 
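Note: the loop-like parsers above (`for`, `while`, `while let`, `loop`, `do catch`, `match`) now receive `span_lo` as a `Span` covering the introducing keyword and simply produce `span_lo.to(body.span)`; since `body.span` is already a complete `Span`, the expression's span runs from the keyword through the closing brace without touching byte positions, e.g.:

    let span = span_lo.to(body.span);   // `while` keyword through the final `}`
    Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs))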
e.emit(); self.recover_stmt(); - let hi = self.span.hi; + let span = lo.to(self.span); if self.token == token::CloseDelim(token::Brace) { self.bump(); } - return Ok(self.mk_expr(lo, hi, ExprKind::Match(discriminant, arms), attrs)); + return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs)); } } } - let hi = self.span.hi; + let hi = self.span; self.bump(); - return Ok(self.mk_expr(lo, hi, ExprKind::Match(discriminant, arms), attrs)); + return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); } pub fn parse_arm(&mut self) -> PResult<'a, Arm> { @@ -3266,7 +3238,7 @@ impl<'a> Parser<'a> { } let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let hi; if self.check(&token::DotDot) { @@ -3286,16 +3258,16 @@ impl<'a> Parser<'a> { let fieldname = self.parse_field_name()?; self.bump(); let pat = self.parse_pat()?; - hi = pat.span.hi; + hi = pat.span; (pat, fieldname, false) } else { // Parsing a pattern of the form "(box) (ref) (mut) fieldname" let is_box = self.eat_keyword(keywords::Box); - let boxed_span_lo = self.span.lo; + let boxed_span = self.span; let is_ref = self.eat_keyword(keywords::Ref); let is_mut = self.eat_keyword(keywords::Mut); let fieldname = self.parse_ident()?; - hi = self.prev_span.hi; + hi = self.prev_span; let bind_type = match (is_ref, is_mut) { (true, true) => BindingMode::ByRef(Mutability::Mutable), @@ -3307,14 +3279,14 @@ impl<'a> Parser<'a> { let fieldpat = P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: PatKind::Ident(bind_type, fieldpath, None), - span: mk_sp(boxed_span_lo, hi), + span: boxed_span.to(hi), }); let subpat = if is_box { P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: PatKind::Box(fieldpat), - span: mk_sp(lo, hi), + span: lo.to(hi), }) } else { fieldpat @@ -3322,7 +3294,7 @@ impl<'a> Parser<'a> { (subpat, fieldname, true) }; - fields.push(codemap::Spanned { span: mk_sp(lo, hi), + fields.push(codemap::Spanned { span: lo.to(hi), node: ast::FieldPat { ident: fieldname, pat: subpat, @@ -3336,7 +3308,7 @@ impl<'a> Parser<'a> { fn parse_pat_range_end(&mut self) -> PResult<'a, P> { if self.token.is_path_start() { - let lo = self.span.lo; + let lo = self.span; let (qself, path) = if self.eat_lt() { // Parse a qualified path let (qself, path) = @@ -3346,8 +3318,8 @@ impl<'a> Parser<'a> { // Parse an unqualified path (None, self.parse_path(PathStyle::Expr)?) 
}; - let hi = self.prev_span.hi; - Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new())) + let hi = self.prev_span; + Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new())) } else { self.parse_pat_literal_maybe_minus() } @@ -3373,7 +3345,7 @@ impl<'a> Parser<'a> { pub fn parse_pat(&mut self) -> PResult<'a, P> { maybe_whole!(self, NtPat, |x| x); - let lo = self.span.lo; + let lo = self.span; let pat; match self.token { token::Underscore => { @@ -3439,7 +3411,7 @@ impl<'a> Parser<'a> { // Parse macro invocation self.bump(); let (_, tts) = self.expect_delimited_token_tree()?; - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: path, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: path, tts: tts }); pat = PatKind::Mac(mac); } token::DotDotDot | token::DotDot => { @@ -3449,9 +3421,8 @@ impl<'a> Parser<'a> { _ => panic!("can only parse `..` or `...` for ranges (checked above)"), }; // Parse range - let hi = self.prev_span.hi; - let begin = - self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new()); + let span = lo.to(self.prev_span); + let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); self.bump(); let end = self.parse_pat_range_end()?; pat = PatKind::Range(begin, end, end_kind); @@ -3505,11 +3476,10 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; Ok(P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, - span: mk_sp(lo, hi), + span: lo.to(self.prev_span), })) } @@ -3545,7 +3515,7 @@ impl<'a> Parser<'a> { /// Parse a local variable declaration fn parse_local(&mut self, attrs: ThinVec) -> PResult<'a, P> { - let lo = self.span.lo; + let lo = self.span; let pat = self.parse_pat()?; let mut ty = None; @@ -3558,14 +3528,14 @@ impl<'a> Parser<'a> { pat: pat, init: init, id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), attrs: attrs, })) } /// Parse a structure field fn parse_name_and_ty(&mut self, - lo: BytePos, + lo: Span, vis: Visibility, attrs: Vec) -> PResult<'a, StructField> { @@ -3573,7 +3543,7 @@ impl<'a> Parser<'a> { self.expect(&token::Colon)?; let ty = self.parse_ty()?; Ok(StructField { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), ident: Some(name), vis: vis, id: ast::DUMMY_NODE_ID, @@ -3683,7 +3653,7 @@ impl<'a> Parser<'a> { fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility) -> PResult<'a, Option>> { - let lo = self.span.lo; + let lo = self.span; match self.token { token::Ident(ident) if ident.name == "macro_rules" => { if self.look_ahead(1, |t| *t == token::Not) { @@ -3706,9 +3676,9 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; + let span = lo.to(self.prev_span); let kind = ItemKind::MacroDef(tts); - Ok(Some(self.mk_item(lo, hi, id, kind, Visibility::Inherited, attrs.to_owned()))) + Ok(Some(self.mk_item(span, id, kind, Visibility::Inherited, attrs.to_owned()))) } fn parse_stmt_without_recovery(&mut self, @@ -3717,19 +3687,19 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtStmt, |x| Some(x)); let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; Ok(Some(if self.eat_keyword(keywords::Let) { Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Local(self.parse_local(attrs.into())?), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), } } else if let Some(macro_def) = self.eat_macro_def(&attrs, &Visibility::Inherited)? 
{ Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Item(macro_def), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), } // Starts like a simple path, but not a union item. } else if self.token.is_path_start() && @@ -3741,8 +3711,8 @@ impl<'a> Parser<'a> { let expr = if self.check(&token::OpenDelim(token::Brace)) { self.parse_struct_expr(lo, pth, ThinVec::new())? } else { - let hi = self.prev_span.hi; - self.mk_expr(lo, hi, ExprKind::Path(None, pth), ThinVec::new()) + let hi = self.prev_span; + self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new()) }; let expr = self.with_res(Restrictions::RESTRICTION_STMT_EXPR, |this| { @@ -3753,7 +3723,7 @@ impl<'a> Parser<'a> { return Ok(Some(Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Expr(expr), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), })); } @@ -3784,7 +3754,7 @@ impl<'a> Parser<'a> { }; let (_, tts) = self.expect_delimited_token_tree()?; - let hi = self.prev_span.hi; + let hi = self.prev_span; let style = if delim == token::Brace { MacStmtStyle::Braces @@ -3793,7 +3763,7 @@ impl<'a> Parser<'a> { }; if id.name == keywords::Invalid.name() { - let mac = spanned(lo, hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(hi), Mac_ { path: pth, tts: tts }); let node = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof { StmtKind::Mac(P((mac, style, attrs.into()))) @@ -3813,14 +3783,14 @@ impl<'a> Parser<'a> { self.warn_missing_semicolon(); StmtKind::Mac(P((mac, style, attrs.into()))) } else { - let e = self.mk_mac_expr(lo, hi, mac.node, ThinVec::new()); + let e = self.mk_mac_expr(lo.to(hi), mac.node, ThinVec::new()); let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?; let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?; StmtKind::Expr(e) }; Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), node: node, } } else { @@ -3835,13 +3805,14 @@ impl<'a> Parser<'a> { followed by a semicolon"); } } + let span = lo.to(hi); Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: span, node: StmtKind::Item({ self.mk_item( - lo, hi, id /*id is good here*/, - ItemKind::Mac(spanned(lo, hi, Mac_ { path: pth, tts: tts })), + span, id /*id is good here*/, + ItemKind::Mac(respan(span, Mac_ { path: pth, tts: tts })), Visibility::Inherited, attrs) }), @@ -3856,7 +3827,7 @@ impl<'a> Parser<'a> { match item { Some(i) => Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, i.span.hi), + span: lo.to(i.span), node: StmtKind::Item(i), }, None => { @@ -3887,7 +3858,7 @@ impl<'a> Parser<'a> { Restrictions::RESTRICTION_STMT_EXPR, Some(attrs.into()))?; Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, e.span.hi), + span: lo.to(e.span), node: StmtKind::Expr(e), } } @@ -3905,7 +3876,7 @@ impl<'a> Parser<'a> { pub fn parse_block(&mut self) -> PResult<'a, P> { maybe_whole!(self, NtBlock, |x| x); - let lo = self.span.lo; + let lo = self.span; if !self.eat(&token::OpenDelim(token::Brace)) { let sp = self.span; @@ -3950,7 +3921,7 @@ impl<'a> Parser<'a> { fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec, P)> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); - let lo = self.span.lo; + let lo = self.span; self.expect(&token::OpenDelim(token::Brace))?; Ok((self.parse_inner_attributes()?, self.parse_block_tail(lo, BlockCheckMode::Default)?)) @@ -3958,7 +3929,7 @@ impl<'a> Parser<'a> { /// Parse the rest of a block expression or function body /// Precondition: already parsed the '{'. 
- fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P> { + fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P> { let mut stmts = vec![]; while !self.eat(&token::CloseDelim(token::Brace)) { @@ -3976,7 +3947,7 @@ impl<'a> Parser<'a> { stmts: stmts, id: ast::DUMMY_NODE_ID, rules: s, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), })) } @@ -4042,10 +4013,10 @@ impl<'a> Parser<'a> { } bounds.push(RegionTyParamBound(self.expect_lifetime())); } else if self.check_keyword(keywords::For) || self.check_path() { - let lo = self.span.lo; + let lo = self.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; let path = self.parse_path(PathStyle::Type)?; - let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_span)); let modifier = if question.is_some() { TraitBoundModifier::Maybe } else { @@ -4166,7 +4137,7 @@ impl<'a> Parser<'a> { pub fn parse_generics(&mut self) -> PResult<'a, ast::Generics> { maybe_whole!(self, NtGenerics, |x| x); - let span_lo = self.span.lo; + let span_lo = self.span; if self.eat_lt() { let (lifetime_defs, ty_params) = self.parse_generic_params()?; self.expect_gt()?; @@ -4177,7 +4148,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), }, - span: mk_sp(span_lo, self.prev_span.hi), + span: span_lo.to(self.prev_span), }) } else { Ok(ast::Generics::default()) @@ -4202,7 +4173,7 @@ impl<'a> Parser<'a> { } } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) { // Parse associated type binding. - let lo = self.span.lo; + let lo = self.span; let ident = self.parse_ident()?; self.bump(); let ty = self.parse_ty()?; @@ -4210,7 +4181,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, ident: ident, ty: ty, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), }); seen_binding = true; } else if self.check_type() { @@ -4267,7 +4238,7 @@ impl<'a> Parser<'a> { } loop { - let lo = self.span.lo; + let lo = self.span; if self.check_lifetime() && self.look_ahead(1, |t| t != &token::BinOp(token::Plus)) { let lifetime = self.expect_lifetime(); // Bounds starting with a colon are mandatory, but possibly empty. @@ -4275,7 +4246,7 @@ impl<'a> Parser<'a> { let bounds = self.parse_lt_param_bounds(); where_clause.predicates.push(ast::WherePredicate::RegionPredicate( ast::WhereRegionPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), lifetime: lifetime, bounds: bounds, } @@ -4296,7 +4267,7 @@ impl<'a> Parser<'a> { let bounds = self.parse_ty_param_bounds()?; where_clause.predicates.push(ast::WherePredicate::BoundPredicate( ast::WhereBoundPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), bound_lifetimes: lifetime_defs, bounded_ty: ty, bounds: bounds, @@ -4307,7 +4278,7 @@ impl<'a> Parser<'a> { let rhs_ty = self.parse_ty()?; where_clause.predicates.push(ast::WherePredicate::EqPredicate( ast::WhereEqPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), lhs_ty: ty, rhs_ty: rhs_ty, id: ast::DUMMY_NODE_ID, @@ -4404,7 +4375,7 @@ impl<'a> Parser<'a> { // Parse optional self parameter of a method. // Only a limited set of initial token sequences is considered self parameters, anything // else is parsed as a normal function parameter list, so some lookahead is required. 
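Note: the item constructors follow the same pattern; the `mk_item` hunks below replace the `lo`/`hi` pair with a single `Span`, and every item parser computes it by capturing the span of the item's first token up front:

    let lo = self.span;                           // first token of the item
    let visibility = self.parse_visibility(false)?;
    // ... parse the item kind into `ident` / `item_` ...
    let item = self.mk_item(lo.to(self.prev_span), ident, item_, visibility, attrs);

This is the shape repeated for fn, const, static, trait, impl, struct, enum, union, and mod items later in this file.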
- let eself_lo = self.span.lo; + let eself_lo = self.span; let (eself, eself_ident) = match self.token { token::BinOp(token::And) => { // &self @@ -4486,7 +4457,7 @@ impl<'a> Parser<'a> { _ => return Ok(None), }; - let eself = codemap::respan(mk_sp(eself_lo, self.prev_span.hi), eself); + let eself = codemap::respan(eself_lo.to(self.prev_span), eself); Ok(Some(Arg::from_self(eself, eself_ident))) } @@ -4558,8 +4529,7 @@ impl<'a> Parser<'a> { Ok((id, generics)) } - fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident, - node: ItemKind, vis: Visibility, + fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility, attrs: Vec) -> P { P(Item { ident: ident, @@ -4567,7 +4537,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, node: node, vis: vis, - span: mk_sp(lo, hi) + span: span, }) } @@ -4625,7 +4595,7 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtImplItem, |x| x); let mut attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let vis = self.parse_visibility(false)?; let defaultness = self.parse_defaultness()?; let (name, node) = if self.eat_keyword(keywords::Type) { @@ -4651,7 +4621,7 @@ impl<'a> Parser<'a> { Ok(ImplItem { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), ident: name, vis: vis, defaultness: defaultness, @@ -4694,7 +4664,7 @@ impl<'a> Parser<'a> { let prev_span = self.prev_span; self.complain_if_pub_macro(&vis, prev_span); - let lo = self.span.lo; + let lo = self.span; let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; @@ -4704,7 +4674,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)? } - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts }); Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(mac))) } else { let (constness, unsafety, abi) = self.parse_fn_front_matter()?; @@ -4938,11 +4908,11 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| { let attrs = p.parse_outer_attributes()?; - let lo = p.span.lo; + let lo = p.span; let vis = p.parse_visibility(true)?; let ty = p.parse_ty()?; Ok(StructField { - span: mk_sp(lo, p.span.hi), + span: lo.to(p.span), vis: vis, ident: None, id: ast::DUMMY_NODE_ID, @@ -4956,7 +4926,7 @@ impl<'a> Parser<'a> { /// Parse a structure field declaration pub fn parse_single_struct_field(&mut self, - lo: BytePos, + lo: Span, vis: Visibility, attrs: Vec ) -> PResult<'a, StructField> { @@ -4978,7 +4948,7 @@ impl<'a> Parser<'a> { /// Parse an element of a struct definition fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let vis = self.parse_visibility(false)?; self.parse_single_struct_field(lo, vis, attrs) } @@ -5056,7 +5026,7 @@ impl<'a> Parser<'a> { } /// Given a termination token, parse all of the items in a module - fn parse_mod_items(&mut self, term: &token::Token, inner_lo: BytePos) -> PResult<'a, Mod> { + fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> { let mut items = vec![]; while let Some(item) = self.parse_item()? 
{ items.push(item); @@ -5070,11 +5040,11 @@ impl<'a> Parser<'a> { let hi = if self.span == syntax_pos::DUMMY_SP { inner_lo } else { - self.prev_span.hi + self.prev_span }; Ok(ast::Mod { - inner: mk_sp(inner_lo, hi), + inner: inner_lo.to(hi), items: items }) } @@ -5137,7 +5107,7 @@ impl<'a> Parser<'a> { let old_directory = self.directory.clone(); self.push_directory(id, &outer_attrs); self.expect(&token::OpenDelim(token::Brace))?; - let mod_inner_lo = self.span.lo; + let mod_inner_lo = self.span; let attrs = self.parse_inner_attributes()?; let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?; self.directory = old_directory; @@ -5280,7 +5250,7 @@ impl<'a> Parser<'a> { let mut p0 = new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp); p0.cfg_mods = self.cfg_mods; - let mod_inner_lo = p0.span.lo; + let mod_inner_lo = p0.span; let mod_attrs = p0.parse_inner_attributes()?; let m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?; self.sess.included_mod_stack.borrow_mut().pop(); @@ -5288,42 +5258,42 @@ impl<'a> Parser<'a> { } /// Parse a function declaration from a foreign module - fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: BytePos, - attrs: Vec) -> PResult<'a, ForeignItem> { + fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec) + -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Fn)?; let (ident, mut generics) = self.parse_fn_header()?; let decl = self.parse_fn_decl(true)?; generics.where_clause = self.parse_where_clause()?; - let hi = self.span.hi; + let hi = self.span; self.expect(&token::Semi)?; Ok(ast::ForeignItem { ident: ident, attrs: attrs, node: ForeignItemKind::Fn(decl, generics), id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), vis: vis }) } /// Parse a static item from a foreign module - fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: BytePos, - attrs: Vec) -> PResult<'a, ForeignItem> { + fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec) + -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Static)?; let mutbl = self.eat_keyword(keywords::Mut); let ident = self.parse_ident()?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; - let hi = self.span.hi; + let hi = self.span; self.expect(&token::Semi)?; Ok(ForeignItem { ident: ident, attrs: attrs, node: ForeignItemKind::Static(ty, mutbl), id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), vis: vis }) } @@ -5335,7 +5305,7 @@ impl<'a> Parser<'a> { /// extern crate foo; /// extern crate bar as foo; fn parse_item_extern_crate(&mut self, - lo: BytePos, + lo: Span, visibility: Visibility, attrs: Vec) -> PResult<'a, P> { @@ -5349,8 +5319,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)?; let prev_span = self.prev_span; - Ok(self.mk_item(lo, - prev_span.hi, + Ok(self.mk_item(lo.to(prev_span), ident, ItemKind::ExternCrate(maybe_path), visibility, @@ -5368,7 +5337,7 @@ impl<'a> Parser<'a> { /// extern "C" {} /// extern {} fn parse_item_foreign_mod(&mut self, - lo: BytePos, + lo: Span, opt_abi: Option, visibility: Visibility, mut attrs: Vec) @@ -5390,12 +5359,8 @@ impl<'a> Parser<'a> { abi: abi, items: foreign_items }; - Ok(self.mk_item(lo, - prev_span.hi, - keywords::Invalid.ident(), - ItemKind::ForeignMod(m), - visibility, - attrs)) + let invalid = keywords::Invalid.ident(); + Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs)) } /// Parse type Foo = Bar; @@ -5416,7 +5381,7 @@ impl<'a> Parser<'a> { 
let mut any_disr = None; while self.token != token::CloseDelim(token::Brace) { let variant_attrs = self.parse_outer_attributes()?; - let vlo = self.span.lo; + let vlo = self.span; let struct_def; let mut disr_expr = None; @@ -5444,7 +5409,7 @@ impl<'a> Parser<'a> { data: struct_def, disr_expr: disr_expr, }; - variants.push(spanned(vlo, self.prev_span.hi, vr)); + variants.push(respan(vlo.to(self.prev_span), vr)); if !self.eat(&token::Comma) { break; } } @@ -5514,7 +5479,7 @@ impl<'a> Parser<'a> { Some(P(item)) }); - let lo = self.span.lo; + let lo = self.span; let visibility = self.parse_visibility(false)?; @@ -5524,12 +5489,8 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, - keywords::Invalid.ident(), - item_, - visibility, - attrs); + let invalid = keywords::Invalid.ident(); + let item = self.mk_item(lo.to(prev_span), invalid, item_, visibility, attrs); return Ok(Some(item)); } @@ -5549,8 +5510,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5572,8 +5532,7 @@ impl<'a> Parser<'a> { }; let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5597,8 +5556,7 @@ impl<'a> Parser<'a> { respan(const_span, Constness::Const), Abi::Rust)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5615,8 +5573,7 @@ impl<'a> Parser<'a> { } let (ident, item_, extra_attrs) = self.parse_item_const(None)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5632,8 +5589,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Unsafe)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5648,8 +5604,7 @@ impl<'a> Parser<'a> { self.expect_keyword(keywords::Impl)?; let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Unsafe)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5665,8 +5620,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), Abi::Rust)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5689,8 +5643,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5702,8 +5655,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_mod(&attrs[..])?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5714,8 +5666,7 @@ impl<'a> Parser<'a> { // TYPE ITEM let (ident, item_, extra_attrs) = self.parse_item_type()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, 
visibility, @@ -5726,8 +5677,7 @@ impl<'a> Parser<'a> { // ENUM ITEM let (ident, item_, extra_attrs) = self.parse_item_enum()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5739,8 +5689,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Normal)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5751,8 +5700,7 @@ impl<'a> Parser<'a> { // IMPL ITEM let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Normal)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5763,8 +5711,7 @@ impl<'a> Parser<'a> { // STRUCT ITEM let (ident, item_, extra_attrs) = self.parse_item_struct()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5776,8 +5723,7 @@ impl<'a> Parser<'a> { self.bump(); let (ident, item_, extra_attrs) = self.parse_item_union()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5794,7 +5740,7 @@ impl<'a> Parser<'a> { /// Parse a foreign item. fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let visibility = self.parse_visibility(false)?; if self.check_keyword(keywords::Static) { @@ -5821,7 +5767,7 @@ impl<'a> Parser<'a> { attrs: Vec<Attribute>, macros_allowed: bool, attributes_allowed: bool, - lo: BytePos, + lo: Span, visibility: Visibility ) -> PResult<'a, Option<P<Item>>> { if macros_allowed && self.token.is_path_start() { @@ -5830,7 +5776,7 @@ impl<'a> Parser<'a> { let prev_span = self.prev_span; self.complain_if_pub_macro(&visibility, prev_span); - let mac_lo = self.span.lo; + let mac_lo = self.span; // item macro. let pth = self.parse_path(PathStyle::Mod)?; @@ -5856,9 +5802,9 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; - let mac = spanned(mac_lo, hi, Mac_ { path: pth, tts: tts }); - let item = self.mk_item(lo, hi, id, ItemKind::Mac(mac), visibility, attrs); + let hi = self.prev_span; + let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts: tts }); + let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs); return Ok(Some(item)); } @@ -5886,7 +5832,7 @@ impl<'a> Parser<'a> { self.parse_unspanned_seq(&token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), SeqSep::trailing_allowed(token::Comma), |this| { - let lo = this.span.lo; + let lo = this.span; let ident = if this.eat_keyword(keywords::SelfValue) { keywords::SelfValue.ident() } else { @@ -5898,8 +5844,7 @@ impl<'a> Parser<'a> { rename: rename, id: ast::DUMMY_NODE_ID }; - let hi = this.prev_span.hi; - Ok(spanned(lo, hi, node)) + Ok(respan(lo.to(this.prev_span), node)) }) } @@ -5917,21 +5862,21 @@ impl<'a> Parser<'a> { /// MOD_SEP? non_global_path MOD_SEP LBRACE item_seq RBRACE /// MOD_SEP? LBRACE item_seq RBRACE fn parse_view_path(&mut self) -> PResult<'a, P<ViewPath>> { - let lo = self.span.lo; + let lo = self.span; if self.check(&token::OpenDelim(token::Brace)) || self.check(&token::BinOp(token::Star)) || self.is_import_coupler() { // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`.
self.eat(&token::ModSep); let prefix = ast::Path { segments: vec![PathSegment::crate_root()], - span: mk_sp(lo, self.span.hi), + span: lo.to(self.span), }; let view_path_kind = if self.eat(&token::BinOp(token::Star)) { ViewPathGlob(prefix) } else { ViewPathList(prefix, self.parse_path_list_items()?) }; - Ok(P(spanned(lo, self.span.hi, view_path_kind))) + Ok(P(respan(lo.to(self.span), view_path_kind))) } else { let prefix = self.parse_path(PathStyle::Mod)?.default_to_global(); if self.is_import_coupler() { @@ -5939,16 +5884,16 @@ impl<'a> Parser<'a> { self.bump(); if self.check(&token::BinOp(token::Star)) { self.bump(); - Ok(P(spanned(lo, self.span.hi, ViewPathGlob(prefix)))) + Ok(P(respan(lo.to(self.span), ViewPathGlob(prefix)))) } else { let items = self.parse_path_list_items()?; - Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items)))) + Ok(P(respan(lo.to(self.span), ViewPathList(prefix, items)))) } } else { // `foo::bar` or `foo::bar as baz` let rename = self.parse_rename()?. unwrap_or(prefix.segments.last().unwrap().identifier); - Ok(P(spanned(lo, self.prev_span.hi, ViewPathSimple(rename, prefix)))) + Ok(P(respan(lo.to(self.prev_span), ViewPathSimple(rename, prefix)))) } } } @@ -5964,11 +5909,11 @@ impl<'a> Parser<'a> { /// Parses a source module as a crate. This is the main /// entry point for the parser. pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { - let lo = self.span.lo; + let lo = self.span; Ok(ast::Crate { attrs: self.parse_inner_attributes()?, module: self.parse_mod_items(&token::Eof, lo)?, - span: mk_sp(lo, self.span.lo), + span: lo.to(self.span), }) } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index aeb5b1e0a532b..6f5ab50b2fe6f 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -559,11 +559,7 @@ impl<'a, 'b> Context<'a, 'b> { let name = self.ecx.ident_of(&format!("__arg{}", i)); pats.push(self.ecx.pat_ident(DUMMY_SP, name)); for ref arg_ty in self.arg_unique_types[i].iter() { - locals.push(Context::format_arg(self.ecx, - self.macsp, - e.span, - arg_ty, - self.ecx.expr_ident(e.span, name))); + locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name)); } heads.push(self.ecx.expr_addr_of(e.span, e)); } @@ -576,11 +572,7 @@ impl<'a, 'b> Context<'a, 'b> { Exact(i) => spans_pos[i], _ => panic!("should never happen"), }; - counts.push(Context::format_arg(self.ecx, - self.macsp, - span, - &Count, - self.ecx.expr_ident(span, name))); + counts.push(Context::format_arg(self.ecx, self.macsp, span, &Count, name)); } // Now create a vector containing all the arguments @@ -643,9 +635,10 @@ impl<'a, 'b> Context<'a, 'b> { macsp: Span, mut sp: Span, ty: &ArgumentType, - arg: P<ast::Expr>) + arg: ast::Ident) -> P<ast::Expr> { sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); + let arg = ecx.expr_ident(sp, arg); let trait_ = match *ty { Placeholder(ref tyname) => { match &tyname[..] { diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 9b45e364ecf34..947192a0a23e0 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -174,6 +174,15 @@ impl Span { } result } + + pub fn to(self, end: Span) -> Span { + // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f.
issue #23480) + if end.ctxt == SyntaxContext::empty() { + Span { lo: self.lo, ..end } + } else { + Span { hi: end.hi, ..self } + } + } } #[derive(Clone, Debug)] @@ -208,7 +217,7 @@ impl serialize::UseSpecializedDecodable for Span { d.read_struct("Span", 2, |d| { let lo = d.read_struct_field("lo", 0, Decodable::decode)?; let hi = d.read_struct_field("hi", 1, Decodable::decode)?; - Ok(mk_sp(lo, hi)) + Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }) }) } } @@ -696,11 +705,6 @@ pub struct FileLines { thread_local!(pub static SPAN_DEBUG: Cell fmt::Result> = Cell::new(default_span_debug)); -/* assuming that we're not in macro expansion */ -pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span {lo: lo, hi: hi, ctxt: NO_EXPANSION} -} - pub struct MacroBacktrace { /// span where macro was applied to generate this code pub call_site: Span, diff --git a/src/test/compile-fail/imports/macro-paths.rs b/src/test/compile-fail/imports/macro-paths.rs index 48e7ca0eee454..7c19917acc476 100644 --- a/src/test/compile-fail/imports/macro-paths.rs +++ b/src/test/compile-fail/imports/macro-paths.rs @@ -25,7 +25,6 @@ fn f() { bar::m! { //~ ERROR ambiguous //~| NOTE macro-expanded items do not shadow when used in a macro invocation path mod bar { pub use two_macros::m; } //~ NOTE could refer to the name defined here - //~^^^ NOTE in this expansion } } @@ -37,6 +36,5 @@ fn g() { baz::m! { //~ ERROR ambiguous //~| NOTE macro-expanded items do not shadow when used in a macro invocation path mod baz { pub use two_macros::m; } //~ NOTE could refer to the name defined here - //~^^^ NOTE in this expansion } } diff --git a/src/test/compile-fail/imports/macros.rs b/src/test/compile-fail/imports/macros.rs index cfa7681dc2277..06b0964a3b145 100644 --- a/src/test/compile-fail/imports/macros.rs +++ b/src/test/compile-fail/imports/macros.rs @@ -28,7 +28,6 @@ mod m2 { m! { //~ ERROR ambiguous //~| NOTE macro-expanded macro imports do not shadow use foo::m; //~ NOTE could refer to the name imported here - //~^^^ NOTE in this expansion } } @@ -43,7 +42,6 @@ mod m3 { m! { //~ ERROR ambiguous //~| NOTE macro-expanded macro imports do not shadow use two_macros::n as m; //~ NOTE could refer to the name imported here - //~^^^ NOTE in this expansion } } } diff --git a/src/test/compile-fail/imports/shadow_builtin_macros.rs b/src/test/compile-fail/imports/shadow_builtin_macros.rs index 2b3ba1b4aa7a7..a7f1cf3c9d3ef 100644 --- a/src/test/compile-fail/imports/shadow_builtin_macros.rs +++ b/src/test/compile-fail/imports/shadow_builtin_macros.rs @@ -31,7 +31,6 @@ mod m2 { mod m3 { ::two_macros::m!(use foo::panic;); //~ NOTE `panic` could refer to the name imported here - //~| NOTE in this expansion fn f() { panic!(); } //~ ERROR ambiguous //~| NOTE `panic` is also a builtin macro //~| NOTE macro-expanded macro imports do not shadow diff --git a/src/test/compile-fail/issue-25385.rs b/src/test/compile-fail/issue-25385.rs index 51d7baaf3e915..4aacb6840e9d5 100644 --- a/src/test/compile-fail/issue-25385.rs +++ b/src/test/compile-fail/issue-25385.rs @@ -21,5 +21,4 @@ fn main() { foo!(1i32.foo()); //~^ ERROR no method named `foo` found for type `i32` in the current scope - //~^^ NOTE in this expansion of foo! } diff --git a/src/test/run-pass/syntax-extension-source-utils.rs b/src/test/run-pass/syntax-extension-source-utils.rs index 3b5f033d07b7d..25c7417f7eb21 100644 --- a/src/test/run-pass/syntax-extension-source-utils.rs +++ b/src/test/run-pass/syntax-extension-source-utils.rs @@ -22,7 +22,7 @@ macro_rules! 
indirect_line { () => ( line!() ) } pub fn main() { assert_eq!(line!(), 24); - assert_eq!(column!(), 4); + assert_eq!(column!(), 15); assert_eq!(indirect_line!(), 26); assert!((file!().ends_with("syntax-extension-source-utils.rs"))); assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string()); diff --git a/src/test/ui/macros/macro_path_as_generic_bound.stderr b/src/test/ui/macros/macro_path_as_generic_bound.stderr index 96635032105a8..e4044f5aaf2be 100644 --- a/src/test/ui/macros/macro_path_as_generic_bound.stderr +++ b/src/test/ui/macros/macro_path_as_generic_bound.stderr @@ -2,10 +2,7 @@ error[E0433]: failed to resolve. Use of undeclared type or module `m` --> $DIR/macro_path_as_generic_bound.rs:17:6 | 17 | foo!(m::m2::A); - | -----^^^^^^^^-- - | | | - | | Use of undeclared type or module `m` - | in this macro invocation + | ^^^^^^^^ Use of undeclared type or module `m` error: cannot continue compilation due to previous error From 8fde04b4a295792249d4a01f87a9f66143aa7c83 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Wed, 29 Mar 2017 07:17:18 +0000 Subject: [PATCH 5/5] Improve `Path` spans. --- src/libsyntax/attr.rs | 7 ++-- src/libsyntax/ext/base.rs | 21 ++++++++++- src/libsyntax/ext/tt/macro_parser.rs | 2 +- src/libsyntax/ext/tt/macro_rules.rs | 2 +- src/libsyntax/ext/tt/quoted.rs | 13 ++++--- src/libsyntax/ext/tt/transcribe.rs | 9 +---- src/libsyntax/parse/mod.rs | 4 +- src/libsyntax/parse/parser.rs | 56 +++++++++++++++++----------- src/libsyntax/parse/token.rs | 48 ++++++++++++------------ 9 files changed, 95 insertions(+), 67 deletions(-) diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 5dcce2572af87..6f5f52ff1e953 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -1015,9 +1015,10 @@ impl MetaItem { { let (mut span, name) = match tokens.next() { Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), - Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt { - token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()), - _ => None, + Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt { + token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name), + token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), + _ => return None, }, _ => return None, }; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index a2d54b62ec65d..fda026fec64ef 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -209,7 +209,26 @@ impl TTMacroExpander for F { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream) -> Box { - (*self)(ecx, span, &input.trees().collect::>()) + struct AvoidInterpolatedIdents; + + impl Folder for AvoidInterpolatedIdents { + fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree { + if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt { + if let token::NtIdent(ident) = **nt { + return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node)); + } + } + fold::noop_fold_tt(tt, self) + } + + fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { + fold::noop_fold_mac(mac, self) + } + } + + let input: Vec<_> = + input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect(); + (*self)(ecx, span, &input) } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 9ee427eed3556..6cd1fea2e75e2 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -492,7 +492,7 @@ fn 
parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { _ => {} } // check at the beginning and the parser checks after each bump - p.check_unknown_macro_variable(); + p.process_potential_macro_variable(); match name { "item" => match panictry!(p.parse_item()) { Some(i) => token::NtItem(i), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 66f5520b88263..93348c8f08376 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -121,7 +121,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, p.root_module_name = cx.current_expansion.module.mod_path.last() .map(|id| id.name.as_str().to_string()); - p.check_unknown_macro_variable(); + p.process_potential_macro_variable(); // Let the context choose how to interpret the result. // Weird, but useful for X-macros. return Box::new(ParserAnyMacro { diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 12e746e024d3b..d216effbd4508 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -136,11 +136,14 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => { let span = match trees.next() { Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { - Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => { - let span = Span { lo: start_sp.lo, ..end_sp }; - result.push(TokenTree::MetaVarDecl(span, ident, kind)); - continue - } + Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { + Some(kind) => { + let span = Span { lo: start_sp.lo, ..end_sp }; + result.push(TokenTree::MetaVarDecl(span, ident, kind)); + continue + } + _ => end_sp, + }, tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), }, tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 24004492be2a0..947089b0b9ac4 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -12,7 +12,7 @@ use ast::Ident; use errors::Handler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::quoted; -use parse::token::{self, SubstNt, Token, NtIdent, NtTT}; +use parse::token::{self, SubstNt, Token, NtTT}; use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree, Delimited}; use util::small_vector::SmallVector; @@ -154,13 +154,6 @@ pub fn transcribe(sp_diag: &Handler, None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()), Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched { match **nt { - // sidestep the interpolation tricks for ident because - // (a) idents can be in lots of places, so it'd be a pain - // (b) we actually can, since it's a token. 
- NtIdent(ref sn) => { let token = TokenTree::Token(sn.span, token::Ident(sn.node)); result.push(token.into()); } NtTT(ref tt) => result.push(tt.clone().into()), _ => { let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index b5d0a46de4926..c63a6524f7459 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -218,9 +218,7 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream /// Given stream and the ParseSess, produce a parser pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> { - let mut p = Parser::new(sess, stream, None, false); - p.check_unknown_macro_variable(); - p + Parser::new(sess, stream, None, false) } /// Parse a string representing a character literal into its final form. diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index b0611d7529062..db2878c6b1e72 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -160,6 +160,7 @@ pub struct Parser<'a> { /// the span of the current token: pub span: Span, /// the span of the previous token: + pub meta_var_span: Option<Span>, pub prev_span: Span, /// the previous token kind prev_token_kind: PrevTokenKind, @@ -417,6 +418,7 @@ impl<'a> Parser<'a> { token: token::Underscore, span: syntax_pos::DUMMY_SP, prev_span: syntax_pos::DUMMY_SP, + meta_var_span: None, prev_token_kind: PrevTokenKind::Other, restrictions: Restrictions::empty(), obsolete_set: HashSet::new(), @@ -443,6 +445,7 @@ impl<'a> Parser<'a> { parser.directory.path = PathBuf::from(sess.codemap().span_to_filename(parser.span)); parser.directory.path.pop(); } + parser.process_potential_macro_variable(); parser } @@ -1012,7 +1015,7 @@ impl<'a> Parser<'a> { self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); } - self.prev_span = self.span; + self.prev_span = self.meta_var_span.take().unwrap_or(self.span); // Record last token kind for possible error recovery. self.prev_token_kind = match self.token { @@ -1028,7 +1031,7 @@ impl<'a> Parser<'a> { self.token = next.tok; self.expected_tokens.clear(); // check after each token - self.check_unknown_macro_variable(); + self.process_potential_macro_variable(); } /// Advance the parser using provided token as a next one. Use this when @@ -1722,7 +1725,7 @@ impl<'a> Parser<'a> { pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { maybe_whole!(self, NtPath, |x| x); - let lo = self.span; + let lo = self.meta_var_span.unwrap_or(self.span); let is_global = self.eat(&token::ModSep); // Parse any number of segments and bound sets. A segment is an @@ -1744,13 +1747,9 @@ impl<'a> Parser<'a> { segments.insert(0, PathSegment::crate_root()); } - // Assemble the span. - // FIXME(#39450) This is bogus if part of the path is macro generated. - let span = lo.to(self.prev_span); - // Assemble the result. Ok(ast::Path { - span: span, + span: lo.to(self.prev_span), segments: segments, }) } @@ -1763,8 +1762,8 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. + let ident_span = self.span; let identifier = self.parse_path_segment_ident()?; - let ident_span = self.prev_span; if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) { self.bump(); @@ -1831,8 +1830,8 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier.
+ let ident_span = self.span; let identifier = self.parse_path_segment_ident()?; - let ident_span = self.prev_span; // If we do not see a `::`, stop. if !self.eat(&token::ModSep) { @@ -1873,10 +1872,11 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. + let ident_span = self.span; let identifier = self.parse_path_segment_ident()?; // Assemble and push the result. - segments.push(PathSegment::from_ident(identifier, self.prev_span)); + segments.push(PathSegment::from_ident(identifier, ident_span)); // If we do not see a `::` or see `::{`/`::*`, stop. if !self.check(&token::ModSep) || self.is_import_coupler() { @@ -1896,8 +1896,9 @@ impl<'a> Parser<'a> { fn expect_lifetime(&mut self) -> Lifetime { match self.token { token::Lifetime(ident) => { + let ident_span = self.span; self.bump(); - Lifetime { name: ident.name, span: self.prev_span, id: ast::DUMMY_NODE_ID } + Lifetime { name: ident.name, span: ident_span, id: ast::DUMMY_NODE_ID } } _ => self.span_bug(self.span, "not a lifetime") } @@ -2568,10 +2569,23 @@ impl<'a> Parser<'a> { return Ok(e); } - pub fn check_unknown_macro_variable(&mut self) { - if let token::SubstNt(name) = self.token { - self.fatal(&format!("unknown macro variable `{}`", name)).emit() - } + pub fn process_potential_macro_variable(&mut self) { + let ident = match self.token { + token::SubstNt(name) => { + self.fatal(&format!("unknown macro variable `{}`", name)).emit(); + return + } + token::Interpolated(ref nt) => { + self.meta_var_span = Some(self.span); + match **nt { + token::NtIdent(ident) => ident, + _ => return, + } + } + _ => return, + }; + self.token = token::Ident(ident.node); + self.span = ident.span; } /// parse a single token tree from the input. @@ -2589,9 +2603,9 @@ impl<'a> Parser<'a> { }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { - let token = mem::replace(&mut self.token, token::Underscore); + let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span); self.bump(); - TokenTree::Token(self.prev_span, token) + TokenTree::Token(span, token) } } } @@ -3489,9 +3503,9 @@ impl<'a> Parser<'a> { fn parse_pat_ident(&mut self, binding_mode: ast::BindingMode) -> PResult<'a, PatKind> { + let ident_span = self.span; let ident = self.parse_ident()?; - let prev_span = self.prev_span; - let name = codemap::Spanned{span: prev_span, node: ident}; + let name = codemap::Spanned{span: ident_span, node: ident}; let sub = if self.eat(&token::At) { Some(self.parse_pat()?) } else { @@ -4364,7 +4378,7 @@ impl<'a> Parser<'a> { fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> { let expect_ident = |this: &mut Self| match this.token { // Preserve hygienic context. - token::Ident(ident) => { this.bump(); codemap::respan(this.prev_span, ident) } + token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) } _ => unreachable!() }; let isolated_self = |this: &mut Self, n| { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 519d5bd98e47f..74aa3984a9a42 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -211,9 +211,7 @@ impl Token { ModSep => true, // global path Pound => true, // expression attributes Interpolated(ref nt) => match **nt { - NtExpr(..) => true, - NtBlock(..) => true, - NtPath(..) => true, + NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..)
=> true, _ => false, }, _ => false, @@ -236,8 +234,7 @@ impl Token { Lt | BinOp(Shl) => true, // associated path ModSep => true, // global path Interpolated(ref nt) => match **nt { - NtTy(..) => true, - NtPath(..) => true, + NtIdent(..) | NtTy(..) | NtPath(..) => true, _ => false, }, _ => false, @@ -252,14 +249,22 @@ impl Token { } } - /// Returns `true` if the token is an identifier. - pub fn is_ident(&self) -> bool { + pub fn ident(&self) -> Option<ast::Ident> { match *self { - Ident(..) => true, - _ => false, + Ident(ident) => Some(ident), + Interpolated(ref nt) => match **nt { + NtIdent(ident) => Some(ident.node), + _ => None, + }, + _ => None, } } + /// Returns `true` if the token is an identifier. + pub fn is_ident(&self) -> bool { + self.ident().is_some() + } + /// Returns `true` if the token is a documentation comment. pub fn is_doc_comment(&self) -> bool { match *self { @@ -311,18 +316,15 @@ impl Token { /// Returns `true` if the token is a given keyword, `kw`. pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { - match *self { - Ident(id) => id.name == kw.name(), - _ => false, - } + self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false) } pub fn is_path_segment_keyword(&self) -> bool { - match *self { - Ident(id) => id.name == keywords::Super.name() || - id.name == keywords::SelfValue.name() || - id.name == keywords::SelfType.name(), - _ => false, + match self.ident() { + Some(id) => id.name == keywords::Super.name() || - id.name == keywords::SelfValue.name() || - id.name == keywords::SelfType.name(), + None => false, } } @@ -333,18 +335,16 @@ impl Token { /// Returns `true` if the token is a strict keyword. pub fn is_strict_keyword(&self) -> bool { - match *self { - Ident(id) => id.name >= keywords::As.name() && - id.name <= keywords::While.name(), + match self.ident() { + Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(), _ => false, } } /// Returns `true` if the token is a keyword reserved for possible future use. pub fn is_reserved_keyword(&self) -> bool { - match *self { - Ident(id) => id.name >= keywords::Abstract.name() && - id.name <= keywords::Yield.name(), + match self.ident() { + Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(), _ => false, } }
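
Note on the span plumbing in the parser.rs hunks above: span-tracking locals (`lo`, `hi`, `mod_inner_lo`, ...) now hold whole `Span`s instead of raw `BytePos` values, and joining is done with the new `Span::to` method rather than the removed `mk_sp(lo, hi)` helper. The standalone sketch below uses deliberately simplified stand-ins (a plain `u32` for `ctxt`, a `NO_EXPANSION` constant in place of `SyntaxContext::empty()`), so it illustrates only the joining rule, not the real libsyntax_pos types.

    // Minimal sketch of the joining rule that Span::to introduces (simplified types).
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32, ctxt: u32 }

    // Stand-in for SyntaxContext::empty().
    const NO_EXPANSION: u32 = 0;

    impl Span {
        // Result runs from the start of `self` to the end of `end`; the syntax
        // context is taken from `end` when it has no expansion context, else from `self`.
        fn to(self, end: Span) -> Span {
            if end.ctxt == NO_EXPANSION {
                Span { lo: self.lo, ..end }
            } else {
                Span { hi: end.hi, ..self }
            }
        }
    }

    fn main() {
        let lo = Span { lo: 10, hi: 12, ctxt: NO_EXPANSION };
        let hi = Span { lo: 20, hi: 25, ctxt: NO_EXPANSION };
        // Replaces the old mk_sp(lo, prev_span.hi) pattern used throughout parser.rs.
        assert_eq!(lo.to(hi), Span { lo: 10, hi: 25, ctxt: NO_EXPANSION });
    }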
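
Note on the token.rs hunks above: the ad-hoc `Ident(..)` matches are folded into a single `Token::ident()` accessor that also looks through `Interpolated(NtIdent(..))`, so keyword predicates treat an interpolated identifier exactly like a plain one. The sketch below models that refactor with simplified, hypothetical types (`String` in place of `ast::Ident`, a two-variant `Nonterminal`); it is an illustration of the pattern, not the real libsyntax definitions.

    // Simplified model of the Token::ident() refactor.
    #[derive(Debug)]
    enum Nonterminal { NtIdent(String), NtExpr }

    #[derive(Debug)]
    enum Token {
        Ident(String),
        Interpolated(Box<Nonterminal>),
        Semi,
    }

    impl Token {
        // Single accessor that sees through interpolation.
        fn ident(&self) -> Option<String> {
            match *self {
                Token::Ident(ref id) => Some(id.clone()),
                Token::Interpolated(ref nt) => match **nt {
                    Nonterminal::NtIdent(ref id) => Some(id.clone()),
                    _ => None,
                },
                _ => None,
            }
        }

        // Keyword checks can now be written once in terms of ident().
        fn is_keyword(&self, kw: &str) -> bool {
            self.ident().map(|id| id == kw).unwrap_or(false)
        }
    }

    fn main() {
        let plain = Token::Ident("fn".to_string());
        let interpolated = Token::Interpolated(Box::new(Nonterminal::NtIdent("fn".to_string())));
        let semi = Token::Semi;
        // Both the plain and the interpolated identifier count as the keyword `fn`.
        assert!(plain.is_keyword("fn"));
        assert!(interpolated.is_keyword("fn"));
        assert!(!semi.is_keyword("fn"));
    }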