Rollup merge of #76057 - matklad:remove-retokenize, r=petrochenkov
Move retokenize hack to save_analysis

closes #76046
Dylan-DPC authored Aug 29, 2020
2 parents 11193ca + 6621895 commit 9d7d24d
Showing 5 changed files with 36 additions and 88 deletions.
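In short: `StringReader::retokenize` and the `sub_span_of_token` helper built on it are deleted, and save_analysis now locates the `*` of a glob import by itself. The new `SpanUtils::sub_span_of_star` slices the span's source text and runs `rustc_lexer::tokenize` over just that snippet, which lets the crate depend on `rustc_lexer` instead of `rustc_parse`. The sketch below is not part of the commit; it is a minimal standalone illustration of the offset computation that the new function performs inline, assuming the in-tree `rustc_lexer` crate as a dependency (`star_offset` is a hypothetical helper name).

// Illustrative sketch only: mirrors the token-length accumulation used by
// `sub_span_of_star`. Assumes `rustc_lexer` is available as a dependency.
use rustc_lexer::{tokenize, TokenKind};

/// Returns the byte offset of the first `*` token in `text`, if any.
fn star_offset(text: &str) -> Option<usize> {
    let mut pos = 0usize;
    for token in tokenize(text) {
        let start = pos;
        pos += token.len as usize;
        if token.kind == TokenKind::Star {
            return Some(start);
        }
    }
    None
}

fn main() {
    // The `*` of the glob import sits at byte offset 13 of this snippet.
    assert_eq!(star_offset("pub use foo::*;"), Some(13));
}

Running the raw, context-free lexer over a short snippet is sufficient here because save_analysis only needs the byte offset of a single `*` token, not fully parsed tokens.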
Cargo.lock (2 changes: 1 addition & 1 deletion)
@@ -3927,8 +3927,8 @@ dependencies = [
  "rustc_data_structures",
  "rustc_hir",
  "rustc_hir_pretty",
+ "rustc_lexer",
  "rustc_middle",
- "rustc_parse",
  "rustc_session",
  "rustc_span",
  "serde_json",
src/librustc_parse/lexer/mod.rs (30 changes: 2 additions & 28 deletions)
@@ -46,19 +46,10 @@ impl<'a> StringReader<'a> {
         source_file: Lrc<rustc_span::SourceFile>,
         override_span: Option<Span>,
     ) -> Self {
-        // Make sure external source is loaded first, before accessing it.
-        // While this can't show up during normal parsing, `retokenize` may
-        // be called with a source file from an external crate.
-        sess.source_map().ensure_source_file_source_present(Lrc::clone(&source_file));
-
-        let src = if let Some(src) = &source_file.src {
-            Lrc::clone(&src)
-        } else if let Some(src) = source_file.external_src.borrow().get_source() {
-            Lrc::clone(&src)
-        } else {
+        let src = source_file.src.clone().unwrap_or_else(|| {
             sess.span_diagnostic
                 .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
-        };
+        });
 
         StringReader {
             sess,
@@ -70,23 +61,6 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
-        let begin = sess.source_map().lookup_byte_offset(span.lo());
-        let end = sess.source_map().lookup_byte_offset(span.hi());
-
-        // Make the range zero-length if the span is invalid.
-        if begin.sf.start_pos != end.sf.start_pos {
-            span = span.shrink_to_lo();
-        }
-
-        let mut sr = StringReader::new(sess, begin.sf, None);
-
-        // Seek the lexer to the right byte range.
-        sr.end_src_index = sr.src_index(span.hi());
-
-        sr
-    }
-
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
         self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
     }
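With `retokenize` gone, `StringReader::new` no longer has to handle source files from external crates (the removed comment notes that only `retokenize` could be called with one), so the eager `ensure_source_file_source_present` call is dropped and the `if`/`else if`/`else` chain collapses into a single `unwrap_or_else` with a diverging fallback; loading external sources is now done by save_analysis itself in `sub_span_of_star` further down. Below is a generic, standalone sketch of that pattern, not rustc code: `source_or_bug` is a hypothetical name, and a plain `panic!` stands in for `sess.span_diagnostic.bug`, which likewise never returns.

// Generic sketch of the simplified pattern in `StringReader::new`: take the
// source if it is already present, otherwise report an internal error from a
// closure that never returns.
fn source_or_bug(src: Option<String>, name: &str) -> String {
    src.unwrap_or_else(|| panic!("cannot lex `source_file` without source: {}", name))
}

fn main() {
    let src = source_or_bug(Some("fn main() {}".to_owned()), "main.rs");
    println!("{}", src);
}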
src/librustc_save_analysis/Cargo.toml (2 changes: 1 addition & 1 deletion)
@@ -16,7 +16,7 @@ rustc_ast_pretty = { path = "../librustc_ast_pretty" }
 rustc_data_structures = { path = "../librustc_data_structures" }
 rustc_hir = { path = "../librustc_hir" }
 rustc_hir_pretty = { path = "../librustc_hir_pretty" }
-rustc_parse = { path = "../librustc_parse" }
+rustc_lexer = { path = "../librustc_lexer" }
 serde_json = "1"
 rustc_session = { path = "../librustc_session" }
 rustc_span = { path = "../librustc_span" }
src/librustc_save_analysis/dump_visitor.rs (6 changes: 2 additions & 4 deletions)
@@ -14,7 +14,7 @@
 //! recording the output.
 
 use rustc_ast as ast;
-use rustc_ast::{token, walk_list};
+use rustc_ast::walk_list;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind as HirDefKind, Res};
@@ -1207,9 +1207,7 @@ impl<'tcx> Visitor<'tcx> for DumpVisitor<'tcx> {

                 // Otherwise it's a span with wrong macro expansion info, which
                 // we don't want to track anyway, since it's probably macro-internal `use`
-                if let Some(sub_span) =
-                    self.span.sub_span_of_token(item.span, token::BinOp(token::Star))
-                {
+                if let Some(sub_span) = self.span.sub_span_of_star(item.span) {
                     if !self.span.filter_generated(item.span) {
                         let access = access_from!(self.save_ctxt, item, item.hir_id);
                         let span = self.span_from_span(sub_span);
src/librustc_save_analysis/span_utils.rs (84 changes: 30 additions & 54 deletions)
@@ -1,6 +1,6 @@
 use crate::generated_code;
-use rustc_ast::token::{self, TokenKind};
-use rustc_parse::lexer::{self, StringReader};
+use rustc_data_structures::sync::Lrc;
+use rustc_lexer::{tokenize, TokenKind};
 use rustc_session::Session;
 use rustc_span::*;

@@ -43,61 +43,37 @@ impl<'a> SpanUtils<'a> {
         }
     }
 
-    pub fn retokenise_span(&self, span: Span) -> StringReader<'a> {
-        lexer::StringReader::retokenize(&self.sess.parse_sess, span)
-    }
-
-    pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
-        let mut toks = self.retokenise_span(span);
-        loop {
-            let next = toks.next_token();
-            if next == token::Eof {
-                return None;
-            }
-            if next == tok {
-                return Some(next.span);
-            }
+    /// Finds the span of `*` token within the larger `span`.
+    pub fn sub_span_of_star(&self, mut span: Span) -> Option<Span> {
+        let begin = self.sess.source_map().lookup_byte_offset(span.lo());
+        let end = self.sess.source_map().lookup_byte_offset(span.hi());
+        // Make the range zero-length if the span is invalid.
+        if begin.sf.start_pos != end.sf.start_pos {
+            span = span.shrink_to_lo();
         }
-    }
 
-    // // Return the name for a macro definition (identifier after first `!`)
-    // pub fn span_for_macro_def_name(&self, span: Span) -> Option<Span> {
-    //     let mut toks = self.retokenise_span(span);
-    //     loop {
-    //         let ts = toks.real_token();
-    //         if ts == token::Eof {
-    //             return None;
-    //         }
-    //         if ts == token::Not {
-    //             let ts = toks.real_token();
-    //             if ts.kind.is_ident() {
-    //                 return Some(ts.sp);
-    //             } else {
-    //                 return None;
-    //             }
-    //         }
-    //     }
-    // }
+        let sf = Lrc::clone(&begin.sf);
 
-    // // Return the name for a macro use (identifier before first `!`).
-    // pub fn span_for_macro_use_name(&self, span:Span) -> Option<Span> {
-    //     let mut toks = self.retokenise_span(span);
-    //     let mut prev = toks.real_token();
-    //     loop {
-    //         if prev == token::Eof {
-    //             return None;
-    //         }
-    //         let ts = toks.real_token();
-    //         if ts == token::Not {
-    //             if prev.kind.is_ident() {
-    //                 return Some(prev.sp);
-    //             } else {
-    //                 return None;
-    //             }
-    //         }
-    //         prev = ts;
-    //     }
-    // }
+        self.sess.source_map().ensure_source_file_source_present(Lrc::clone(&sf));
+        let src =
+            sf.src.clone().or_else(|| sf.external_src.borrow().get_source().map(Lrc::clone))?;
+        let to_index = |pos: BytePos| -> usize { (pos - sf.start_pos).0 as usize };
+        let text = &src[to_index(span.lo())..to_index(span.hi())];
+        let start_pos = {
+            let mut pos = 0;
+            tokenize(text)
+                .map(|token| {
+                    let start = pos;
+                    pos += token.len;
+                    (start, token)
+                })
+                .find(|(_pos, token)| token.kind == TokenKind::Star)?
+                .0
+        };
+        let lo = span.lo() + BytePos(start_pos as u32);
+        let hi = lo + BytePos(1);
+        Some(span.with_lo(lo).with_hi(hi))
+    }
 
     /// Return true if the span is generated code, and
     /// it is not a subspan of the root callsite.