Skip to content

Commit

Permalink
Refactor the parser to consume token trees.
Browse files Browse the repository at this point in the history
  • Loading branch information
jseyfried committed Jan 17, 2017
1 parent de46b24 commit debcbf0
Show file tree
Hide file tree
Showing 11 changed files with 59 additions and 213 deletions.
3 changes: 2 additions & 1 deletion src/librustc/session/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ use lint;
use middle::cstore;

use syntax::ast::{self, IntTy, UintTy};
use syntax::parse::token;
use syntax::parse;
use syntax::symbol::Symbol;
use syntax::feature_gate::UnstableFeatures;
Expand Down Expand Up @@ -1259,7 +1260,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {

let meta_item = panictry!(parser.parse_meta_item());

if !parser.reader.is_eof() {
if parser.token != token::Eof {
early_error(ErrorOutputType::default(), &format!("invalid --cfg argument: {}", s))
} else if meta_item.is_meta_item_list() {
let msg =
Expand Down
18 changes: 4 additions & 14 deletions src/librustc_metadata/cstore_impl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ use rustc_back::PanicStrategy;

use syntax::ast;
use syntax::attr;
use syntax::parse::new_parser_from_source_str;
use syntax::parse::filemap_to_tts;
use syntax::symbol::Symbol;
use syntax_pos::{mk_sp, Span};
use rustc::hir::svh::Svh;
Expand Down Expand Up @@ -395,19 +395,9 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
let (name, def) = data.get_macro(id.index);
let source_name = format!("<{} macros>", name);

// NB: Don't use parse_tts_from_source_str because it parses with quote_depth > 0.
let mut parser = new_parser_from_source_str(&sess.parse_sess, source_name, def.body);

let lo = parser.span.lo;
let body = match parser.parse_all_token_trees() {
Ok(body) => body,
Err(mut err) => {
err.emit();
sess.abort_if_errors();
unreachable!();
}
};
let local_span = mk_sp(lo, parser.prev_span.hi);
let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
let body = filemap_to_tts(&sess.parse_sess, filemap);

// Mark the attrs as used
let attrs = data.get_item_attrs(id.index);
Expand Down
4 changes: 1 addition & 3 deletions src/libsyntax/ext/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -615,9 +615,7 @@ impl<'a> ExtCtxt<'a> {

pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-> parser::Parser<'a> {
let mut parser = parse::tts_to_parser(self.parse_sess, tts.to_vec());
parser.allow_interpolated_tts = false; // FIXME(jseyfried) `quote!` can't handle these yet
parser
parse::tts_to_parser(self.parse_sess, tts.to_vec())
}
pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
Expand Down
7 changes: 3 additions & 4 deletions src/libsyntax/ext/tt/macro_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,6 @@ use ast::Ident;
use syntax_pos::{self, BytePos, mk_sp, Span};
use codemap::Spanned;
use errors::FatalError;
use parse::lexer::*; //resolve bug?
use parse::{Directory, ParseSess};
use parse::parser::{PathStyle, Parser};
use parse::token::{DocComment, MatchNt, SubstNt};
Expand Down Expand Up @@ -407,9 +406,9 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
Success(())
}

pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree], directory: Option<Directory>)
pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>)
-> NamedParseResult {
let mut parser = Parser::new(sess, Box::new(rdr), directory, true);
let mut parser = Parser::new(sess, tts, directory, true);
let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
let mut next_eis = Vec::new(); // or proceed normally

Expand Down Expand Up @@ -527,7 +526,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"ident" => match p.token {
token::Ident(sn) => {
p.bump();
token::NtIdent(Spanned::<Ident>{node: sn, span: p.span})
token::NtIdent(Spanned::<Ident>{node: sn, span: p.prev_span})
}
_ => {
let token_str = pprust::token_to_string(&p.token);
Expand Down
20 changes: 13 additions & 7 deletions src/libsyntax/ext/tt/macro_rules.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ use ext::expand::{Expansion, ExpansionKind};
use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::{parse, parse_failure_msg};
use ext::tt::transcribe::new_tt_reader;
use parse::{Directory, ParseSess};
use parse::lexer::new_tt_reader;
use parse::parser::Parser;
use parse::token::{self, NtTT, Token};
use parse::token::Token::*;
Expand Down Expand Up @@ -113,13 +113,21 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
_ => cx.span_bug(sp, "malformed macro rhs"),
};
// rhs has holes (`$id` and `$(...)`) that need to be filled
let trncbr =
let mut trncbr =
new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
let mut tts = Vec::new();
loop {
let tok = trncbr.real_token();
if tok.tok == token::Eof {
break
}
tts.push(TokenTree::Token(tok.sp, tok.tok));
}
let directory = Directory {
path: cx.current_expansion.module.directory.clone(),
ownership: cx.current_expansion.directory_ownership,
};
let mut p = Parser::new(cx.parse_sess(), Box::new(trncbr), Some(directory), false);
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);
p.root_module_name = cx.current_expansion.module.mod_path.last()
.map(|id| (*id.name.as_str()).to_owned());

Expand Down Expand Up @@ -187,10 +195,8 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
})),
];

// Parse the macro_rules! invocation (`none` is for no interpolations):
let arg_reader = new_tt_reader(&sess.span_diagnostic, None, def.body.clone());

let argument_map = match parse(sess, arg_reader, &argument_gram, None) {
// Parse the macro_rules! invocation
let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
Success(m) => m,
Failure(sp, tok) => {
let s = parse_failure_msg(tok);
Expand Down
11 changes: 7 additions & 4 deletions src/libsyntax/ext/tt/transcribe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
use self::LockstepIterSize::*;

use ast::Ident;
use errors::{Handler, DiagnosticBuilder};
use errors::Handler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
use parse::lexer::TokenAndSpan;
Expand Down Expand Up @@ -44,8 +44,12 @@ pub struct TtReader<'a> {
/* cached: */
pub cur_tok: Token,
pub cur_span: Span,
/// Transform doc comments. Only useful in macro invocations
pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
}

impl<'a> TtReader<'a> {
pub fn real_token(&mut self) -> TokenAndSpan {
tt_next_token(self)
}
}

/// This can do Macro-By-Example transcription. On the other hand, if
Expand Down Expand Up @@ -76,7 +80,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
/* dummy values, never read: */
cur_tok: token::Eof,
cur_span: DUMMY_SP,
fatal_errs: Vec::new(),
};
tt_next_token(&mut r); /* get cur_tok and cur_span set up */
r
Expand Down
74 changes: 0 additions & 74 deletions src/libsyntax/parse/lexer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span};
use codemap::CodeMap;
use errors::{FatalError, DiagnosticBuilder};
use ext::tt::transcribe::tt_next_token;
use parse::{token, ParseSess};
use str::char_at;
use symbol::{Symbol, keywords};
Expand All @@ -23,53 +22,10 @@ use std::char;
use std::mem::replace;
use std::rc::Rc;

pub use ext::tt::transcribe::{TtReader, new_tt_reader};

pub mod comments;
mod tokentrees;
mod unicode_chars;

pub trait Reader {
fn is_eof(&self) -> bool;
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()>;
fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
let res = self.try_next_token();
self.unwrap_or_abort(res)
}
/// Report a fatal error with the current span.
fn fatal(&self, &str) -> FatalError;
/// Report a non-fatal error with the current span.
fn err(&self, &str);
fn emit_fatal_errors(&mut self);
fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
match res {
Ok(tok) => tok,
Err(_) => {
self.emit_fatal_errors();
panic!(FatalError);
}
}
}
fn peek(&self) -> TokenAndSpan;
/// Get a token the parser cares about.
fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
let mut t = self.try_next_token()?;
loop {
match t.tok {
token::Whitespace | token::Comment | token::Shebang(_) => {
t = self.try_next_token()?;
}
_ => break,
}
}
Ok(t)
}
fn real_token(&mut self) -> TokenAndSpan {
let res = self.try_real_token();
self.unwrap_or_abort(res)
}
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct TokenAndSpan {
pub tok: token::Token,
Expand Down Expand Up @@ -182,36 +138,6 @@ impl<'a> StringReader<'a> {
}
}

impl<'a> Reader for TtReader<'a> {
fn is_eof(&self) -> bool {
self.peek().tok == token::Eof
}
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
assert!(self.fatal_errs.is_empty());
let r = tt_next_token(self);
debug!("TtReader: r={:?}", r);
Ok(r)
}
fn fatal(&self, m: &str) -> FatalError {
self.sp_diag.span_fatal(self.cur_span, m)
}
fn err(&self, m: &str) {
self.sp_diag.span_err(self.cur_span, m);
}
fn emit_fatal_errors(&mut self) {
for err in &mut self.fatal_errs {
err.emit();
}
self.fatal_errs.clear();
}
fn peek(&self) -> TokenAndSpan {
TokenAndSpan {
tok: self.cur_tok.clone(),
sp: self.cur_span,
}
}
}

impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into next_pos and ch
pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
Expand Down
5 changes: 2 additions & 3 deletions src/libsyntax/parse/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ pub mod obsolete;

/// Info about a parsing session.
pub struct ParseSess {
pub span_diagnostic: Handler, // better be the same as the one in the reader!
pub span_diagnostic: Handler,
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
/// Used to determine and report recursive mod inclusions
Expand Down Expand Up @@ -227,8 +227,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream

/// Given tts and the ParseSess, produce a parser
pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
let mut p = Parser::new(sess, Box::new(trdr), None, false);
let mut p = Parser::new(sess, tts, None, false);
p.check_unknown_macro_variable();
p
}
Expand Down
Loading

0 comments on commit debcbf0

Please sign in to comment.