Condense StringReader's API to a single function #76286

Closed
wants to merge 1 commit
compiler/rustc_parse/src/lexer/mod.rs (45 changes: 16 additions & 29 deletions)
@@ -1,22 +1,19 @@
 use rustc_ast::ast::AttrStyle;
 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
-use rustc_ast::tokenstream::IsJoint;
-use rustc_data_structures::sync::Lrc;
-use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError};
-use rustc_lexer::Base;
-use rustc_lexer::{unescape, RawStrError};
+use rustc_ast::tokenstream::{IsJoint, TokenStream};
+use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
+use rustc_lexer::unescape::{self, Mode};
+use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{sym, Symbol};
 use rustc_span::{BytePos, Pos, Span};

 use std::char;
 use tracing::debug;

 mod tokentrees;
 mod unescape_error_reporting;
 mod unicode_chars;

-use rustc_lexer::{unescape::Mode, DocStyle};
 use unescape_error_reporting::{emit_unescape_error, push_escaped_char};

 #[derive(Clone, Debug)]
@@ -28,7 +25,17 @@ pub struct UnmatchedBrace {
     pub candidate_span: Option<Span>,
 }

-crate struct StringReader<'a> {
+crate fn parse_token_trees<'a>(
+    sess: &'a ParseSess,
+    src: &'a str,
+    start_pos: BytePos,
+    override_span: Option<Span>,
+) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span }
+        .into_token_trees()
+}
+
+struct StringReader<'a> {
     sess: &'a ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
@@ -37,31 +44,11 @@ crate struct StringReader<'a> {
     /// Stop reading src at this index.
     end_src_index: usize,
     /// Source text to tokenize.
-    src: Lrc<String>,
+    src: &'a str,
     override_span: Option<Span>,
 }

 impl<'a> StringReader<'a> {
-    crate fn new(
-        sess: &'a ParseSess,
-        source_file: Lrc<rustc_span::SourceFile>,
-        override_span: Option<Span>,
-    ) -> Self {
-        let src = source_file.src.clone().unwrap_or_else(|| {
-            sess.span_diagnostic
-                .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
-        });
-
-        StringReader {
-            sess,
-            start_pos: source_file.start_pos,
-            pos: source_file.start_pos,
-            end_src_index: src.len(),
-            src,
-            override_span,
-        }
-    }
-
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
         self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
     }
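For orientation, here is a minimal sketch of what the change above means for a caller inside rustc_parse. The `lex_file` wrapper below is hypothetical (it is not part of this PR) and brace/error handling is elided; it only contrasts the old two-step `StringReader::new` + `into_token_trees` usage with the single `parse_token_trees` entry point:

    use rustc_ast::tokenstream::TokenStream;
    use rustc_data_structures::sync::Lrc;
    use rustc_errors::PResult;
    use rustc_session::parse::ParseSess;
    use rustc_span::{SourceFile, Span};

    // Hypothetical helper somewhere inside rustc_parse; names are illustrative only.
    fn lex_file<'a>(
        sess: &'a ParseSess,
        source_file: &'a Lrc<SourceFile>,
        override_span: Option<Span>,
    ) -> PResult<'a, TokenStream> {
        // Before this PR:
        //     let reader = lexer::StringReader::new(sess, source_file.clone(), override_span);
        //     let (token_trees, unmatched_braces) = reader.into_token_trees();
        //
        // After this PR, the caller pulls the source text and start position out of the
        // SourceFile itself and hands plain borrowed data to the lexer:
        let src = source_file.src.as_ref().expect("cannot lex a SourceFile without source");
        let (token_trees, _unmatched_braces) =
            crate::lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
        token_trees
    }

Because `parse_token_trees` takes `&'a str` and a `BytePos` rather than an `Lrc<SourceFile>`, extracting the source text (and handling the missing-source case) becomes the caller's job, as the `maybe_file_to_stream` diff below shows.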
compiler/rustc_parse/src/lexer/tokentrees.rs (2 changes: 1 addition & 1 deletion)
@@ -12,7 +12,7 @@ use rustc_errors::PResult;
 use rustc_span::Span;

 impl<'a> StringReader<'a> {
-    crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
             token: Token::dummy(),
compiler/rustc_parse/src/lexer/unicode_chars.rs (2 changes: 1 addition & 1 deletion)
@@ -332,7 +332,7 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
     ('"', "Quotation Mark", None),
 ];

-crate fn check_for_substitution<'a>(
+pub(super) fn check_for_substitution<'a>(
     reader: &StringReader<'a>,
     pos: BytePos,
     ch: char,
compiler/rustc_parse/src/lib.rs (9 changes: 7 additions & 2 deletions)
@@ -200,8 +200,13 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new(sess, source_file, override_span);
-    let (token_trees, unmatched_braces) = srdr.into_token_trees();
+    let src = source_file.src.as_ref().unwrap_or_else(|| {
+        sess.span_diagnostic
+            .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
+    });
+
+    let (token_trees, unmatched_braces) =
+        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);

     match token_trees {
         Ok(stream) => Ok((stream, unmatched_braces)),
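As far as this diff shows, the net effect on visibility is that `parse_token_trees` (crate-visible) and the already-`pub` `UnmatchedBrace` are the only lexer items reachable from outside the `lexer` module: `StringReader` loses its `crate` modifier and becomes private, while `into_token_trees` and `check_for_substitution` are narrowed from `crate` to `pub(super)`. The "cannot lex `source_file` without source" bug check moves out of `StringReader::new` into `maybe_file_to_stream`, and since `StringReader` now borrows `src: &'a str`, the `Lrc<String>` owned by the `SourceFile` simply stays alive in the caller for the duration of lexing instead of being cloned into the reader.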