Commit
Rollup merge of rust-lang#59476 - nnethercote:TokenStreamBuilder-SmallVec, r=petrochenkov

Use `SmallVec` in `TokenStreamBuilder`.

This reduces the number of allocations done for a "clean incremental" build of `webrender_api` by 12%, which in turn reduces the instruction count by about 0.5%.

r? @petrochenkov
Centril authored Mar 29, 2019
2 parents: c2d6c08 + 17a8aff · commit 73f9832
Showing 2 changed files with 10 additions and 7 deletions.
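
The win comes from `SmallVec`'s inline storage: a `SmallVec<[T; N]>` keeps up to N elements directly inside the value itself and only falls back to a heap allocation once it grows past N. Since `TokenStreamBuilder` almost always holds one or two streams (see the comment added in the diff below), backing it with `SmallVec<[TokenStream; 2]>` makes the common case allocation-free. A minimal sketch of that behaviour, assuming the `smallvec` crate as a dependency (illustrative only, not part of this commit):

```rust
use smallvec::{smallvec, SmallVec};

fn main() {
    // Up to 2 elements live in the inline buffer: no heap allocation yet.
    let mut streams: SmallVec<[u32; 2]> = smallvec![1, 2];
    assert!(!streams.spilled());

    // A third element exceeds the inline capacity, so the contents
    // "spill" into a single heap allocation, much like an ordinary Vec.
    streams.push(3);
    assert!(streams.spilled());
    assert_eq!(streams.len(), 3);
}
```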
src/libsyntax/parse/attr.rs (3 changes: 2 additions & 1 deletion)
@@ -6,6 +6,7 @@ use crate::parse::parser::{Parser, TokenType, PathStyle};
 use crate::tokenstream::{TokenStream, TokenTree};
 
 use log::debug;
+use smallvec::smallvec;
 
 #[derive(Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -171,7 +172,7 @@ impl<'a> Parser<'a> {
            } else {
                self.parse_unsuffixed_lit()?.tokens()
            };
-            TokenStream::from_streams(vec![eq.into(), tokens])
+            TokenStream::from_streams(smallvec![eq.into(), tokens])
        } else {
            TokenStream::empty()
        };

src/libsyntax/tokenstream.rs (14 changes: 8 additions & 6 deletions)
@@ -24,6 +24,7 @@ use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
 use rustc_data_structures::static_assert;
 use rustc_data_structures::sync::Lrc;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
+use smallvec::{SmallVec, smallvec};
 
 use std::borrow::Cow;
 use std::{fmt, iter, mem};
@@ -224,7 +225,7 @@ impl From<Token> for TokenStream {
 
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<Vec<_>>())
+        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
     }
 }
 
@@ -256,7 +257,7 @@ impl TokenStream {
         }
     }
 
-    pub(crate) fn from_streams(mut streams: Vec<TokenStream>) -> TokenStream {
+    pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
@@ -393,12 +394,13 @@ impl TokenStream {
     }
 }
 
+// 99.5%+ of the time we have 1 or 2 elements in this vector.
 #[derive(Clone)]
-pub struct TokenStreamBuilder(Vec<TokenStream>);
+pub struct TokenStreamBuilder(SmallVec<[TokenStream; 2]>);
 
 impl TokenStreamBuilder {
     pub fn new() -> TokenStreamBuilder {
-        TokenStreamBuilder(Vec::new())
+        TokenStreamBuilder(SmallVec::new())
     }
 
     pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
@@ -485,7 +487,7 @@ impl Cursor {
         }
         let index = self.index;
         let stream = mem::replace(&mut self.stream, TokenStream(None));
-        *self = TokenStream::from_streams(vec![stream, new_stream]).into_trees();
+        *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
         self.index = index;
     }
 
@@ -572,7 +574,7 @@ mod tests {
         let test_res = string_to_ts("foo::bar::baz");
         let test_fst = string_to_ts("foo::bar");
         let test_snd = string_to_ts("::baz");
-        let eq_res = TokenStream::from_streams(vec![test_fst, test_snd]);
+        let eq_res = TokenStream::from_streams(smallvec![test_fst, test_snd]);
         assert_eq!(test_res.trees().count(), 5);
         assert_eq!(eq_res.trees().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
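
The rest of the patch threads the same `SmallVec<[TokenStream; 2]>` type through `from_streams` and its callers, so the one-or-two-stream case (99.5%+ of calls, per the comment added above) never allocates. A simplified, hypothetical builder in the same shape as the patched `TokenStreamBuilder`, using toy names and `String` payloads rather than rustc's types (illustrative only):

```rust
use smallvec::SmallVec;

// Toy builder mirroring the patched TokenStreamBuilder: the backing
// SmallVec keeps up to 2 parts inline, so the common one-or-two-part
// case never touches the heap.
struct PartsBuilder(SmallVec<[String; 2]>);

impl PartsBuilder {
    fn new() -> PartsBuilder {
        PartsBuilder(SmallVec::new())
    }

    fn push<T: Into<String>>(&mut self, part: T) {
        self.0.push(part.into());
    }

    fn build(self) -> String {
        // SmallVec derefs to a slice, so slice::concat works here.
        self.0.concat()
    }
}

fn main() {
    let mut b = PartsBuilder::new();
    b.push("foo");
    b.push("::bar");
    assert_eq!(b.build(), "foo::bar");
}
```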
