diff --git a/Cargo.lock b/Cargo.lock
index ac6e7974a127e..233f4af1e24bc 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2113,7 +2113,6 @@ dependencies = [
  "fs-err",
  "getopts",
  "jsonpath_lib",
- "once_cell",
  "regex",
  "serde_json",
  "shlex",
@@ -2232,7 +2231,6 @@ name = "linkchecker"
 version = "0.1.0"
 dependencies = [
  "html5ever",
- "once_cell",
  "regex",
 ]
@@ -2491,7 +2489,6 @@ dependencies = [
  "directories",
  "getrandom",
  "jemalloc-sys",
- "lazy_static",
  "libc",
  "libffi",
  "libloading",
@@ -4791,12 +4788,10 @@ dependencies = [
  "arrayvec",
  "askama",
  "base64",
- "byteorder",
  "expect-test",
  "indexmap",
  "itertools 0.12.1",
  "minifier",
- "once_cell",
  "regex",
  "rustdoc-json-types",
  "serde",
@@ -4889,7 +4884,6 @@ dependencies = [
  "getopts",
  "ignore",
  "itertools 0.11.0",
- "lazy_static",
  "regex",
  "rustfmt-config_proc_macro",
  "serde",
@@ -5351,7 +5345,6 @@ version = "0.1.0"
 dependencies = [
  "build_helper",
  "glob",
- "once_cell",
 ]
 
 [[package]]
@@ -5596,7 +5589,6 @@ version = "0.1.0"
 dependencies = [
  "cargo_metadata 0.15.4",
  "ignore",
- "lazy_static",
  "miropt-test-tools",
  "regex",
  "rustc-hash",
diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml
index 9a23811ed3f97..31222f213d800 100644
--- a/src/librustdoc/Cargo.toml
+++ b/src/librustdoc/Cargo.toml
@@ -10,11 +10,9 @@ path = "lib.rs"
 arrayvec = { version = "0.7", default-features = false }
 askama = { version = "0.12", default-features = false, features = ["config"] }
 base64 = "0.21.7"
-byteorder = "1.5"
 itertools = "0.12"
 indexmap = "2"
 minifier = "0.3.0"
-once_cell = "1.10.0"
 regex = "1"
 rustdoc-json-types = { path = "../rustdoc-json-types" }
 serde_json = "1.0"
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 64f0e096cd02e..5c5651f3ef0e6 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -35,13 +35,13 @@ use rustc_resolve::rustdoc::may_be_doc_link;
 use rustc_span::edition::Edition;
 use rustc_span::{Span, Symbol};
 
-use once_cell::sync::Lazy;
 use std::borrow::Cow;
 use std::collections::VecDeque;
 use std::fmt::Write;
 use std::iter::Peekable;
 use std::ops::{ControlFlow, Range};
 use std::str::{self, CharIndices};
+use std::sync::OnceLock;
 
 use crate::clean::RenderedLink;
 use crate::doctest;
@@ -1994,7 +1994,7 @@ pub struct IdMap {
 }
 
 // The map is pre-initialized and cloned each time to avoid reinitializing it repeatedly.
-static DEFAULT_ID_MAP: Lazy<FxHashMap<Cow<'static, str>, usize>> = Lazy::new(|| init_id_map());
+static DEFAULT_ID_MAP: OnceLock<FxHashMap<Cow<'static, str>, usize>> = OnceLock::new();
 
 fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
     let mut map = FxHashMap::default();
@@ -2051,7 +2051,7 @@ fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
 
 impl IdMap {
     pub fn new() -> Self {
-        IdMap { map: DEFAULT_ID_MAP.clone() }
+        IdMap { map: DEFAULT_ID_MAP.get_or_init(init_id_map).clone() }
     }
 
     pub(crate) fn derive<S: AsRef<str> + ToString>(&mut self, candidate: S) -> String {
diff --git a/src/librustdoc/html/render/search_index/encode.rs b/src/librustdoc/html/render/search_index/encode.rs
index 54407c614c4c7..8d715814faad7 100644
--- a/src/librustdoc/html/render/search_index/encode.rs
+++ b/src/librustdoc/html/render/search_index/encode.rs
@@ -166,13 +166,12 @@ pub(crate) fn write_bitmap_to_bytes(
         containers.push(container);
     }
     // https://github.com/RoaringBitmap/RoaringFormatSpec
-    use byteorder::{WriteBytesExt, LE};
     const SERIAL_COOKIE_NO_RUNCONTAINER: u32 = 12346;
     const SERIAL_COOKIE: u32 = 12347;
     const NO_OFFSET_THRESHOLD: u32 = 4;
     let size: u32 = containers.len().try_into().unwrap();
     let start_offset = if has_run {
-        out.write_u32::<LE>(SERIAL_COOKIE | ((size - 1) << 16))?;
+        out.write_all(&u32::to_le_bytes(SERIAL_COOKIE | ((size - 1) << 16)))?;
         for set in containers.chunks(8) {
             let mut b = 0;
             for (i, container) in set.iter().enumerate() {
@@ -180,7 +179,7 @@ pub(crate) fn write_bitmap_to_bytes(
                     b |= 1 << i;
                 }
             }
-            out.write_u8(b)?;
+            out.write_all(&[b])?;
         }
         if size < NO_OFFSET_THRESHOLD {
             4 + 4 * size + ((size + 7) / 8)
@@ -188,21 +187,21 @@ pub(crate) fn write_bitmap_to_bytes(
             4 + 8 * size + ((size + 7) / 8)
         }
     } else {
-        out.write_u32::<LE>(SERIAL_COOKIE_NO_RUNCONTAINER)?;
-        out.write_u32::<LE>(containers.len().try_into().unwrap())?;
+        out.write_all(&u32::to_le_bytes(SERIAL_COOKIE_NO_RUNCONTAINER))?;
+        out.write_all(&u32::to_le_bytes(containers.len().try_into().unwrap()))?;
         4 + 4 + 4 * size + 4 * size
     };
     for (&key, container) in keys.iter().zip(&containers) {
         // descriptive header
         let key: u32 = key.into();
         let count: u32 = container.popcount() - 1;
-        out.write_u32::<LE>((count << 16) | key)?;
+        out.write_all(&u32::to_le_bytes((count << 16) | key))?;
     }
     if !has_run || size >= NO_OFFSET_THRESHOLD {
         // offset header
         let mut starting_offset = start_offset;
         for container in &containers {
-            out.write_u32::<LE>(starting_offset)?;
+            out.write_all(&u32::to_le_bytes(starting_offset))?;
             starting_offset += match container {
                 Container::Bits(_) => 8192u32,
                 Container::Array(array) => u32::try_from(array.len()).unwrap() * 2,
@@ -214,19 +213,19 @@ pub(crate) fn write_bitmap_to_bytes(
         match container {
             Container::Bits(bits) => {
                 for chunk in bits.iter() {
-                    out.write_u64::<LE>(*chunk)?;
+                    out.write_all(&u64::to_le_bytes(*chunk))?;
                 }
             }
             Container::Array(array) => {
                 for value in array.iter() {
-                    out.write_u16::<LE>(*value)?;
+                    out.write_all(&u16::to_le_bytes(*value))?;
                 }
             }
             Container::Run(runs) => {
-                out.write_u16::<LE>((runs.len()).try_into().unwrap())?;
+                out.write_all(&u16::to_le_bytes(runs.len().try_into().unwrap()))?;
                 for (start, lenm1) in runs.iter().copied() {
-                    out.write_u16::<LE>(start)?;
-                    out.write_u16::<LE>(lenm1)?;
+                    out.write_all(&u16::to_le_bytes(start))?;
+                    out.write_all(&u16::to_le_bytes(lenm1))?;
                 }
             }
         }
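Side note on the `encode.rs` hunks above: the `byteorder` calls (`write_u32::<LE>`, `write_u16::<LE>`, `write_u64::<LE>`, `write_u8`) are replaced with the standard library's `to_le_bytes` plus `Write::write_all`, which emit the same little-endian bytes. A minimal sketch of the equivalence; the helper name `write_u32_le` is illustrative and not part of the patch:

```rust
use std::io::{self, Write};

/// Writes a `u32` as little-endian bytes, the way `byteorder`'s
/// `WriteBytesExt::write_u32::<LE>` did, using only the standard library.
fn write_u32_le(out: &mut impl Write, value: u32) -> io::Result<()> {
    out.write_all(&value.to_le_bytes())
}

fn main() -> io::Result<()> {
    let mut buf = Vec::new();
    // 12346 is the RoaringBitmap SERIAL_COOKIE_NO_RUNCONTAINER constant used above.
    write_u32_le(&mut buf, 12346)?;
    assert_eq!(buf, [0x3a, 0x30, 0x00, 0x00]);
    Ok(())
}
```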
diff --git a/src/tools/jsondocck/Cargo.toml b/src/tools/jsondocck/Cargo.toml
index 6326a9b1e79c3..e1eb6d0566513 100644
--- a/src/tools/jsondocck/Cargo.toml
+++ b/src/tools/jsondocck/Cargo.toml
@@ -10,4 +10,3 @@ regex = "1.4"
 shlex = "1.0"
 serde_json = "1.0"
 fs-err = "2.5.0"
-once_cell = "1.0"
diff --git a/src/tools/jsondocck/src/main.rs b/src/tools/jsondocck/src/main.rs
index e3d05ec83159d..688b403bf0e0a 100644
--- a/src/tools/jsondocck/src/main.rs
+++ b/src/tools/jsondocck/src/main.rs
@@ -1,8 +1,8 @@
 use jsonpath_lib::select;
-use once_cell::sync::Lazy;
 use regex::{Regex, RegexBuilder};
 use serde_json::Value;
 use std::borrow::Cow;
+use std::sync::OnceLock;
 use std::{env, fmt, fs};
 
 mod cache;
@@ -95,7 +95,8 @@ impl fmt::Display for CommandKind {
     }
 }
 
-static LINE_PATTERN: Lazy<Regex> = Lazy::new(|| {
+static LINE_PATTERN: OnceLock<Regex> = OnceLock::new();
+fn line_pattern() -> Regex {
     RegexBuilder::new(
         r#"
         \s(?P<invalid>!?)@(?P<negated>!?)
@@ -107,7 +108,7 @@ static LINE_PATTERN: Lazy<Regex> = Lazy::new(|| {
     .unicode(true)
     .build()
     .unwrap()
-});
+}
 
 fn print_err(msg: &str, lineno: usize) {
     eprintln!("Invalid command: {} on line {}", msg, lineno)
}
@@ -123,7 +124,7 @@ fn get_commands(template: &str) -> Result<Vec<Command>, ()> {
     for (lineno, line) in file.split('\n').enumerate() {
         let lineno = lineno + 1;
 
-        let cap = match LINE_PATTERN.captures(line) {
+        let cap = match LINE_PATTERN.get_or_init(line_pattern).captures(line) {
             Some(c) => c,
             None => continue,
         };
diff --git a/src/tools/linkchecker/Cargo.toml b/src/tools/linkchecker/Cargo.toml
index 318a69ab8359e..05049aabc7d9c 100644
--- a/src/tools/linkchecker/Cargo.toml
+++ b/src/tools/linkchecker/Cargo.toml
@@ -9,5 +9,4 @@ path = "main.rs"
 
 [dependencies]
 regex = "1"
-once_cell = "1"
 html5ever = "0.26.0"
diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs
index f49c6e79f13c5..32f935de73025 100644
--- a/src/tools/linkchecker/main.rs
+++ b/src/tools/linkchecker/main.rs
@@ -18,8 +18,6 @@ use html5ever::tendril::ByteTendril;
 use html5ever::tokenizer::{
     BufferQueue, TagToken, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts,
 };
-use once_cell::sync::Lazy;
-use regex::Regex;
 use std::cell::RefCell;
 use std::collections::{HashMap, HashSet};
 use std::env;
@@ -69,8 +67,12 @@ const INTRA_DOC_LINK_EXCEPTIONS: &[(&str, &[&str])] = &[
 ];
 
-static BROKEN_INTRA_DOC_LINK: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r#"\[(.*)\]"#).unwrap());
+macro_rules! static_regex {
+    ($re:literal) => {{
+        static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new();
+        RE.get_or_init(|| ::regex::Regex::new($re).unwrap())
+    }};
+}
 
 macro_rules! t {
     ($e:expr) => {
@@ -373,7 +375,7 @@ impl Checker {
         // Search for intra-doc links that rustdoc didn't warn about
         // NOTE: only looks at one line at a time; in practice this should find most links
         for (i, line) in source.lines().enumerate() {
-            for broken_link in BROKEN_INTRA_DOC_LINK.captures_iter(line) {
+            for broken_link in static_regex!(r#"\[(.*)\]"#).captures_iter(line) {
                 if is_intra_doc_exception(file, &broken_link[1]) {
                     report.intra_doc_exceptions += 1;
                 } else {
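For reference, the `static_regex!` macro the linkchecker introduces above (and that rustfmt and tidy add below) is a drop-in replacement for a `once_cell::sync::Lazy<Regex>` static: each call site gets its own hidden `OnceLock`, so the pattern is compiled at most once per site. A self-contained sketch, assuming the `regex` crate is available; it is illustrative rather than part of the patch:

```rust
macro_rules! static_regex {
    ($re:literal) => {{
        // One `OnceLock` per macro call site; the regex is compiled on first use.
        static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new();
        RE.get_or_init(|| ::regex::Regex::new($re).unwrap())
    }};
}

fn main() {
    for line in ["a [broken link] here", "no link"] {
        // The macro expands to an expression of type `&'static Regex`.
        let is_link = static_regex!(r"\[(.*)\]").is_match(line);
        println!("{line:?}: {is_link}");
    }
}
```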
diff --git a/src/tools/miri/Cargo.toml b/src/tools/miri/Cargo.toml
index b00dae784d22c..ac8b4b37e9200 100644
--- a/src/tools/miri/Cargo.toml
+++ b/src/tools/miri/Cargo.toml
@@ -46,7 +46,6 @@ colored = "2"
 ui_test = "0.21.1"
 rustc_version = "0.4"
 regex = "1.5.5"
-lazy_static = "1.4.0"
 tempfile = "3"
 
 [package.metadata.rust-analyzer]
diff --git a/src/tools/miri/tests/ui.rs b/src/tools/miri/tests/ui.rs
index ace0da0125361..efeefbe29fbce 100644
--- a/src/tools/miri/tests/ui.rs
+++ b/src/tools/miri/tests/ui.rs
@@ -1,6 +1,7 @@
 use std::ffi::OsString;
 use std::num::NonZeroUsize;
 use std::path::{Path, PathBuf};
+use std::sync::OnceLock;
 use std::{env, process::Command};
 
 use colored::*;
@@ -67,8 +68,8 @@ fn miri_config(target: &str, path: &str, mode: Mode, with_dependencies: bool) -> Config {
 
     let mut config = Config {
         target: Some(target.to_owned()),
-        stderr_filters: STDERR.clone(),
-        stdout_filters: STDOUT.clone(),
+        stderr_filters: stderr_filters().into(),
+        stdout_filters: stdout_filters().into(),
         mode,
         program,
         out_dir: PathBuf::from(std::env::var_os("CARGO_TARGET_DIR").unwrap()).join("ui"),
@@ -174,15 +175,18 @@ fn run_tests(
 }
 
 macro_rules! regexes {
-    ($name:ident: $($regex:expr => $replacement:expr,)*) => {lazy_static::lazy_static! {
-        static ref $name: Vec<(Match, &'static [u8])> = vec![
-            $((Regex::new($regex).unwrap().into(), $replacement.as_bytes()),)*
-        ];
-    }};
+    ($name:ident: $($regex:expr => $replacement:expr,)*) => {
+        fn $name() -> &'static [(Match, &'static [u8])] {
+            static S: OnceLock<Vec<(Match, &'static [u8])>> = OnceLock::new();
+            S.get_or_init(|| vec![
+                $((Regex::new($regex).unwrap().into(), $replacement.as_bytes()),)*
+            ])
+        }
+    };
 }
 
 regexes! {
-    STDOUT:
+    stdout_filters:
     // Windows file paths
     r"\\" => "/",
     // erase borrow tags
@@ -191,7 +195,7 @@ regexes! {
 }
 
 regexes! {
-    STDERR:
+    stderr_filters:
     // erase line and column info
     r"\.rs:[0-9]+:[0-9]+(: [0-9]+:[0-9]+)?" => ".rs:LL:CC",
     // erase alloc ids
diff --git a/src/tools/rustfmt/Cargo.toml b/src/tools/rustfmt/Cargo.toml
index 032b9b548104b..b18c80654a077 100644
--- a/src/tools/rustfmt/Cargo.toml
+++ b/src/tools/rustfmt/Cargo.toml
@@ -44,7 +44,6 @@ dirs = "4.0"
 getopts = "0.2"
 ignore = "0.4"
 itertools = "0.11"
-lazy_static = "1.4"
 regex = "1.7"
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = "1.0"
diff --git a/src/tools/rustfmt/src/comment.rs b/src/tools/rustfmt/src/comment.rs
index 7d1b0384431be..099e12f86dd26 100644
--- a/src/tools/rustfmt/src/comment.rs
+++ b/src/tools/rustfmt/src/comment.rs
@@ -3,8 +3,6 @@
 use std::{borrow::Cow, iter};
 
 use itertools::{multipeek, MultiPeek};
-use lazy_static::lazy_static;
-use regex::Regex;
 use rustc_span::Span;
 
 use crate::config::Config;
@@ -17,17 +15,6 @@ use crate::utils::{
 };
 use crate::{ErrorKind, FormattingError};
 
-lazy_static! {
-    /// A regex matching reference doc links.
-    ///
-    /// ```markdown
-    /// /// An [example].
-    /// ///
-    /// /// [example]: this::is::a::link
-    /// ```
-    static ref REFERENCE_LINK_URL: Regex = Regex::new(r"^\[.+\]\s?:").unwrap();
-}
-
 fn is_custom_comment(comment: &str) -> bool {
     if !comment.starts_with("//") {
         false
@@ -980,11 +967,16 @@ fn trim_custom_comment_prefix(s: &str) -> String {
 /// Returns `true` if the given string MAY include URLs or alike.
 fn has_url(s: &str) -> bool {
     // This function may return false positive, but should get its job done in most cases.
+    // The regex is intended to capture text such as the below.
+    //
+    // /// An [example].
+    // ///
+    // /// [example]: this::is::a::link
     s.contains("https://")
         || s.contains("http://")
         || s.contains("ftp://")
         || s.contains("file://")
-        || REFERENCE_LINK_URL.is_match(s)
+        || static_regex!(r"^\[.+\]\s?:").is_match(s)
 }
 
 /// Returns true if the given string may be part of a Markdown table.
diff --git a/src/tools/rustfmt/src/lib.rs b/src/tools/rustfmt/src/lib.rs
index 877d057a34baa..e07720f30ca5c 100644
--- a/src/tools/rustfmt/src/lib.rs
+++ b/src/tools/rustfmt/src/lib.rs
@@ -5,9 +5,6 @@
 #![allow(clippy::match_like_matches_macro)]
 #![allow(unreachable_pub)]
 
-#[cfg(test)]
-#[macro_use]
-extern crate lazy_static;
 #[macro_use]
 extern crate tracing;
@@ -62,6 +59,13 @@ pub use crate::rustfmt_diff::{ModifiedChunk, ModifiedLines};
 #[macro_use]
 mod utils;
 
+macro_rules! static_regex {
+    ($re:literal) => {{
+        static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new();
+        RE.get_or_init(|| ::regex::Regex::new($re).unwrap())
+    }};
+}
+
 mod attr;
 mod chains;
 mod closures;
diff --git a/src/tools/rustfmt/src/test/configuration_snippet.rs b/src/tools/rustfmt/src/test/configuration_snippet.rs
index 80b61c88a001b..a195b306da386 100644
--- a/src/tools/rustfmt/src/test/configuration_snippet.rs
+++ b/src/tools/rustfmt/src/test/configuration_snippet.rs
@@ -24,19 +24,6 @@ impl ConfigurationSection {
     fn get_section<I: Iterator<Item = String>>(
         file: &mut Enumerate<I>,
     ) -> Option<ConfigurationSection> {
-        lazy_static! {
-            static ref CONFIG_NAME_REGEX: regex::Regex =
-                regex::Regex::new(r"^## `([^`]+)`").expect("failed creating configuration pattern");
-            // Configuration values, which will be passed to `from_str`:
-            //
-            // - must be prefixed with `####`
-            // - must be wrapped in backticks
-            // - may by wrapped in double quotes (which will be stripped)
-            static ref CONFIG_VALUE_REGEX: regex::Regex =
-                regex::Regex::new(r#"^#### `"?([^`]+?)"?`"#)
-                    .expect("failed creating configuration value pattern");
-        }
-
         loop {
             match file.next() {
                 Some((i, line)) => {
@@ -53,9 +40,14 @@
                         let start_line = (i + 2) as u32;
 
                         return Some(ConfigurationSection::CodeBlock((block, start_line)));
-                    } else if let Some(c) = CONFIG_NAME_REGEX.captures(&line) {
+                    } else if let Some(c) = static_regex!(r"^## `([^`]+)`").captures(&line) {
                         return Some(ConfigurationSection::ConfigName(String::from(&c[1])));
-                    } else if let Some(c) = CONFIG_VALUE_REGEX.captures(&line) {
+                    } else if let Some(c) = static_regex!(r#"^#### `"?([^`]+?)"?`"#).captures(&line) {
+                        // Configuration values, which will be passed to `from_str`
+                        //
+                        // - must be prefixed with `####`
+                        // - must be wrapped in backticks
+                        // - may be wrapped in double quotes (which will be stripped)
                         return Some(ConfigurationSection::ConfigValue(String::from(&c[1])));
                     }
                 }
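The miri `regexes!` rewrite above and the suggest-tests change below follow the same recipe: a `lazy_static!` static becomes a function returning a `&'static` slice that is lazily built inside a `OnceLock`. A minimal sketch with illustrative names and a simplified element type (plain string pairs instead of miri's `(Match, &'static [u8])`):

```rust
use std::sync::OnceLock;

// What the rewritten macros expand to, in spirit: callers get a shared,
// lazily initialized slice instead of referencing a lazy_static item.
fn stdout_filters() -> &'static [(&'static str, &'static str)] {
    static S: OnceLock<Vec<(&'static str, &'static str)>> = OnceLock::new();
    // Built once on first call; later calls return the same allocation.
    S.get_or_init(|| vec![(r"\\", "/"), (r"\.rs:[0-9]+:[0-9]+", ".rs:LL:CC")])
}

fn main() {
    assert_eq!(stdout_filters().len(), 2);
    // Both calls see the very same data.
    assert!(std::ptr::eq(stdout_filters().as_ptr(), stdout_filters().as_ptr()));
}
```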
diff --git a/src/tools/suggest-tests/Cargo.toml b/src/tools/suggest-tests/Cargo.toml
index f4f4d548bb79e..7c048d53a505f 100644
--- a/src/tools/suggest-tests/Cargo.toml
+++ b/src/tools/suggest-tests/Cargo.toml
@@ -6,4 +6,3 @@ edition = "2021"
 [dependencies]
 glob = "0.3.0"
 build_helper = { version = "0.1.0", path = "../build_helper" }
-once_cell = "1.17.1"
diff --git a/src/tools/suggest-tests/src/lib.rs b/src/tools/suggest-tests/src/lib.rs
index 1c1d9d0333ddb..8932403ac9fd5 100644
--- a/src/tools/suggest-tests/src/lib.rs
+++ b/src/tools/suggest-tests/src/lib.rs
@@ -5,7 +5,7 @@ use std::{
 
 use dynamic_suggestions::DYNAMIC_SUGGESTIONS;
 use glob::Pattern;
-use static_suggestions::STATIC_SUGGESTIONS;
+use static_suggestions::static_suggestions;
 
 mod dynamic_suggestions;
 mod static_suggestions;
@@ -33,7 +33,7 @@ pub fn get_suggestions<T: AsRef<str>>(modified_files: &[T]) -> Vec<Suggestion> {
     let mut suggestions = Vec::new();
 
     // static suggestions
-    for (globs, sugs) in STATIC_SUGGESTIONS.iter() {
+    for (globs, sugs) in static_suggestions().iter() {
         let globs = globs
             .iter()
             .map(|glob| Pattern::new(glob).expect("Found invalid glob pattern!"))
diff --git a/src/tools/suggest-tests/src/static_suggestions.rs b/src/tools/suggest-tests/src/static_suggestions.rs
index fbd265ea42a2e..a34a4b16ec1b6 100644
--- a/src/tools/suggest-tests/src/static_suggestions.rs
+++ b/src/tools/suggest-tests/src/static_suggestions.rs
@@ -1,10 +1,14 @@
 use crate::{sug, Suggestion};
+use std::sync::OnceLock;
 
 // FIXME: perhaps this could use `std::lazy` when it is stablizied
 macro_rules! static_suggestions {
     ($( [ $( $glob:expr ),* $(,)? ] => [ $( $suggestion:expr ),* $(,)? ] ),* $(,)? ) => {
-        pub(crate) const STATIC_SUGGESTIONS: ::once_cell::unsync::Lazy<Vec<(Vec<&'static str>, Vec<Suggestion>)>>
-            = ::once_cell::unsync::Lazy::new(|| vec![ $( (vec![ $($glob),* ], vec![ $($suggestion),* ]) ),*]);
+        pub(crate) fn static_suggestions() -> &'static [(Vec<&'static str>, Vec<Suggestion>)]
+        {
+            static S: OnceLock<Vec<(Vec<&'static str>, Vec<Suggestion>)>> = OnceLock::new();
+            S.get_or_init(|| vec![ $( (vec![ $($glob),* ], vec![ $($suggestion),* ]) ),*])
+        }
     }
 }
diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml
index 96866e7424889..63963b0bd1ced 100644
--- a/src/tools/tidy/Cargo.toml
+++ b/src/tools/tidy/Cargo.toml
@@ -8,7 +8,6 @@ autobins = false
 cargo_metadata = "0.15"
 regex = "1"
 miropt-test-tools = { path = "../miropt-test-tools" }
-lazy_static = "1"
 walkdir = "2"
 ignore = "0.4.18"
 semver = "1.0"
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index d673ce7a736d9..3e84bf3c34be5 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -17,8 +17,6 @@ use std::fs;
 use std::num::NonZeroU32;
 use std::path::{Path, PathBuf};
 
-use regex::Regex;
-
 #[cfg(test)]
 mod tests;
@@ -251,16 +249,10 @@ fn format_features<'a>(
 }
 
 fn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {
-    lazy_static::lazy_static! {
-        static ref ISSUE: Regex = Regex::new(r#"issue\s*=\s*"([^"]*)""#).unwrap();
-        static ref FEATURE: Regex = Regex::new(r#"feature\s*=\s*"([^"]*)""#).unwrap();
-        static ref SINCE: Regex = Regex::new(r#"since\s*=\s*"([^"]*)""#).unwrap();
-    }
-
     let r = match attr {
-        "issue" => &*ISSUE,
-        "feature" => &*FEATURE,
-        "since" => &*SINCE,
+        "issue" => static_regex!(r#"issue\s*=\s*"([^"]*)""#),
+        "feature" => static_regex!(r#"feature\s*=\s*"([^"]*)""#),
+        "since" => static_regex!(r#"since\s*=\s*"([^"]*)""#),
         _ => unimplemented!("{attr} not handled"),
     };
@@ -528,11 +520,8 @@ fn map_lib_features(
         }};
     }
 
-    lazy_static::lazy_static! {
-        static ref COMMENT_LINE: Regex = Regex::new(r"^\s*//").unwrap();
-    }
-
     // exclude commented out lines
-    if COMMENT_LINE.is_match(line) {
+    if static_regex!(r"^\s*//").is_match(line) {
         continue;
     }
diff --git a/src/tools/tidy/src/fluent_alphabetical.rs b/src/tools/tidy/src/fluent_alphabetical.rs
index 9803b6eab2db5..a85367984deef 100644
--- a/src/tools/tidy/src/fluent_alphabetical.rs
+++ b/src/tools/tidy/src/fluent_alphabetical.rs
@@ -6,8 +6,8 @@ use std::{fs::OpenOptions, io::Write, path::Path};
 
 use regex::Regex;
 
-lazy_static::lazy_static! {
-    static ref MESSAGE: Regex = Regex::new(r#"(?m)^([a-zA-Z0-9_]+)\s*=\s*"#).unwrap();
+fn message() -> &'static Regex {
+    static_regex!(r#"(?m)^([a-zA-Z0-9_]+)\s*=\s*"#)
 }
 
 fn filter_fluent(path: &Path) -> bool {
@@ -20,7 +20,7 @@ fn check_alphabetic(
     bad: &mut bool,
     all_defined_msgs: &mut HashMap<String, String>,
 ) {
-    let mut matches = MESSAGE.captures_iter(fluent).peekable();
+    let mut matches = message().captures_iter(fluent).peekable();
     while let Some(m) = matches.next() {
         let name = m.get(1).unwrap();
         if let Some(defined_filename) = all_defined_msgs.get(name.as_str()) {
@@ -60,7 +60,7 @@ fn sort_messages(
     let mut chunks = vec![];
     let mut cur = String::new();
     for line in fluent.lines() {
-        if let Some(name) = MESSAGE.find(line) {
+        if let Some(name) = message().find(line) {
             if let Some(defined_filename) = all_defined_msgs.get(name.as_str()) {
                 tidy_error!(
                     bad,
diff --git a/src/tools/tidy/src/fluent_used.rs b/src/tools/tidy/src/fluent_used.rs
index b73e79cb38d94..8b6c6c18813d4 100644
--- a/src/tools/tidy/src/fluent_used.rs
+++ b/src/tools/tidy/src/fluent_used.rs
@@ -1,14 +1,9 @@
 //! Checks that all Fluent messages appear at least twice
 use crate::walk::{filter_dirs, walk};
-use regex::Regex;
 use std::collections::HashMap;
 use std::path::Path;
 
-lazy_static::lazy_static! {
-    static ref WORD: Regex = Regex::new(r"\w+").unwrap();
-}
-
 fn filter_used_messages(
     contents: &str,
     msgs_not_appeared_yet: &mut HashMap<String, String>,
@@ -17,7 +12,7 @@ fn filter_used_messages(
     // we don't just check messages never appear in Rust files,
     // because messages can be used as parts of other fluent messages in Fluent files,
     // so we do checking messages appear only once in all Rust and Fluent files.
-    let mut matches = WORD.find_iter(contents);
+    let mut matches = static_regex!(r"\w+").find_iter(contents);
     while let Some(name) = matches.next() {
         if let Some((name, filename)) = msgs_not_appeared_yet.remove_entry(name.as_str()) {
             // if one msg appears for the first time,
diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs
index 23f303276aa2d..c15081a56d1a8 100644
--- a/src/tools/tidy/src/lib.rs
+++ b/src/tools/tidy/src/lib.rs
@@ -5,6 +5,13 @@
 
 use termcolor::WriteColor;
 
+macro_rules! static_regex {
+    ($re:literal) => {{
+        static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new();
+        RE.get_or_init(|| ::regex::Regex::new($re).unwrap())
+    }};
+}
+
 /// A helper macro to `unwrap` a result except also print out details like:
 ///
 /// * The expression that failed
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
index a1445ce5896b0..6b664f02b53b9 100644
--- a/src/tools/tidy/src/style.rs
+++ b/src/tools/tidy/src/style.rs
@@ -18,7 +18,7 @@
 // ignore-tidy-dbg
 
 use crate::walk::{filter_dirs, walk};
-use regex::{Regex, RegexSet};
+use regex::RegexSet;
 use rustc_hash::FxHashMap;
 use std::{ffi::OsStr, path::Path};
@@ -178,20 +178,14 @@ fn should_ignore(line: &str) -> bool {
     // Matches test annotations like `//~ ERROR text`.
     // This mirrors the regex in src/tools/compiletest/src/runtest.rs, please
    // update both if either are changed.
-    lazy_static::lazy_static! {
-        static ref ANNOTATION_RE: Regex = Regex::new("\\s*//(\\[.*\\])?~.*").unwrap();
-    }
+    static_regex!("\\s*//(\\[.*\\])?~.*").is_match(line)
+        || ANNOTATIONS_TO_IGNORE.iter().any(|a| line.contains(a))
+
     // For `ui_test`-style UI test directives, also ignore
     // - `//@[rev] compile-flags`
     // - `//@[rev] normalize-stderr-test`
-    lazy_static::lazy_static! {
-        static ref UI_TEST_LONG_DIRECTIVES_RE: Regex =
-            Regex::new("\\s*//@(\\[.*\\]) (compile-flags|normalize-stderr-test|error-pattern).*")
-                .unwrap();
-    }
-    ANNOTATION_RE.is_match(line)
-        || ANNOTATIONS_TO_IGNORE.iter().any(|a| line.contains(a))
-        || UI_TEST_LONG_DIRECTIVES_RE.is_match(line)
+        || static_regex!("\\s*//@(\\[.*\\]) (compile-flags|normalize-stderr-test|error-pattern).*")
+            .is_match(line)
 }
 
 /// Returns `true` if `line` is allowed to be longer than the normal limit.
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
index f9985a757030b..94a0eee154d93 100644
--- a/src/tools/tidy/src/ui_tests.rs
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -2,8 +2,6 @@
 //! - the number of entries in each directory must be less than `ENTRY_LIMIT`
 //! - there are no stray `.stderr` files
 use ignore::Walk;
-use lazy_static::lazy_static;
-use regex::Regex;
 use std::collections::{BTreeSet, HashMap};
 use std::ffi::OsStr;
 use std::fs;
@@ -182,12 +180,8 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) {
                 }
 
                 if ext == "rs" {
-                    lazy_static! {
-                        static ref ISSUE_NAME_REGEX: Regex =
-                            Regex::new(r"^issues?[-_]?(\d{3,})").unwrap();
-                    }
-
-                    if let Some(test_name) = ISSUE_NAME_REGEX.captures(testname) {
+                    if let Some(test_name) = static_regex!(r"^issues?[-_]?(\d{3,})").captures(testname)
+                    {
                         // these paths are always relative to the passed `path` and always UTF8
                         let stripped_path = file_path
                             .strip_prefix(path)
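Finally, the rustdoc `IdMap` hunk near the top of the diff shows the simplest shape of the migration: a pre-initialized map lives in a `OnceLock` and is cloned per use, exactly as the old `Lazy` static was. A sketch under simplified, assumed types (`HashMap` with `&'static str` keys and made-up entries instead of rustdoc's `FxHashMap<Cow<'static, str>, usize>`):

```rust
use std::collections::HashMap;
use std::sync::OnceLock;

static DEFAULT_ID_MAP: OnceLock<HashMap<&'static str, usize>> = OnceLock::new();

fn init_id_map() -> HashMap<&'static str, usize> {
    // Stand-in entries; rustdoc seeds this with the anchors it reserves.
    HashMap::from([("main", 1), ("crate", 1)])
}

struct IdMap {
    map: HashMap<&'static str, usize>,
}

impl IdMap {
    fn new() -> Self {
        // `get_or_init` runs `init_id_map` at most once, matching the old
        // `Lazy` behavior; each `IdMap` starts from a clone of that default.
        IdMap { map: DEFAULT_ID_MAP.get_or_init(init_id_map).clone() }
    }
}

fn main() {
    let ids = IdMap::new();
    assert_eq!(ids.map.get("main"), Some(&1));
}
```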